[
  {
    "path": ".dockerignore",
    "content": "# Ignore build artifacts to keep Docker context small\n.stack-work/\n.git/\nout/\ncontainer/static-build/.stack-work/\nmorloc-manager/.stack-work/\ndata/rust/target/\n*.hi\n*.o\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "# Release workflow: build all binaries + containers, run tests, publish release.\n#\n# Triggered by pushing a version tag:\n#   git tag v0.68.0\n#   git push origin v0.68.0\n#\n# Produces for each platform (linux-x86_64, linux-arm64, macos-arm64):\n#   - morloc-manager  (Rust binary)\n#   - libmorloc.so    (Rust runtime library; .dylib on macOS)\n#   - morloc-nexus    (Rust binary)\n#\n# All three are attached to the GitHub Release.\n\nname: Release\n\non:\n  push:\n    tags: ['v*']\n\nenv:\n  REGISTRY: ghcr.io\n  IMAGE_BASE: ghcr.io/morloc-project/morloc\n\njobs:\n  # ---- Build Rust binaries (libmorloc + morloc-nexus + morloc-manager) per platform ----\n  rust-binary:\n    strategy:\n      fail-fast: false\n      matrix:\n        include:\n          - os: ubuntu-latest\n            platform: linux-x86_64\n            method: docker\n          - os: ubuntu-24.04-arm\n            platform: linux-arm64\n            method: docker\n          - os: macos-latest\n            platform: macos-arm64\n            method: native\n    runs-on: ${{ matrix.os }}\n    timeout-minutes: 30\n    steps:\n      - uses: actions/checkout@v4\n\n      # ---- Linux: Docker container build ----\n      # libmorloc.so + morloc-nexus: glibc (Ubuntu 20.04)\n      # morloc-manager: static (Alpine/musl)\n      - name: Build Rust binaries (Linux)\n        if: matrix.method == 'docker'\n        run: |\n          docker build -t morloc-rust-build \\\n            -f container/static-build/Dockerfile .\n          mkdir -p out\n          docker run --rm -v \"$(pwd)/out:/out\" morloc-rust-build\n          # Verify morloc-manager is static\n          file out/morloc-manager | grep -qE \"static(ally|-pie) linked\"\n\n      - name: Rename artifacts (Linux)\n        if: matrix.method == 'docker'\n        run: |\n          mv out/libmorloc.so out/libmorloc-${{ matrix.platform }}.so\n          mv out/morloc-nexus out/morloc-nexus-${{ matrix.platform }}\n          mv out/morloc-manager out/morloc-manager-${{ matrix.platform }}\n\n      # ---- macOS: native cargo build ----\n      - name: Setup Rust (macOS)\n        if: matrix.method == 'native'\n        uses: dtolnay/rust-toolchain@stable\n\n      - name: Cache Cargo (macOS)\n        if: matrix.method == 'native'\n        uses: actions/cache@v4\n        with:\n          path: |\n            ~/.cargo/registry\n            ~/.cargo/git\n            data/rust/target\n          key: cargo-macos-${{ hashFiles('data/rust/Cargo.lock') }}\n          restore-keys: cargo-macos-\n\n      - name: Build Rust binaries (macOS)\n        if: matrix.method == 'native'\n        run: |\n          cd data/rust\n          # Build libmorloc (cdylib produces .dylib on macOS)\n          cargo build --release -p morloc-runtime\n          # Install .dylib so nexus can link against it\n          mkdir -p $HOME/.local/share/morloc/lib\n          cp target/release/libmorloc_runtime.dylib $HOME/.local/share/morloc/lib/libmorloc.dylib\n          # Build nexus\n          cargo build --release -p morloc-nexus\n          # Build manager\n          cargo build --release -p morloc-manager\n          # Collect artifacts\n          mkdir -p ../../out\n          cp target/release/libmorloc_runtime.dylib ../../out/libmorloc-${{ matrix.platform }}.dylib\n          cp target/release/morloc-nexus ../../out/morloc-nexus-${{ matrix.platform }}\n          cp target/release/morloc-manager ../../out/morloc-manager-${{ matrix.platform }}\n          strip ../../out/morloc-nexus-${{ matrix.platform }} || true\n          strip ../../out/morloc-manager-${{ matrix.platform }} || true\n\n      - name: Upload Rust artifacts\n        uses: actions/upload-artifact@v4\n        with:\n          name: rust-binaries-${{ matrix.platform }}\n          path: out/*\n\n  # ---- Run tests using the Rust binaries ----\n  test:\n    needs: rust-binary\n    runs-on: ubuntu-latest\n    timeout-minutes: 60\n    env:\n      DEBIAN_FRONTEND: noninteractive\n    steps:\n      - uses: actions/checkout@v4\n      - uses: haskell-actions/setup@v2\n        with:\n          ghc-version: '9.6.7'\n          enable-stack: true\n          stack-version: 'latest'\n      - uses: actions/cache@v4\n        with:\n          path: |\n            ~/.stack/snapshots\n            ~/.stack/setup-exe-cache\n            .stack-work\n          key: stack-deps-release-${{ hashFiles('stack.yaml.lock', 'package.yaml') }}\n          restore-keys: stack-deps-release-\n      - name: Increase shared memory\n        run: sudo mount -o remount,size=4G /dev/shm\n      - name: Install system dependencies\n        run: |\n          sudo apt-get update\n          sudo apt-get install -y --no-install-recommends \\\n            r-base-core python3 python3-dev python3-pip \\\n            python3-numpy g++ gcc make libgsl-dev\n          python3 -m pip install --break-system-packages --upgrade setuptools pyarrow\n\n      - name: Cache R packages\n        uses: actions/cache@v4\n        id: r-cache\n        with:\n          path: ~/R/library\n          key: r-lib-${{ runner.os }}-${{ runner.arch }}\n\n      - name: Install R packages\n        if: steps.r-cache.outputs.cache-hit != 'true'\n        run: |\n          mkdir -p ~/R/library\n          LIBARROW_MINIMAL=true ARROW_S3=OFF ARROW_GCS=OFF \\\n            Rscript -e 'install.packages(\"arrow\", lib=\"~/R/library\", repos = \"https://cloud.r-project.org\")'\n\n      - name: Download Rust binaries\n        uses: actions/download-artifact@v4\n        with:\n          name: rust-binaries-linux-x86_64\n          path: rust-bin\n\n      - name: Prepare Rust binaries\n        run: |\n          mkdir -p prebuilt\n          mv rust-bin/libmorloc-linux-x86_64.so prebuilt/libmorloc.so\n          mv rust-bin/morloc-nexus-linux-x86_64 prebuilt/morloc-nexus\n          mv rust-bin/morloc-manager-linux-x86_64 prebuilt/morloc-manager\n          chmod +x prebuilt/libmorloc.so prebuilt/morloc-nexus prebuilt/morloc-manager\n\n      - name: Add morloc to PATH\n        run: |\n          echo \"$HOME/.local/bin\" >> $GITHUB_PATH\n          echo \"$HOME/.local/share/morloc/bin\" >> $GITHUB_PATH\n          echo \"R_LIBS_USER=$HOME/R/library\" >> $GITHUB_ENV\n      - name: Build morloc\n        run: stack install --system-ghc --no-install-ghc --no-run-tests\n      - name: Initialize morloc (using pre-built Rust binaries)\n        run: |\n          MORLOC_RUST_BIN=$(pwd)/prebuilt morloc init -f\n          morloc install stdlib\n      - name: Run tests\n        run: stack test --system-ghc --no-install-ghc morloc:morloc-test\n        timeout-minutes: 10\n\n  # ---- Build and push container images ----\n  containers:\n    needs: test\n    runs-on: ubuntu-latest\n    timeout-minutes: 120\n    permissions:\n      packages: write\n    steps:\n      - uses: actions/checkout@v4\n\n      - name: Extract version from tag\n        id: ver\n        run: echo \"version=${GITHUB_REF_NAME#v}\" >> \"$GITHUB_OUTPUT\"\n\n      - name: Login to GHCR\n        uses: docker/login-action@v3\n        with:\n          registry: ghcr.io\n          username: ${{ github.actor }}\n          password: ${{ secrets.GITHUB_TOKEN }}\n\n      - name: Build tiny (from local source)\n        run: |\n          docker build \\\n            -t ${{ env.IMAGE_BASE }}/morloc-tiny:${{ steps.ver.outputs.version }} \\\n            -t ${{ env.IMAGE_BASE }}/morloc-tiny:edge \\\n            -f container/tiny/Dockerfile .\n\n      - name: Build full (uses local tiny)\n        run: |\n          docker build \\\n            --build-arg MORLOC_VERSION=${{ steps.ver.outputs.version }} \\\n            -t ${{ env.IMAGE_BASE }}/morloc-full:${{ steps.ver.outputs.version }} \\\n            -t ${{ env.IMAGE_BASE }}/morloc-full:edge \\\n            container/full/\n\n      - name: Push all images\n        run: |\n          for img in morloc-tiny morloc-full; do\n            docker push ${{ env.IMAGE_BASE }}/${img}:${{ steps.ver.outputs.version }}\n            docker push ${{ env.IMAGE_BASE }}/${img}:edge\n          done\n\n  # ---- Create GitHub Release ----\n  release:\n    if: always() && needs.containers.result == 'success'\n    needs: [containers, rust-binary]\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n      - name: Download all artifacts\n        uses: actions/download-artifact@v4\n        with:\n          pattern: 'rust-binaries-*'\n          merge-multiple: true\n\n      - name: List artifacts\n        run: ls -lh\n\n      - name: Create GitHub Release\n        uses: softprops/action-gh-release@v2\n        with:\n          files: |\n            morloc-manager-*\n            libmorloc-*\n            morloc-nexus-*\n          generate_release_notes: true\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Test Morloc\non: [push]\n\njobs:\n  linux-test:\n    runs-on: ubuntu-latest\n    timeout-minutes: 60\n\n    env:\n      DEBIAN_FRONTEND: noninteractive\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Haskell (GHC + Stack)\n        uses: haskell-actions/setup@v2\n        with:\n          ghc-version: '9.6.7'\n          enable-stack: true\n          stack-version: 'latest'\n\n      - name: Setup Rust\n        uses: dtolnay/rust-toolchain@stable\n\n      - name: Cache Stack dependencies\n        uses: actions/cache@v4\n        with:\n          path: |\n            ~/.stack/snapshots\n            ~/.stack/setup-exe-cache\n            .stack-work\n          key: stack-deps-${{ hashFiles('stack.yaml.lock', 'package.yaml') }}\n          restore-keys: |\n            stack-deps-\n\n      - name: Cache Cargo dependencies\n        uses: actions/cache@v4\n        with:\n          path: |\n            ~/.cargo/registry\n            ~/.cargo/git\n            data/rust/target\n          key: cargo-deps-${{ hashFiles('data/rust/Cargo.lock') }}\n          restore-keys: |\n            cargo-deps-\n\n      - name: Increase shared memory (needed for morloc SHM pool tests)\n        run: sudo mount -o remount,size=4G /dev/shm\n\n      - name: Install system dependencies\n        run: |\n          sudo apt-get update\n          sudo apt-get install -y --no-install-recommends \\\n            r-base-core \\\n            python3 \\\n            python3-dev \\\n            python3-pip \\\n            python3-numpy \\\n            g++ \\\n            gcc \\\n            make \\\n            libgsl-dev\n          python3 -m pip install --break-system-packages --upgrade setuptools pyarrow\n\n      - name: Cache R packages\n        uses: actions/cache@v4\n        id: r-cache\n        with:\n          path: ~/R/library\n          key: r-lib-${{ runner.os }}-${{ runner.arch }}\n\n      - name: Install R packages\n        if: steps.r-cache.outputs.cache-hit != 'true'\n        run: |\n          mkdir -p ~/R/library\n          LIBARROW_MINIMAL=true ARROW_S3=OFF ARROW_GCS=OFF \\\n            Rscript -e 'install.packages(\"arrow\", lib=\"~/R/library\", repos = \"https://cloud.r-project.org\")'\n\n      - name: Add morloc bin folders to PATH\n        run: |\n          echo \"$HOME/.local/bin\" >> $GITHUB_PATH\n          echo \"$HOME/.local/share/morloc/bin\" >> $GITHUB_PATH\n          echo \"R_LIBS_USER=$HOME/R/library\" >> $GITHUB_ENV\n\n      - name: Build Morloc\n        run: |\n          stack install --system-ghc --no-install-ghc --no-run-tests\n\n      - name: Initialize morloc and install modules\n        run: |\n          MORLOC_RUST_DIR=$(pwd)/data/rust morloc init -f\n          morloc install stdlib\n\n      - name: Test morloc\n        run: |\n          stack test --system-ghc --no-install-ghc morloc:morloc-test\n        timeout-minutes: 10\n"
  },
  {
    "path": ".gitignore",
    "content": "tags\n*.hi\n*.o\n.stack-work/\n.history\nmorloc.cabal\n*.out\nrun/\n.idea/\n*.iml\n# ignore debugging files\n*.aux\n*.hp\n*.prof\n*.ps\n.bash_history\n.local\n.stack\nCLAUDE.md\n.claude/\nclaude-memory/\n.RData\n.Rhistory\npools/\nnexus\nout/\n"
  },
  {
    "path": "ChangeLog.md",
    "content": "0.81.0 [2026-94-22]\n-------------------\n * fix type inference for type families\n * extend intrinsic support for language-agnostic code\n * do shell-expansion of flagfile lines in morloc-manager\n * fix loss of pool stderr on crash (maybe?)\n\n0.80.1 [2026-04-21]\n-------------------\n * add `morloc-manager new --include <path>` src:dest syntax\n * make stderr/stdout mim principles more consistent\n\n0.80.0 [2026-04-20]\n-------------------\n * cleaner morloc-manager\n * fixed --print bug in nexus\n\n0.79.1-5 [2026-04-19]\n * lot's morloc-manager updates\n\n0.79.0 [2026-04-17]\n-------------------\n * add @datafile intrinsic for finding installed files\n * default to including all files when installing\n   - parse `.morlocignore` to find non-install files\n   - allow `include` in package.yaml for strict file inclusion\n * add `morloc-manage doctor` subcommand for health checks\n * add additional `freeze/unfreeze` validation\n * fix unfreezing bugs\n\n0.78.0 [2026-04-17]\n-------------------\n * clean up CLI usage statements\n * suppress \"null\" outpput in Unit-returning CLIs\n * fix string escape bugs\n * fix where parsing after do-block\n * fix many small morloc-manager issues\n * fix paths in installed morloc programs\n\n\n0.77.1 [2026-04-16]\n-------------------\n * resurrected `morloc-manager log` subcommand\n\n0.77.0 [2026-04-15]\n-------------------\n * many many deployment fixes\n\n0.76.0 [2026-04-13]\n-------------------\n * fix docstring groups\n * add module docstrings\n * fix bug in unfreeze\n\n0.75.0 [2026-04-12]\n-------------------\n * allow effectful final do-notation statements\n * lots of bug fixes\n * make changelog version source of truth\n\n0.74.0 [2026-04-06]\n-------------------\n * unify morloc-manager env, workspace, and version\n * allow docstrings before declarations\n * allow literal leading spaces in docstrings\n * allow escaped `<word>:` syntax\n\n0.73.0 [2026-04-01]\n-------------------\n * fix @load error on missing file\n * fix error reporting on failed `morloc-manager install`\n\n0.72.0 [2026-03-30]\n-------------------\n * fix unicode support in comments and literal strings\n * many fixes to the morloc-manager\n\n0.71.0-2 [2026-03-29]\n-------------------\n * port morloc-manager to rust\n\n0.70.1 to 0.70.6 [2026-03-29]\n-----------------------------\n * fix portability for libmorloc and morloc-nexus on darwin and linux-arm64\n * python flushing face condition\n\n0.70.0 [2026-03-29]\n-------------------\n * port libmorloc and morloc-nexus from C to Rust\n\n0.69.0 [2026-03-27]\n-------------------\n * port morloc-manager from shell to Haskell\n * share it as a static binary\n\n0.68.0 [2026-03-22]\n-------------------\n * remove explicit universal quntifiers\n    - before: `id a :: a -> a`\n    - now:    `id   :: a -> a`\n * add `morloc uninstall --all` option\n * do not require repeated `let` terms\n\n0.67.1 [2026-03-17]\n-------------------\n * add missing Nat kind annotation to root modules\n * better test coverage of Nat kinds\n * add `morloc uninstall --all` option\n * add `stdlib` module that simplifies recursive install of everything\n\n0.67.0 [2026-03-17]\n-------------------\n * add Arrow tables with zero-copy between language sharing\n * add tensors and\n * remove explicit qualifiers\n   - no more `id a :: a -> a`\n   - now `id :: a -> a`\n   - this does not require C++ be more explicit in their typing\n * inline when packet data is smaller than 65kb\n   - this avoids fragmenting the shared memory with many small single-use values\n\n0.66.0 [2026-03-12]\n-------------------\n * fix propagation of errors from crashed pools\n * fix shm memory alignment (inefficiency on x86, crashes on ARM)\n * add `--sanitize` option to `morloc init` for strict memory\n * replace megaparsec with parsec\n * fix label and remote handling\n\n0.65.0 [2026-03-10]\n-------------------\n * `morloc eval` for running anonymous morloc expressions\n * add %inline pragma to skip manifold gen\n * allow batch syntax for instances\n * add @show and @read intrinsics\n * add namespaces for morloc imports\n\n0.64.0 [2026-03-03]\n-------------------\n * replace thunks with more granular effect system\n * add intrinsics\n * disambiguate local module imports \n * change null to Null\n\n0.63.0 [2026-02-25]\n-------------------\n * add optionals with implicit coercion\n * for `?Str` with `literal: true`, force `default: null`\n * add daemon args to nexus\n\n0.62.0 [2026-02-24]\n-------------------\n * term recursion support\n * conditional guards\n * nexus pretty printing option (-p/--print)\n * print defaults in usage statements\n * more bug fixes\n\n0.61.0 [2026-02-21]\n-------------------\n\nBuild updates\n * make nexus a constant binary (not recreated and recompiled)\n * add install handling and portable scripts\n * add morloc daemon mode accessible through HTTP/TCP and sockets\n * add `morloc install --build` option for installing both executable modules\n * add `morloc uninstall`\n * add `morloc list` with -v option for listing types of all exports\n\nTypesystem updates\n * add let syntax that enforces execution order\n * class constraints (e.g., unique :: Eq a => [a] -> [a])\n * superclasses (e.g., class Semigroup => Monoid a where ...)\n * add effect system for delayed execution\n * add do-syntax for imperative programming with effects\n\nBetter errors and UI\n * cleaner error messages\n * error message localization\n * clean `morloc typecheck` output\n * add CLI subcommand tested grouping\n * Add shell TAB-completion\n * fix haddock for future hackage release\n\nBug fixes\n * cleaned up memory issues in all C code\n * replaced mcparallel in R with forked pool of workers\n * fix bug in `morloc install .`\n\nTesting\n * added stress test for zombies and memory issues\n * added daemon tests\n\nPerformance\n * Split the monolithic (~7000 line) morloc.h file\n * Use a libmorloc.so shared library rather than importing all as header\n * Removed all the zombie swarms that where killing heavy morloc projects\n * Remove mcparallel from R, move most of the pool to C, 4X interop speed\n * Compile and reuse a single nexus (reduce compile costs)\n\nLanguage onboarding\n * Added codegen IR that greatly simplifies new language addition\n * Factor all language-specific material out of main Haskell codebase\n * Move all grammar into template yaml specs\n * Created MessagePack bridge to bypass voidstar, making lang onboard almost\n   trivial (at a performance penalty).\n\nOther\n * Transitioned parser from recursive descent to LR1\n\n0.60.0 [2026-02-07]\n-------------------\n * add infix operator support\n\nTesting\n * Generate many new tests with Claude\n * Extensive testing of infix operators and module inheritance\n\nCleanup\n * add formatting with fourmulo\n * add metric and benchmarking\n\nBug fixes\n * Fix several memory issues in morloc.h and lang bindings\n * Fixed indexing bug in Generator.hs\n * Fix missing space in git module cp\n * Make missing folders in install path\n * Fix `morloc install` deletion of mod folder when install has trailing '/' \n * Disallow space between sign and number\n\nPerformance\n * Fixed exponential case hit on eta resolution\n * Fixed quadratic case in subtype algorithm\n\nClaude memory\n * Added claude memory files\n\n0.59.0 [2026-01-23]\n-------------------\n * allow functions in data structures\n * allow source functions to return functions\n * allow application of expressions that evaluate to functions\n * fix bug in C++ bool deserialization\n\n0.58.3 [2026-01-03]\n-------------------\n * fix record docstring inheritance\n\n0.58.2 [2025-12-29]\n-------------------\n * fix minor bug optional versus positional\n\n0.58.1 [2025-12-28]\n-------------------\n * fix minor bug in record unrolling\n\n0.58.0 [2025-12-28]\n-------------------\n * add hex, octal, and binary numeric representations\n * new record syntax\n * generate CLI from docstrings\n     * resolve records into groups of arguments\n     * allow literal strings without the extra JSON quoting\n * change to Apache-2.0\n\n0.57.1 [2025-11-12]\n-------------------\n * minor bug fixes\n\n0.57.0 [2025-11-11]\n-------------------\n * re-allow underscores in variable names\n * add placeholder syntax / lambda lifting from holes\n * multi-line strings\n * string interpolation\n * getter patterns\n * setter patterns\n * write full Morloc nexus evaluator\n * Fix bug in local installs with \".\"\n\n0.56.0 [2025-10-08]\n-------------------\n\nNew file organization\n * Change file layout to conform to XDG spec; \n   replaced `~/.morloc` with `~/.local/share/morloc`\n * Move standard library to `$MORLOC_HOME/src/morloc/plane/default/morloclib`\n   * src/ - stores any source code morloc needs, not just modules \n   * morloc/ - stores morloc modules\n   * plane/ - stores morloc modules that are part of a plane\n   * default/ - the current default plane\n   * morloclib/ - the org name for the core modules in the default plane\n\nNew `morloc install` subcommand functionality\n * Multiple imports may be chained on one command\n * Source and ref can be included per install, for example:\n   * `root` - install a core module\n   * `root@hash:abcdef1234` - specific core instance\n   * `root@tag:v1.0.0` - specific tag/version\n   * `codeberg:weena/calendar@hash:abcdef1234` - 3rd party with source and ref\n * Support for install of local modules\n   \n\n0.55.1 [2025-09-29]\n-------------------\n\n * Bug fixes\n\n0.55.0 [2025-09-27]\n-------------------\n\n * Allow general types to be declared and imported/exported\n * Allow undirected dependencies\n * Allow dashes in module names\n * Fix many bugs in native Morloc code handling\n * Fix bug in certain higher order foreign functions\n * Simplify internal unique naming conventions\n * Move all tests to use the new root module (rather than base)\n * Slightly improved error messages\n\nBreaking changes:\n * Move to reverse tree model of dependency handling\n * Allow exactly one type signature for each term and class\n * Require explicit typeclass import/export\n * Fix handling of Python builtin imports\n    Now builtins must not be imported from Python source. Instead\n    import directly from Python, for example: `source Py (\"len\")`\n * More efficient Haskell Map usage\n\n0.54.2 [2025-08-09]\n-------------------\n\n * Enforce lowercase module name rule\n * Fix handling of executable file name option\n * Fix record handling\n\n0.54.1 [2025-07-26]\n-------------------\n\n * Fix pickle bug in Python multiprocessing\n * Replace asprintf non-standard C function \n * Partially fix interop for C structs\n * Fix bug in unqualified imports\n\n0.54.0 [2025-07-13]\n-------------------\n\n * Add full MessagePack and VoidStar IO to nexus\n * Fixed type pretty printing in usage and typechecking output\n * Add nexus option and usage info\n * Add support for one-line command docstrings\n\n0.53.7 [2025-05-31]\n-------------------\n\n * Fix bug in parameterized type definition concrete type inference\n\n0.53.6 [2025-05-31]\n-------------------\n\n * Improve container instructions in README\n * Fix all README examples\n * Make Dockerfiles more portable\n * Fix bug in implicit typeclass instance inheritance\n\n0.53.5 [2025-05-12]\n-------------------\n\n * Fixed unnecessary copying in C libs\n * Fixed double let-binding in code gen\n\n0.53.4 [2025-05-08]\n-------------------\n\n * Replace C daemon forking with thread pooling (4X speedup)\n * Fix bugs related to 0-length array memory allocation\n\n0.53.3 [2025-05-06]\n-------------------\n\n * Fix handling of empty vectors\n\n0.53.2 [2025-05-06]\n-------------------\n\n * Fix bug in JSON parsing\n\n0.53.1 [2025-05-06]\n-------------------\n\n * Update containers\n * Update github actions\n * Fix bug in type scoping (sort of)\n\n0.53.0 [2025-05-04]\n-------------------\n\nUnify backend under shared C library\n\n * Replace python nexus with C nexus\n * Move packets to little-endian format\n * Move all packet handling, binary protocol specs, socket handling, JSON\n   parsing from pool code into the shared morloc.h library\n * Replace R `future` parallelism scheme with builtin `parallel` library\n\nAdd remote worker and resource management support\n\n * Add conventions for specifying caching and evaluation strategy\n * Add xxhash hashing for caching\n * Add remote handling with experimental slurm support\n\nOther\n\n * `morloc` commands now return proper exit codes\n\n0.52.1 [2025-02-16]\n-------------------\n * Add python native bytes and bytearray support\n\n0.52.0 [2025-02-09]\n-------------------\n\nType evaluation and specialization\n * Delayed general alias evaluation\n * Add strict numeric types (`int8`, `uint8`, `int16` etc)\n * Allow concrete alias specialization\n * Add type hints allowing concrete type conservation across foreign calls\n\nBug fixes\n * Fix bug in opening existing shared memory volumes\n\nLanguage support\n  * Add C++ support for std template list-like types (`list`, `forward_list`,\n     `deque`, `stack`, and `set` (I know, they're a tad different)\n  * Add Python support for `numpy` vectors and arrays\n  * Allow raw R vectors to be interpreted as strings\n  * Nexus no longer creates python objects from returned data, instead writes\n    results directly through C library (`morloc.h`)\n\n0.51.1 [2024-12-04]\n-------------------\n\n * Do not catch STDOUT and STDERR\n * Fix NULL return errors\n * Fix container setup\n\n0.51.0 [2024-12-02]\n------------------\n\nShared memory\n\n * Allow processes to communicate through shared memory\n\nSetup\n\n * `morloc init` command will now build the `morloc` ecosystem, writing all\n   required headers to `morloc` home and compiling any required shared libraries.\n\nOther\n\n * Fix cases where morloc stalls when errors are transmitted across languages\n * Moved demos to the dedicated example repo\n\n\n0.50.0 [2024-11-08]\n-------------------\n\nBetter installation\n\n * `morloc install --commit 45d8df12` - for github retrieval by commit hash\n * `morloc install --branch dev` - to retrieve latest from specific branch\n * `morloc install --tag 0.1.0` - to retrieve specific tag\n\nBetter containers\n * Use podman rather than docker in Makefile\n * `morloc-tiny:<version>` - everything needed to compile morloc projects\n * `morloc-full:<version>` - an environment for running projects\n\n\n0.49.0 [2024-11-04]\n-------------------\n\nNew backend with better performance\n\n * Mediate inter-process communication with UNIX domain sockets\n * Transmit data with MessagePack rather than JSON\n * Added a benchmarking suite\n * Added `morloc init` command to configure morloc ecosystem\n\n0.48.0 [2024-05-10]\n-------------------\n\nSecond PeerJ submission (after return by editor for technical reasons)\n\n * Made type parameters explicit as type arguments:\n   `id a :: a -> a`\n    rather than either of\n   `id :: a -> a`\n   `id :: forall a . a -> a`.\n * Pass-by-reference in generated C++ code\n * Simplify generated C++ code by using function template arguments rather\n   than type casting.\n\n\n0.47.2 [2024-04-28]\n-------------------\n\n * made the build static\n * fixed the dockerfile\n * exported the vim-syntax file out to its own repo\n * added a --version option\n * upgraded to LTS 22.18\n\n0.47.2 [2024-04-28]\n-------------------\n\nFirst PeerJ submission\n\n0.47.0 [2024.03.10]\n-------------------\n * Add value checker\n * Raise error when implementations have equal score (no arbitrary choice)\n\n0.46.0 [2024.03.06]\n-------------------\n\n * Fix precedence of '@' operator - it binds tightest\n * Update flu demo\n * Fix github actions\n * Fix Dockerfile\n * Address all compiler warnings\n\n0.45.0 [2024.02.14]\n-------------------\n\n * Allow explicit import of polymorphic terms\n * Fix infinite loop bug when two many arguments are applied to a function\n * Synchronise tests with new core libraries type classes\n\n0.44.0 [2024.02.08]\n-------------------\n\nAdd support for ad hoc polymorphism.\n  * Support sources and declarations in classes \n  * Support multiple parameters\n  * Support overlapping instances\n  * Packers are now implemented through the `Packable` typeclass\n\nSome missing features:\n  * No support typeclass constraints in the type signatures.\n  * No support for parameterized class variables\n  * No support for polymorphic recursion (does anyone want that?)\n\n0.43.0 [2024.01.14]\n-------------------\n\nNew features\n * Allow a module to explicitly export packers\n * Show pool pseudocode for `typecheck -r` \n * Add `typecheck dump` subcommand to show expressions and indices\n * Allow nexus inputs to be files\n * Remove concrete type signatures - always infer\n * Make fields in language-specific table decs optional\n     Rather than this:\n       table (Person a) = Person {name :: Str, info :: a}\n       table R (Person a) = \"data.frame\" {name :: Str, info :: a}\n     Allow this:\n       table (Person a) = Person {name :: Str, info :: a}\n       table R (Person a) = \"data.frame\"\n     Really, I need to totally redo the table/record/object handling.\n * Remove support for anonymous records in type signatures\n     I will re-add this possibly at a future time when I refactor\n\nInfrastructure changes\n * Pass all data between pools as files rather than string arguments\n * Raise an error if any general type cannot be translated to a concrete type\n\nFixes\n * Fix record type inference \n * Fix bug in collecting packers (missed packers required by root manifold)\n * Fix C++ handling of quotes and special characters in JSON strings\n\n0.42.0 [2023.10.11]\n-------------------\n\n * Infer concrete types from inferred general types when necessary and possible\n * More informative error messages \n * Fix template resolution in generated C++ code\n * Fix include name shadowing conflict in generated C++ code\n * Partially fix naming conflict in Python and R pools caused by use of \"f\". My\n   solution was name mangling, though we need a more permanent solution to our\n   problem.\n * Let user write correct \"list\" R types for lists, tuples, and records\n * Fix bug in code generation of lets\n\n## Internal\n * For `ForeignInterfaceM` constructor of ExprM, store the full call type, not\n   just the return type\n * Parameterize `ExprM` with the type type (e.g., `Maybe TypeP` or `TypeM`)\n * Replace `Argument` and `PreArgument` with a parameterized `Arg` type.\n * Fix broken `ForeignInterfaceM` and `PoolCallM` cases in `typeOfExprM`\n * Refactor backend\n\n Testing and documentation:\n * Complete flu demo - it builds now, just need to implement the library code\n   for align and other functions.\n\n\n0.41.1 [2023.05.26]\n-------------------\n\n * Print nothing if \"null\" is returned\n * Fix the import of working directory modules\n * Resolve bug in occur check\n * Streamline github actions script\n * Fix `morloc install` path download for core modules\n * Raise type error for untyped concrete term\n * Fix bug in concrete type synthesis\n\n0.41.0 [2023.04.16]\n-------------------\n\nLanguage updates\n * Add check to avoid infinite typechecker recursion\n * Synthesize concrete types from general types when possible\n * Improve exports\n   * Move exports to module list (as in Haskell)\n   * Add `*` wildcard to export every top-level named term or type\n   * Raise an error if a non-existing term is exported from a module\n * Allow concrete (un)packers to vary in number of generic terms\n * Set functions with empty inputs (e.g., `()`) to have empty lists of arguments\n * Replace the `Null` literal term with `()`\n\n\nPackage updates\n * Default to c++17, rather than c++11\n * Restructure library to avoid name conflicts with pre-existing packages\n * Replace Perl nexus with Python nexus and remove Perl dependencies\n\nBetter error messages and logs\n * Resolve \"shit output\" error message (map index to export name)\n * Tidy up the verbose diagnostics\n * Print general and concrete types for typecheck with -r option\n * Add multiple levels of verbosity (quiet by default)\n\nBug fixes\n * Typechecking bug in record access\n * Fix bug allowing undefined things in module export lists\n * Fix cousin module imports\n * Fix unparameterized (un)packer serialization\n * Fix error raised when a module exporting a type is compiled \n * Fix out of order record fields in nexus output\n\n0.40.0 [2023.02.04]\n-------------------\n\n * Infer types of records\n * Fix bug in concrete type inference\n * Fix bugs in foreign higher order function code generation\n * Simplify generator code\n * Add many tests\n\n0.39.0 [2023.01.03]\n-------------------\n\n * Add compose operator\n * Allow eta-reduction\n\n0.38.0 [2022.12.23]\n-------------------\n\n * Choose casing convention\n   * camel case for terms (for now, underscore is illegal)\n   * pascal case for types\n * Fix sub-module handling\n * Fix import/export of type definitions\n * Better error messages for import/export errors\n * Somewhat formatted `typecheck` subcommand output \n * Add option to typecheck backend (concrete types and language selection)\n\n0.37.0 [2022.12.11]\n-------------------\n\n * Remove extra space printed at the end of R JSON\n * Clarify error message for missing exports\n * Clarify error message for missing concrete signature\n * Fix exponential time bug in parser\n * Allow prime characters in names after first position\n * Allow '.' to separate namespaces in imports/modules\n * Fix infinite loop bug when module name != import name\n\n0.36.0 [2022.02.17]\n-------------------\n\n * Separate reals from integers\n * Remove global haskell extensions from package.yaml\n\n0.36.0 [2022.02.17]\n-------------------\n\n * Separate reals from integers\n * Remove global haskell extensions from package.yaml\n\n0.35.0 [2021.10.24]\n-------------------\n\nWhere scoping and a total architecture refactor\n\n * Fix handling for generic parameterized types\n * Improve whitespace handling\n * Statements are order invariant\n * Thread link from expression to source expression down to generators \n * Typecheck over final abstract syntax trees rather than expressions\n * Separate general and concrete typechecking\n * Pretty instances for all data types\n * More testing\n * Agonized over deep and wide structures\n\n0.34.0 [2021.03.05]\n-------------------\n\n * Add handling for C++ float primitives\n * Let C++ programs be imported through a header and shared library\n * Remove semicolon requirement\n * Add hie.yaml for compatibility with hsl\n * Update dependency set to LTS-17.4\n * Add subparsers to CLI with pcapriotti/optparse-applicative \n * Remove brace requirement for modules and `where` statements\n * Add `-o` option to compiler to specify output executable names\n * Acceptable syntax error messages\n\n0.33.0 [2020.11.03]\n-------------------\n\nFirst hackage release\n\n * Haddock documentation\n * Update README\n * In help statements write universal, not concrete, types\n * Make default containers non-existential (probably a bad decision?)\n\n0.32.0 [2020.11.01]\n-------------------\n\n * Add record/table field access\n * Fix JSON handling in nexus\n * Fix nexus bug necessitated escaping quotations and braces\n * Print general types in nexus help\n * Resolve most GHC warnings\n\n0.31.0 [2020.10.29]\n-------------------\n\n * Fix anonymous records in C++\n * Distinguish 'record', 'object', and 'table'\n * Add object handling\n * Add table handling\n\n0.30.0 [2020.10.23]\n-------------------\n\n * Add `object` keyword for defining record types\n * Add full record serialization handling (C++, py, R)\n\n0.29.0 [2020.10.21]\n-------------------\n\n * Add AST directed (de)serialization framework\n * Add type constructors for parameterized types\n\n0.28.0 [2020.10.12]\n-------------------\n\n * Allow import/export of type aliases\n * Refactor with DAGs all through the parser and typechecker\n\n0.27.0 [2020.10.04]\n-------------------\n\n * Add systematic tests for data serialization\n * Fix bug in C++ serialization\n * Move to serialize to dedicated libraries that require no import\n\n0.26.0 [2020.09.27]\n-------------------\n\nAdd `type` keyword for defining type aliases\n\n0.25.0 [2020.09.26]\n-------------------\n\nNo explicit forall. Instead use Haskell convention of generics being lowercase\nand non-generics being uppercase. \n\n * no more explicit \"forall\"\n * generics are lowercase in type signatures\n * non-generic types are uppercase\n * normal functions are lowercase\n * class constructors are uppercase (though handling for this is not yet implemented)\n\n0.24.0 [2020.09.22]\n-------------------\n\nAllow integration of many instances\n\n0.23.0 [2020.05.14]\n\nBug fixes and code cleanup\n\nBug fixes / tests\n - [x] [x] github issue #7 - new Var=> typechecking rule\n - [x] [x] github issue #9 - rewire container type inference\n - [x] [x] github issue #10\n - [x] [x] github issue #11\n\n\n0.22.0 [2020.04.28]\n-------------------\n\nImplement a schema-directed composable serialization system\n\nMajor changes\n * Fully composable serialization over containers and primitives\n * Improved C++ support of generic functions\n * Record support for R and Python3 (not C++ yet)\n * Refactor generator - replace old grammar system\n * Allow arguments to be passed to general functions\n   (e.g., `foo x = [x]`, where no specific language is needed) \n\nMinor changes\n * change default python3 interpreter from \"python\" to \"python3\"\n * add default library and tmp paths to config handler\n * test composable serialization functions in all supported languages\n * allow wrapped comments in R\n\nTesting - grammar directed testing\n * test record handling\n * remove and replace out-of-date golden tests\n * systematic argument handling tests\n * systematic manifold form tests\n * systematic interop testing\n\n0.21.0 [2020.03.31]\n-------------------\n\nMajor - add handling and test for many many corner cases\n * Allow export of data statements\n * Allow export of non-functions\n * Allow functions with containers at the root\n * Allow export of 0-argument functions \n\nMinor\n * proof-of-concept composable serialization functions in C++ (cppbase)\n * add python tests\n * make the test output look pretty (no weird whitespace)\n\n0.20.0 [2020.03.23]\n-------------------\n\n * Add composable default types\n\n0.19.1 [2020.02.22]\n-------------------\n\n * bug fixes\n\n0.19.0 [2020.02.20]\n-------------------\n\nMajor changes\n * Allow currying\n * Add realization optimizations\n * Refactor generator into series of clear transformations\n * Added handling for dealing with ambiguous ASTs\n\nMinor bug fixes and updates\n * Prettier code generation for C++, Python and R\n * More detailed comments in generated code\n * Allow tags on parenthesized types\n * Fix bug in functions with multiple parameters \n * Fix bug preventing loading of package metadata \n\n0.18.1 [2019.11.08]\n-------------------\n\n * Fix travis\n * Use C++11 for C++ builds\n * Make .morloc/config optional\n * Fix bug in parsing unit type: `()`\n\n0.18.0 [2019.11.04]\n-------------------\n\n * Add bidirectional type system\n * Allow parameterized concrete types\n * Allow higher-order functions\n * Allow properties to contain multiple terms \n * Add many tests\n * Add module system\n * Allow non-primitive types in lists, tuples, and records\n * Removed arq and SPARQL dependency (very fast compilation)\n\n0.17.4 [2019.06.29]\n-------------------\n\n * Add C and C++ handling\n * Define Ord intance for MTypeMeta\n * Allow pools to be called as executables\n * Add type handling to generators\n * Remove redundant SPARQL queries (better performance)\n * New RDF list semantics\n * Use strings to represent concrete types (e.g. \"char\\*\")\n * Write pretty-printed diagnostic files to `$MORLOC_HOME/tmp` \n * Handling for multiple concrete type signatures (e.g., definition of\n   a function in multiple languages).\n * Handling for multiple abstract type signatures\n * Handling for multiple function declarations\n\n0.17.3 [2019.06.14]\n-------------------\n\n * Partial C support\n   - execution of sourced functions\n   - no composition\n   - no foreign calls\n\n * Partial transition to typed generators\n   - bound arguments are still not typed correctly\n\n * Use integer IDs to identify manifolds in pools and the nexus (can to make\n   calls between them) instead of long, mangled names.\n\n * Replace string names of languages (e.g., \"python\") with a sum type.\n\n0.17.2 [2019.05.05]\n-------------------\n\n  Pycon release\n\n0.17.1 [2019.04.26]\n-------------------\n\n * Fix output serialization in generate code\n * Fix module linking in generated code\n\n0.17.0 [2019.04.16]\n-------------------\n\n * Add morloc home\n * Load modules from `$MORLOCHOME/lib`\n * Create monad stack\n\n0.16.2 [2018.03.05]\n-------------------\n\n * Add Zenodo badge making the project citable\n * Move to `morloc-project/morloc` github repo\n\n0.16.1 [2018.09.24]\n-------------------\n\nMinor release consisting of internal refactoring\n\n * Pruned unnecessary code\n * Pruned unnecessary imports\n * Compliance with stricter compile flags\n\n0.16.0 [2018.09.14]\n-------------------\n\n * Write RDF bools in lowercase (\"true\", rather than \"True\"), as per specs\n * Stricter node typing (replace ad hoc names with elements from an ADT)\n * Add very rudimentary typechecking\n * Remove SPARQL server dependency (now there's a sluggish Jena dependency)\n\n0.15.1 [2018.09.10]\n-------------------\n\n * Add error handling and reporting to pools\n * Add type signature comments to generated pools \n * Richer internal data structures\n\n0.15.0 [2018.09.05]\n-------------------\n\n * Generalize code generators using grammar records\n * Add Python compatibility\n * Replace unit tests with golden tests\n * Use docopt and USAGE template for argument handling\n * Report number of arguments in nexus usage statements\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n"
  },
  {
    "path": "README.md",
    "content": "<p align=\"center\">\n  <a href=\"https://github.com/morloc-project/morloc/actions/workflows/.test.yml\">\n    <img src=\"https://github.com/morloc-project/morloc/actions/workflows/.test.yml/badge.svg\" alt=\"build status\">\n  </a>\n  <a href=\"https://github.com/morloc-project/morloc/releases\">\n    <img src=\"https://img.shields.io/github/release/morloc-project/morloc.svg?label=current+release\" alt=\"github release\">\n  </a>\n  <a href=\"https://www.apache.org/licenses/LICENSE-2.0\">\n      <img src=\"https://img.shields.io/badge/License-Apache%202.0-blue.svg\" alt=\"license: Apache 2.0\">\n</a>\n</p>\n\n<p align=\"center\">\n  <a href=\"https://morloc-project.github.io/docs\">Manual</a> |\n  <a href=\"https://discord.gg/dyhKd9sJfF\">Discord</a> |\n  <a href=\"https://peerj.com/articles/cs-3435/\">Paper</a> |\n  <a href=\"https://x.com/morlocproject\">X</a> |\n  <a href=\"https://bsky.app/profile/morloc-project.bsky.social\">BlueSky</a> |\n  <a href= \"mailto:z@morloc.io\">Email</a>\n</p>\n\n\n\n<div align=\"center\">\n<h1>Morloc</h1>\n<h2>compose functions across languages under a common type system</h2>\n</div>\n\n**Why use Morloc?**\n\n * Universal function composition: Import functions from multiple languages and\n   compose them together under a unified, strongly-typed functional framework.\n\n * Polyglot without boilerplate: Use the best language for each task with no\n   manual bindings or interop code.\n\n * Type-directed CLI generation: Write concrete function signatures once and\n   automatically generate elegant command-line interfaces with argument\n   parsing, validation, help text, and documentation.\n\n * Composable CLI tools: Morloc CLI programs can be composed by simply importing\n   them into a new Morloc module and re-exporting their functions.\n\n * Seamless benchmarking and testing: Swap implementations and run the same\n   benchmarks/tests across languages with consistent type signatures and data\n   representation.\n\n * Design universal libraries: Build abstract, type-driven libraries and\n   populate them with foreign language implementations, enabling rigorous code\n   organization and reuse.\n\n * Smarter workflows: Replace brittle application/file-based pipelines with\n   faster, more maintainable pipelines made from functions acting on structured\n   data.\n\n\nBelow is a simple example, for installation details and more examples, see the\n[Manual](https://morloc-project.github.io/docs).\n\nA Morloc module can import functions from foreign languages, assign them general\ntypes, and compose new functions:\n\n\n```morloc\n-- Morloc code, in \"main.loc\"\nmodule m (vsum)\n\nimport root-py\nimport root-cpp\n\nsource Py from \"foo.py\" (\"pmap\")\npmap a b :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"sum\")\nsum :: [Real] -> Real\n\n--' Input numeric vectors that will be summed in parallel\n--' metavar: VECTORS\ntype Vectors = [[Real]]\n\n--' Sum a list of numeric vectors\n--' return: Final sum of all elements in all vectors\nvsum :: Vectors -> Real\nvsum = sum . pmap sum \n```\n\nThe imported code is natural code with no Morloc-specific dependencies.\n\nBelow is the C++ code that defines `sum` as a function of a standard C++ vector\nof `double`s that returns a `double`:\n\n```C++\n// C++ code, in \"foo.hpp\"\n\n#pragma once\n\n#include <vector>\n#include <numeric>\n\ndouble sum(std::vector<double> xs) {\n    return std::accumulate(\n       xs.begin(), xs.end(), 0.0);\n}\n```\n\nBelow is Python code that defines a parallel map function:\n\n```python\n# Python code, in \"foo.py\"\n\nimport multiprocessing as mp\n\n# Parallel map function\ndef pmap(f, xs):\n    with mp.Pool() as pool:\n        results = pool.map(f, xs)\n    return results\n```\n\nThis program can be compiled and run as below:\n\n```\n$ menv morloc make main.loc\n\n$ menv ./nexus vsum -h\nUsage: ./nexus vsum VECTORS\n\nSum a list of numeric vectors\n\nPositional arguments:\n  VECTORS  Input numeric vectors that will be summed in parallel\n           type: [[Real]]\n\nReturn: Real\n  Final sum of all elements in all vectors\n\n$ menv ./nexus vsum [[1.2],[0,0.1]]\n1.3\n```\n"
  },
  {
    "path": "bench/Bench.hs",
    "content": "{-|\nModule      : Bench\nDescription : Benchmark suite for morloc compiler\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nThis benchmark suite tracks performance of key compiler components:\n- Parser: parsing .loc source files\n- Type checker: type inference and checking\n- Code generator: nexus and pool generation\n\nRun with: stack bench\nRun with options: stack bench --benchmark-arguments '--csv bench-results.csv'\n-}\n\nmodule Main (main) where\n\nimport Test.Tasty.Bench\nimport qualified Data.Text as T\nimport System.FilePath ((</>))\nimport qualified System.Directory as SD\n\nimport Morloc (typecheckFrontend, typecheck)\nimport Morloc.Namespace.Prim (Code(..), Defaultable(..))\nimport Morloc.Namespace.State (Config(..), MorlocMonad, MorlocError)\nimport qualified Morloc.Monad as MM\n\n-- | Helper to read benchmark test files\nreadTestFile :: FilePath -> IO Code\nreadTestFile name = do\n  let path = \"bench\" </> \"test-data\" </> name\n  Code . T.pack <$> readFile path\n\n-- | Create a minimal config for benchmarking\nemptyConfig :: IO Config\nemptyConfig = do\n  home <- SD.getHomeDirectory\n  return $ Config\n    { configHome        = home <> \"/.local/share/morloc\"\n    , configLibrary     = home <> \"/.local/share/src/morloc\"\n    , configPlane       = \"default\"\n    , configPlaneCore   = \"morloclib\"\n    , configTmpDir      = home <> \"/.morloc/tmp\"\n    , configBuildConfig = home <> \"/.morloc/.build-config.yaml\"\n    , configLangOverrides = mempty\n    }\n\n-- | Run a MorlocMonad action for benchmarking\nrunBench :: MorlocMonad a -> IO (Either MorlocError a)\nrunBench action = do\n  config <- emptyConfig\n  ((result, _), _) <- MM.runMorlocMonad Nothing 0 config defaultValue action\n  return result\n\nmain :: IO ()\nmain = defaultMain\n  [ bgroup \"Parser\"\n    [ bench \"parse-simple\" $ whnfIO $ do\n        code <- readTestFile \"simple.loc\"\n        runBench (typecheckFrontend Nothing code)\n\n    , bench \"parse-interop\" $ whnfIO $ do\n        code <- readTestFile \"interop.loc\"\n        runBench (typecheckFrontend Nothing code)\n\n    , bench \"parse-complex-types\" $ whnfIO $ do\n        code <- readTestFile \"complex-types.loc\"\n        runBench (typecheckFrontend Nothing code)\n    ]\n\n  , bgroup \"Type Checker\"\n    [ bench \"typecheck-simple\" $ whnfIO $ do\n        code <- readTestFile \"simple.loc\"\n        runBench (typecheck Nothing code)\n\n    , bench \"typecheck-interop\" $ whnfIO $ do\n        code <- readTestFile \"interop.loc\"\n        runBench (typecheck Nothing code)\n\n    , bench \"typecheck-complex-types\" $ whnfIO $ do\n        code <- readTestFile \"complex-types.loc\"\n        runBench (typecheck Nothing code)\n    ]\n\n  -- Note: Code generation benchmarks commented out as they require\n  -- file system access and module initialization\n  -- Uncomment after setting up appropriate test environment\n  {-\n  , bgroup \"Code Generation\"\n    [ bench \"generate-simple\" $ whnfIO $ do\n        code <- readTestFile \"simple.loc\"\n        runBench (writeProgram Nothing code)\n    ]\n  -}\n  ]\n"
  },
  {
    "path": "bench/test-data/complex-types.loc",
    "content": "module complex (processRecords)\n\ntype Record = {id :: Int, name :: Str, values :: [Real]}\n\nprocessRecords :: [Record] -> [Int]\nprocessRecords rs = map (\\r -> r.id) rs\n"
  },
  {
    "path": "bench/test-data/interop.loc",
    "content": "module interop (processList)\n\nimport math (sqrt from python)\n\nprocessList :: [Real] -> [Real]\nprocessList xs = map sqrt xs\n"
  },
  {
    "path": "bench/test-data/simple.loc",
    "content": "module simple (add, mul)\n\nadd :: Int -> Int -> Int\nadd x y = x + y\n\nmul :: Int -> Int -> Int\nmul x y = x * y\n"
  },
  {
    "path": "container/Makefile",
    "content": "# Pushing to the github registry requires a personal token with package\n# permissions. Login is required, for example:\n#\n#   $ echo $GITHUB_TOKEN | podman login ghcr.io -u morloc-project --password-stdin\n#\n# I am currently using the \"classic\" token (ghp_*), not the fine-grained\n# token. These need to be refreshed every few months\n#\n# You need to provide the desired morloc version as an environmental variable,\n# so run the make commands like so:\n#\n# $ make MORLOC_VERSION=0.70.0 build-tiny\n\ndefine HEREDOC\n#!/bin/bash\nmkdir -p ~/.morloc\npodman run --rm \\\n  -e HOME=$$HOME \\\n  -v $$HOME/.morloc:$$HOME/.morloc \\\n  -v $$PWD:$$HOME \\\n  -w $$HOME ghcr.io/morloc-project/morloc/morloc-tiny:edge \\\n  morloc \"$$@\"\nendef\nexport HEREDOC\n\ninstall:\n\t# Pull the exact version to ensure it exists locally\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-tiny:edge\n\t@mkdir -p ${HOME}/bin\n\t@echo \"$$HEREDOC\" > ${HOME}/bin/morloc-edge\n\tchmod 755 ${HOME}/bin/morloc-edge\n\n\n# Build a container that just has the morloc compiler.\n# Context is the repo root (..) so COPY gets the local source tree.\nbuild-tiny:\n\tpodman build --no-cache --force-rm \\\n\t\t-t ghcr.io/morloc-project/morloc/morloc-tiny:$(MORLOC_VERSION) \\\n\t\t-t ghcr.io/morloc-project/morloc/morloc-tiny:edge \\\n\t\t-f tiny/Dockerfile ..\n\n# Build the full interactive development image.\n# Copies the morloc binary from the locally-built morloc-tiny image.\n# Run build-tiny first.\nbuild-full:\n\tpodman build --no-cache --force-rm \\\n\t\t--build-arg MORLOC_VERSION=$(MORLOC_VERSION) \\\n\t\t-t ghcr.io/morloc-project/morloc/morloc-full:$(MORLOC_VERSION) \\\n\t\t-t ghcr.io/morloc-project/morloc/morloc-full:edge \\\n\t\tfull\n\n# Build the required docker image\nbuild-test:\n\tpodman build --no-cache --force-rm -t ghcr.io/morloc-project/morloc/morloc-test test\n\tpodman tag ghcr.io/morloc-project/morloc/morloc-test ghcr.io/morloc-project/morloc/morloc-test:edge\n\nshell:\n\tpodman run --shm-size=4g --rm -it ghcr.io/morloc-project/morloc/morloc-full:edge /bin/bash\n\nshell-tiny:\n\tpodman run --shm-size=4g --rm -it ghcr.io/morloc-project/morloc/morloc-tiny:edge /bin/bash\n\nshell-test:\n\tpodman run --shm-size=4g --rm -it ghcr.io/morloc-project/morloc/morloc-test /bin/bash\n\n# push local containers to the github registry\n# Ensures edge moves to the same digest as $(MORLOC_VERSION)\npush:\n\t# Push tiny version and edge\n\tpodman push ghcr.io/morloc-project/morloc/morloc-tiny:$(MORLOC_VERSION)\n\tpodman push ghcr.io/morloc-project/morloc/morloc-tiny:edge\n\t# Push full version and edge\n\tpodman push ghcr.io/morloc-project/morloc/morloc-full:$(MORLOC_VERSION)\n\tpodman push ghcr.io/morloc-project/morloc/morloc-full:edge\n\t# Push test (no moving tag)\n\tpodman push ghcr.io/morloc-project/morloc/morloc-test:edge\n\n# retrieve the latest morloc builds from the github registry\npull:\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-tiny:$(MORLOC_VERSION)\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-full:$(MORLOC_VERSION)\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-test\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-test:edge\n\n# Convenience: pull moving edge tags\npull-edge:\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-tiny:edge\n\tpodman pull ghcr.io/morloc-project/morloc/morloc-full:edge\n\n# Cleanup of podman images may be done as follows\n# $ podman image prune\n# $ podman container prune\n# $ podman image rm -f $(podman image ls -q)\n"
  },
  {
    "path": "container/full/Dockerfile",
    "content": "# Copy the morloc binary from the matching tiny image\nARG MORLOC_VERSION=edge\nFROM ghcr.io/morloc-project/morloc/morloc-tiny:${MORLOC_VERSION} AS morloc-bin\n\nFROM docker.io/library/ubuntu:24.04\n\nLABEL org.opencontainers.image.source=https://github.com/morloc-project/morloc\nLABEL org.opencontainers.image.description=\"A morloc container intended for interactive shell use\"\nLABEL org.opencontainers.image.licenses=Apache-2.0\n\nCOPY --from=morloc-bin /bin/morloc /bin/morloc\nCOPY --from=morloc-bin /opt/morloc-rust-bin /opt/morloc-rust-bin\n\nWORKDIR $HOME\n\nENV DEBIAN_FRONTEND=noninteractive\nENV PATH=\"/root/.local/bin:/root/.local/share/morloc/bin:${PATH}\"\n# Pre-built Rust binaries: morloc init uses these directly (no cargo needed)\nENV MORLOC_RUST_BIN=/opt/morloc-rust-bin\n\nRUN apt-get update\n\n# Set the timezone, this avoids hanging later on\nRUN TZ=Antarctica/Troll apt-get -y install tzdata\n\nRUN apt-get install -y r-base python3 python3-dev python3-pip python3-numpy libgsl-dev git curl\n\nRUN python3 -m pip install --break-system-packages --upgrade setuptools\n\n# Setup the morloc home (uses pre-built Rust binaries, no cargo needed)\nRUN morloc init\n# Copy morloc-nexus to a system path so it remains on PATH even when\n# morloc-manager bind-mounts over ~/.local/bin with a host directory\nRUN cp /root/.local/share/morloc/bin/morloc-nexus /usr/local/bin/morloc-nexus\nRUN echo \"lang_python3 : python3\" >> $HOME/.local/share/morloc/config\n\nRUN morloc install stdlib\n\nRUN apt-get install -y vim\n\n# Copy over custom vimrc\nCOPY assets/vimrc /root/.vimrc\nCOPY assets/README /root/README\n\n# Set up vim highlighting for morloc\nRUN git clone https://github.com/morloc-project/vimmorloc \\\n  && mkdir -p ~/.vim/syntax/ \\\n  && mkdir -p ~/.vim/ftdetect/ \\\n  && cp vimmorloc/loc.vim ~/.vim/syntax/ \\\n  && echo 'au BufRead,BufNewFile *.loc set filetype=loc' > ~/.vim/ftdetect/loc.vim \\\n  && rm -rf vimmorloc\n\nRUN git clone https://github.com/morloc-project/morloc ~/morloc\n\nCOPY assets/bashrc /etc/bash.bashrc\n\n# Cleanup to reduce image size\nRUN apt-get clean && rm -rf /var/lib/apt/lists/*\n"
  },
  {
    "path": "container/full/assets/README",
    "content": "This container has a full morloc installation and language support for R, Python3, and C++.\n\nThe ~/tests folder contains a set of tests from the morloc test suite. These may\nserve as simplistic examples of morloc scripts. Though they are designed for\ntesting the language not pedagogy.\n"
  },
  {
    "path": "container/full/assets/bashrc",
    "content": "# basic morloc bashrc\n\n# If not running interactively, don't do anything\ncase $- in\n    *i*) ;;\n      *) return;;\nesac\n\n# History Configuration\nHISTCONTROL=ignoreboth:erasedups\nHISTSIZE=10000\nHISTFILESIZE=20000\nshopt -s histappend\n\n# Shell Options\nshopt -s checkwinsize\nshopt -s globstar 2>/dev/null\n\n# Colors\nif [ -x /usr/bin/dircolors ]; then\n    test -r ~/.dircolors && eval \"$(dircolors -b ~/.dircolors)\" || eval \"$(dircolors -b)\"\nfi\n\n# Prompt\n# Detect if we have color support\nif [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then\n    # Define colors\n    RESET='\\[\\033[0m\\]'\n    BOLD='\\[\\033[1m\\]'\n    DIM='\\[\\033[2m\\]'\n    \n    # Color palette\n    BLUE='\\[\\033[38;5;75m\\]'\n    GREEN='\\[\\033[38;5;114m\\]'\n    YELLOW='\\[\\033[38;5;221m\\]'\n    GRAY='\\[\\033[38;5;245m\\]'\n    RED='\\[\\033[38;5;204m\\]'\n\n    DEVNAME=\"\"\n    if [ ! -z \"$MORLOC_ENV_NAME\" ]; then\n      DEVNAME=\"${DIM}${GRAY}(${MORLOC_ENV_NAME})${RESET} \"\n    fi \n\n    MORLOC_VERSION=$(morloc --version)\n    if [ $? -eq 0 ]; then\n      MORLOC_VERSION=\"-${MORLOC_VERSION}\"\n    fi\n    \n    # Format: (container) morloc $\n    PS1=\"${DEVNAME}${YELLOW}morloc${MORLOC_VERSION}${RESET} $ \"\nelse\n    # Fallback for no color\n    PS1=\"morloc $ \"\nfi\n\n# aliases\nalias ls='ls --color=auto'\nalias ll='ls -lh'\nalias la='ls -lah'\nalias l='ls -CF'\n\n# coloring\nalias grep='grep --color=auto'\nalias fgrep='fgrep --color=auto'\nalias egrep='egrep --color=auto'\n\n# fast navigation\nalias ..='cd ..'\nalias ...='cd ../..'\nalias ....='cd ../../..'\nalias .....='cd ../../../..'\n\n# setup path\n[ -d \"$HOME/.local/bin\" ] && PATH=\"$HOME/.local/bin:$PATH\"\n[ -d \"$HOME/bin\" ] && PATH=\"$HOME/bin:$PATH\"\n\n# completions\nif ! shopt -oq posix; then\n  if [ -f /usr/share/bash-completion/bash_completion ]; then\n    . /usr/share/bash-completion/bash_completion\n  elif [ -f /etc/bash_completion ]; then\n    . /etc/bash_completion\n  fi\nfi\n"
  },
  {
    "path": "container/full/assets/vimrc",
    "content": "\" Jump to the last position when reopening a file\nau BufReadPost * if line(\"'\\\"\") > 1 && line(\"'\\\"\") <= line(\"$\") | exe \"normal! g'\\\"\" | endif\n\n\" Load indentation rules and plugins according to the detected filetype.\nfiletype plugin indent on\nsyntax on\nset showcmd             \" Show (partial) command in status line.\nset showmatch           \" Show matching brackets.\nset ignorecase          \" Do case insensitive matching\nset smartcase           \" Do smart case matching\nset incsearch           \" Incremental search\nset autowrite           \" Automatically save before commands like :next and :make\nset hidden              \" Hide buffers when they are abandoned\n\n\" An OK colorscheme\ncolorscheme torte\n"
  },
  {
    "path": "container/static-build/Dockerfile",
    "content": "# Portable build of libmorloc.so, morloc-nexus, and morloc-manager.\n#\n# Strategy:\n#   - libmorloc.so: shared library built on Ubuntu 20.04 (glibc >= 2.31)\n#   - morloc-nexus: dynamically linked to libmorloc.so (glibc >= 2.31)\n#   - morloc-manager: fully static binary (Alpine/musl, runs on any Linux)\n#\n# Build:\n#   docker build -t morloc-rust-build -f container/static-build/Dockerfile .\n#   docker run --rm -v $(pwd)/out:/out morloc-rust-build\n#\n# Output:\n#   ./out/libmorloc.so      (shared library, glibc >= 2.31)\n#   ./out/morloc-nexus      (binary, glibc >= 2.31, links libmorloc.so)\n#   ./out/morloc-manager    (static binary, runs on any Linux including NixOS)\n\n# ===========================================================================\n# Stage 1: Build libmorloc.so + morloc-nexus on Ubuntu (glibc)\n#\n# morloc-nexus dynamically links libmorloc.so at runtime, so both must be\n# built against the same libc. Ubuntu 20.04 gives glibc 2.31 forward compat.\n# ===========================================================================\nFROM docker.io/library/ubuntu:20.04 AS glibc-builder\n\nENV DEBIAN_FRONTEND=noninteractive\n\nRUN apt-get update && apt-get install -y --no-install-recommends \\\n    curl ca-certificates gcc g++ make pkg-config \\\n    && rm -rf /var/lib/apt/lists/*\n\nRUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable \\\n    && /root/.cargo/bin/cargo --version\nENV PATH=\"/root/.cargo/bin:${PATH}\"\n\nWORKDIR /build\n\n# Copy Cargo manifests first for dependency caching\nCOPY data/rust/Cargo.toml data/rust/Cargo.lock ./\nCOPY data/rust/morloc-runtime/Cargo.toml ./morloc-runtime/\nCOPY data/rust/morloc-nexus/Cargo.toml ./morloc-nexus/\nCOPY data/rust/morloc-manager/Cargo.toml ./morloc-manager/\n\n# Create dummy source files for dependency pre-build\nRUN mkdir -p morloc-runtime/src morloc-nexus/src morloc-manager/src \\\n    && echo \"fn main() {}\" > morloc-nexus/src/main.rs \\\n    && echo \"fn main() {}\" > morloc-manager/src/main.rs \\\n    && echo \"\" > morloc-runtime/src/lib.rs\n\nRUN cargo build --release -p morloc-runtime 2>/dev/null || true\n\n# Copy actual source\nCOPY data/rust/ ./\n\n# Force rebuild after copying real source over dummy stubs\nRUN touch morloc-runtime/src/lib.rs morloc-nexus/src/main.rs\n\n# Build libmorloc.so from staticlib via gcc --whole-archive to export ALL\n# symbols (internal Rust runtime state must be visible to language extensions)\nRUN cargo build --release -p morloc-runtime\n\nRUN mkdir -p /root/.local/share/morloc/lib \\\n    && gcc -shared -o /root/.local/share/morloc/lib/libmorloc.so \\\n       -Wl,--whole-archive target/release/libmorloc_runtime.a -Wl,--no-whole-archive \\\n       -lpthread -lrt -ldl -lm\n\n# Build morloc-nexus (dynamically links libmorloc.so)\nRUN cargo build --release -p morloc-nexus\n\nRUN strip /root/.local/share/morloc/lib/libmorloc.so target/release/morloc-nexus\n\n# ===========================================================================\n# Stage 2: Build morloc-manager on Alpine (musl, fully static)\n#\n# morloc-manager has no native dependencies — pure Rust crates only.\n# Building on Alpine produces a fully static musl binary that runs on any\n# Linux, including NixOS and minimal containers.\n# ===========================================================================\nFROM docker.io/library/alpine:3.20 AS musl-builder\n\nRUN apk add --no-cache curl gcc musl-dev\n\nRUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable \\\n    && /root/.cargo/bin/cargo --version\nENV PATH=\"/root/.cargo/bin:${PATH}\"\n\nWORKDIR /build\n\n# Copy Cargo manifests for dependency caching\nCOPY data/rust/Cargo.toml data/rust/Cargo.lock ./\nCOPY data/rust/morloc-runtime/Cargo.toml ./morloc-runtime/\nCOPY data/rust/morloc-nexus/Cargo.toml ./morloc-nexus/\nCOPY data/rust/morloc-manager/Cargo.toml ./morloc-manager/\n\nRUN mkdir -p morloc-runtime/src morloc-nexus/src morloc-manager/src \\\n    && echo \"fn main() {}\" > morloc-nexus/src/main.rs \\\n    && echo \"fn main() {}\" > morloc-manager/src/main.rs \\\n    && echo \"\" > morloc-runtime/src/lib.rs\n\nRUN cargo build --release -p morloc-manager 2>/dev/null || true\n\nCOPY data/rust/ ./\n\n# Force rebuild: cargo may skip if it thinks the cached dummy binary is fresh\nRUN touch morloc-manager/src/main.rs \\\n    && cargo build --release -p morloc-manager\n\nRUN strip target/release/morloc-manager\n\n# ===========================================================================\n# Stage 3: Verify and output\n# ===========================================================================\nFROM docker.io/library/alpine:3.20\n\nRUN apk add --no-cache file\n\nCOPY --from=glibc-builder /root/.local/share/morloc/lib/libmorloc.so /out-staging/libmorloc.so\nCOPY --from=glibc-builder /build/target/release/morloc-nexus /out-staging/morloc-nexus\nCOPY --from=musl-builder /build/target/release/morloc-manager /out-staging/morloc-manager\n\n# Verify morloc-manager is static (no dynamic linker needed)\nRUN file /out-staging/morloc-manager \\\n    && ! ldd /out-staging/morloc-manager 2>&1 | grep -q \"=>\" \\\n    && echo \"OK: morloc-manager has no dynamic dependencies\"\n\nCMD [\"sh\", \"-c\", \"cp /out-staging/libmorloc.so /out-staging/morloc-nexus /out-staging/morloc-manager /out/ && echo 'Wrote libmorloc.so, morloc-nexus, morloc-manager' && ls -lh /out/libmorloc.so /out/morloc-nexus /out/morloc-manager && echo && file /out/morloc-manager\"]\n"
  },
  {
    "path": "container/static-build/build.sh",
    "content": "#!/bin/sh\n# Build portable libmorloc.so, morloc-nexus, and morloc-manager.\n#\n# Usage:\n#   ./container/static-build/build.sh\n#\n# Output:\n#   ./out/libmorloc.so      (shared library, glibc >= 2.31)\n#   ./out/morloc-nexus      (binary, glibc >= 2.31, links libmorloc.so)\n#   ./out/morloc-manager    (static binary, runs on any Linux)\n\nset -e\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"$0\")\" && pwd)\"\nPROJECT_DIR=\"$(cd \"$SCRIPT_DIR/../..\" && pwd)\"\n\n# Detect container engine (prefer podman)\nif command -v podman >/dev/null 2>&1; then\n    ENGINE=podman\nelif command -v docker >/dev/null 2>&1; then\n    ENGINE=docker\nelse\n    echo \"Error: neither podman nor docker found\" >&2\n    exit 1\nfi\n\necho \"Building libmorloc.so, morloc-nexus, and morloc-manager with $ENGINE...\"\n\nmkdir -p \"$PROJECT_DIR/out\"\n\n$ENGINE build \\\n    -t morloc-rust-build \\\n    -f \"$SCRIPT_DIR/Dockerfile\" \\\n    \"$PROJECT_DIR\"\n\n$ENGINE run --rm \\\n    -v \"$PROJECT_DIR/out:/out\" \\\n    morloc-rust-build\n\necho \"\"\necho \"Binaries:\"\nls -lh \"$PROJECT_DIR/out/libmorloc.so\" \"$PROJECT_DIR/out/morloc-nexus\" \"$PROJECT_DIR/out/morloc-manager\"\nfile \"$PROJECT_DIR/out/libmorloc.so\" \"$PROJECT_DIR/out/morloc-nexus\" \"$PROJECT_DIR/out/morloc-manager\"\n"
  },
  {
    "path": "container/test/Dockerfile",
    "content": "FROM docker.io/library/ubuntu:24.04\n\nENV DEBIAN_FRONTEND=noninteractive\n\nRUN apt-get update && apt-get install -y \\\n  git \\\n  gdb \\\n  curl \\\n  pkg-config \\\n  libglib2.0-dev \\\n  build-essential \\\n  libffi-dev \\\n  libgmp-dev \\\n  zlib1g-dev\n\n# Install GHCup into /opt/ghcup (accessible to all users)\nENV GHCUP_INSTALL_BASE_PREFIX=/opt\nENV BOOTSTRAP_HASKELL_NONINTERACTIVE=1\nRUN curl --proto '=https' --tlsv1.2 -sSf https://get-ghcup.haskell.org | sh\n\n# Add ghcup to PATH\nENV PATH=\"/opt/.ghcup/bin:${PATH}\"\n\n# Install Rust toolchain (needed by morloc init to build libmorloc.so)\nRUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y\nENV PATH=\"/root/.cargo/bin:${PATH}\"\n\n# Set the timezone, this avoids hanging later on\nRUN TZ=Antarctica/Troll apt-get -y install tzdata\n\n# hyperfine is needed for shell benchmarks\nRUN apt-get install -y r-base python3 python3-dev python3-pip libgsl-dev git hyperfine\nRUN python3 -m pip install --break-system-packages --upgrade setuptools numpy pyarrow\n\n# Set up R environment\n# stringi is needed for benchmarks\nRUN Rscript -e 'install.packages(\"stringi\", repos  = \"https://cloud.r-project.org\")'\n\n# Install R Arrow package for cross-language Arrow IPC support\nENV LIBARROW_MINIMAL=true\nENV ARROW_S3=OFF\nENV ARROW_GCS=OFF\nRUN Rscript -e 'install.packages(\"arrow\", repos = \"https://cloud.r-project.org\")'\n\n# Cleanup to reduce image size\nRUN apt-get clean && rm -rf /var/lib/apt/lists/*\n\n# Create /home/dev with permissive access so any --user UID:GID can write here.\n# morloc-manager bind-mounts .stack and .local into this directory.\nRUN mkdir -p /home/dev && chmod 1777 /home/dev\n\nCOPY assets/bashrc /etc/bash.bashrc\n"
  },
  {
    "path": "container/test/assets/bashrc",
    "content": "# basic morloc bashrc\n\n# If not running interactively, don't do anything\ncase $- in\n    *i*) ;;\n      *) return;;\nesac\n\n# History Configuration\nHISTCONTROL=ignoreboth:erasedups\nHISTSIZE=10000\nHISTFILESIZE=20000\nshopt -s histappend\n\n# Shell Options\nshopt -s checkwinsize\nshopt -s globstar 2>/dev/null\n\n# Colors\nif [ -x /usr/bin/dircolors ]; then\n    test -r ~/.dircolors && eval \"$(dircolors -b ~/.dircolors)\" || eval \"$(dircolors -b)\"\nfi\n\n# Prompt\n# Detect if we have color support\nif [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then\n    # Define colors\n    RESET='\\[\\033[0m\\]'\n    BOLD='\\[\\033[1m\\]'\n    DIM='\\[\\033[2m\\]'\n    \n    # Color palette\n    BLUE='\\[\\033[38;5;75m\\]'\n    GREEN='\\[\\033[38;5;114m\\]'\n    YELLOW='\\[\\033[38;5;221m\\]'\n    GRAY='\\[\\033[38;5;245m\\]'\n    RED='\\[\\033[38;5;204m\\]'\n\n    DEVNAME=\"\"\n    if [ ! -z \"$MORLOC_ENV_NAME\" ]; then\n      DEVNAME=\"${DIM}${GRAY}(${MORLOC_ENV_NAME})${RESET} \"\n    fi \n    \n    # Format: (container) morloc $\n    PS1=\"${DEVNAME}${YELLOW}morloc-dev${RESET} $ \"\nelse\n    # Fallback for no color\n    PS1=\"morloc-dev $ \"\nfi\n\n# aliases\nalias ls='ls --color=auto'\nalias ll='ls -lh'\nalias la='ls -lah'\nalias l='ls -CF'\n\n# coloring\nalias grep='grep --color=auto'\nalias fgrep='fgrep --color=auto'\nalias egrep='egrep --color=auto'\n\n# fast navigation\nalias ..='cd ..'\nalias ...='cd ../..'\nalias ....='cd ../../..'\nalias .....='cd ../../../..'\n\n# setup path\n[ -d \"$HOME/.local/bin\" ] && PATH=\"$HOME/.local/bin:$PATH\"\n[ -d \"$HOME/bin\" ] && PATH=\"$HOME/bin:$PATH\"\n\n# completions\nif ! shopt -oq posix; then\n  if [ -f /usr/share/bash-completion/bash_completion ]; then\n    . /usr/share/bash-completion/bash_completion\n  elif [ -f /etc/bash_completion ]; then\n    . /etc/bash_completion\n  fi\nfi\n"
  },
  {
    "path": "container/tiny/Dockerfile",
    "content": "###############################################################################\n# Stage 1: Build the morloc compiler from local source\nFROM docker.io/library/ubuntu:24.04 AS morloc-build\n\nENV DEBIAN_FRONTEND=noninteractive\n\nRUN apt-get update && apt-get install -y git curl pkg-config libglib2.0-dev\n\n# Install stack\nRUN curl -SL https://get.haskellstack.org/ | sh\n\n# Copy the local source tree (no git clone -- builds exactly this checkout)\nCOPY . /morloc\n\n# Build morloc\nRUN cd /morloc && stack install --no-run-tests\n\n###############################################################################\n# Stage 2: Build Rust binaries (libmorloc.so + morloc-nexus + morloc-manager)\nFROM docker.io/library/ubuntu:24.04 AS rust-build\n\nENV DEBIAN_FRONTEND=noninteractive\n\nRUN apt-get update && apt-get install -y --no-install-recommends \\\n    curl ca-certificates gcc g++ make pkg-config \\\n    && rm -rf /var/lib/apt/lists/*\n\nRUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable\nENV PATH=\"/root/.cargo/bin:${PATH}\"\n\nCOPY data/rust/ /build/\nWORKDIR /build\n\n# Build libmorloc.so from staticlib\nRUN cargo build --release -p morloc-runtime\nRUN gcc -shared -o libmorloc.so \\\n    -Wl,--whole-archive target/release/libmorloc_runtime.a -Wl,--no-whole-archive \\\n    -lpthread -lrt -ldl -lm\n\n# Install libmorloc.so where the nexus linker expects it\nRUN mkdir -p /root/.local/share/morloc/lib \\\n    && cp libmorloc.so /root/.local/share/morloc/lib/\n\n# Build morloc-nexus and morloc-manager\nRUN cargo build --release -p morloc-nexus\nRUN cargo build --release -p morloc-manager\n\n# Strip all\nRUN strip libmorloc.so target/release/morloc-nexus target/release/morloc-manager\n\n# Collect into a single directory\nRUN mkdir -p /rust-bin \\\n    && cp libmorloc.so /rust-bin/ \\\n    && cp target/release/morloc-nexus /rust-bin/ \\\n    && cp target/release/morloc-manager /rust-bin/\n\n###############################################################################\n# Stage 3: Final minimal image with compiler + pre-built Rust binaries\n\nLABEL org.opencontainers.image.source=https://github.com/morloc-project/morloc\nLABEL org.opencontainers.image.description=\"Morloc executable in a minimal container\"\nLABEL org.opencontainers.image.licenses=Apache-2.0\n\nFROM docker.io/library/ubuntu:24.04\nCOPY --from=morloc-build /root/.local/bin/morloc /bin/morloc\nCOPY --from=rust-build /rust-bin/ /opt/morloc-rust-bin/\n\n# morloc make builds C++ pools, so g++ is needed\nRUN apt-get update && apt-get install -y --no-install-recommends g++ \\\n    && apt-get clean && rm -rf /var/lib/apt/lists/*\n\n# Point morloc init at pre-built binaries (no cargo needed at runtime)\nENV MORLOC_RUST_BIN=/opt/morloc-rust-bin\n"
  },
  {
    "path": "data/lang/c/lang.yaml",
    "content": "# C language metadata for morloc compiler\nname: c\nextension: c\naliases: []\nis_compiled: true\nrun_command: []\nserial_type: \"\"\ncost: 1\n"
  },
  {
    "path": "data/lang/cpp/cppmorloc.cpp",
    "content": "#include <stdexcept>\n#include <string>\n#include <cstdlib>\n\n#include \"morloc.h\"\n\nabsptr_t cpp_rel2abs(relptr_t ptr){\n    char* errmsg = NULL;\n    absptr_t absptr = rel2abs(ptr, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return absptr;\n}\n\nrelptr_t abs2rel_cpp(absptr_t ptr){\n    char* errmsg = NULL;\n    relptr_t relptr = abs2rel(ptr, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return relptr;\n}\n\nbool shfree_cpp(absptr_t ptr){\n    char* errmsg = NULL;\n    bool success = shfree(ptr, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return success;\n}\n\nSchema* parse_schema_cpp(const char* schema_ptr){\n    char* errmsg = NULL;\n    Schema* schema = parse_schema(schema_ptr, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return schema;\n}\n\nvoid* shmalloc_cpp(size_t size){\n    char* errmsg = NULL;\n    void* new_ptr = shmalloc(size, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return new_ptr;\n}\n\nshm_t* shinit_cpp(const char* shm_basename, size_t volume_index, size_t shm_size) {\n    char* errmsg = NULL;\n    shm_t* new_ptr = shinit(shm_basename, volume_index, shm_size, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return new_ptr;\n}\n\nint pack_with_schema_cpp(const void* mlc, const Schema* schema, char** mpk, size_t* mpk_size){\n    char* errmsg = NULL;\n    int exitcode = pack_with_schema(mlc, schema, mpk, mpk_size, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return exitcode;\n}\n\nint unpack_with_schema_cpp(const char* mgk, size_t mgk_size, const Schema* schema, void** mlcptr){\n    char* errmsg = NULL;\n    int exitcode = unpack_with_schema(mgk, mgk_size, schema, mlcptr, &errmsg);\n    if(errmsg != NULL){\n        std::string msg(errmsg); free(errmsg);\n        throw std::runtime_error(msg);\n    }\n    return exitcode;\n}\n"
  },
  {
    "path": "data/lang/cpp/cppmorloc.hpp",
    "content": "#ifndef __CPPMORLOC_HPP__\n#define __CPPMORLOC_HPP__\n\n#include <vector>\n#include <stack>\n#include <list>\n#include <forward_list>\n#include <queue>\n#include <deque>\n#include <optional>\n\n#include <algorithm>\n#include <tuple>\n#include <stdexcept>\n#include <cstring>\n#include <string>\n#include <type_traits>\n\n#include \"morloc.h\"\n#include \"mlc_tensor.hpp\"\n\n// ============================================================\n// Type traits for container dispatch\n// ============================================================\n\ntemplate<typename T> struct is_std_vector : std::false_type {};\ntemplate<typename T, typename A> struct is_std_vector<std::vector<T, A>> : std::true_type {};\n\ntemplate<typename T> struct is_std_list : std::false_type {};\ntemplate<typename T, typename A> struct is_std_list<std::list<T, A>> : std::true_type {};\n\ntemplate<typename T> struct is_std_forward_list : std::false_type {};\ntemplate<typename T, typename A> struct is_std_forward_list<std::forward_list<T, A>> : std::true_type {};\n\ntemplate<typename T> struct is_std_deque : std::false_type {};\ntemplate<typename T, typename A> struct is_std_deque<std::deque<T, A>> : std::true_type {};\n\ntemplate<typename T> struct is_std_stack : std::false_type {};\ntemplate<typename T, typename C> struct is_std_stack<std::stack<T, C>> : std::true_type {};\n\ntemplate<typename T> struct is_std_queue : std::false_type {};\ntemplate<typename T, typename C> struct is_std_queue<std::queue<T, C>> : std::true_type {};\n\ntemplate<typename T> struct is_std_tuple : std::false_type {};\ntemplate<typename... Args> struct is_std_tuple<std::tuple<Args...>> : std::true_type {};\n\ntemplate<typename T> struct is_std_pair : std::false_type {};\ntemplate<typename A, typename B> struct is_std_pair<std::pair<A, B>> : std::true_type {};\n\ntemplate<typename T> struct is_std_optional : std::false_type {};\ntemplate<typename T> struct is_std_optional<std::optional<T>> : std::true_type {};\n\ntemplate<typename T>\ninline constexpr bool is_non_vector_container_v =\n    is_std_list<T>::value || is_std_forward_list<T>::value ||\n    is_std_deque<T>::value || is_std_stack<T>::value ||\n    is_std_queue<T>::value;\n\n\n// ============================================================\n// Container-to-vector conversion\n// ============================================================\n\ntemplate<typename Container>\nauto to_vector(const Container& c) {\n    using T = typename Container::value_type;\n    if constexpr (is_std_stack<Container>::value) {\n        std::vector<T> v;\n        auto copy = c;\n        while (!copy.empty()) { v.push_back(copy.top()); copy.pop(); }\n        std::reverse(v.begin(), v.end());\n        return v;\n    } else if constexpr (is_std_queue<Container>::value) {\n        std::vector<T> v;\n        auto copy = c;\n        while (!copy.empty()) { v.push_back(copy.front()); copy.pop(); }\n        return v;\n    } else {\n        return std::vector<T>(c.begin(), c.end());\n    }\n}\n\n\n// ============================================================\n// C runtime wrappers (implementations in cppmorloc.cpp)\n// ============================================================\n\nabsptr_t cpp_rel2abs(relptr_t ptr);\nrelptr_t abs2rel_cpp(absptr_t ptr);\n\n// Resolve a relative pointer using either base-pointer arithmetic (inline data)\n// or SHM. When base_ptr is non-null, data lives in a contiguous malloc'd blob.\nstatic inline void* resolve_relptr_cpp(relptr_t relptr, const void* base_ptr) {\n    if (base_ptr) {\n        return (char*)base_ptr + relptr;\n    }\n    return cpp_rel2abs(relptr);\n}\nbool shfree_cpp(absptr_t ptr);\nSchema* parse_schema_cpp(const char* schema_ptr);\nvoid* shmalloc_cpp(size_t size);\nshm_t* shinit_cpp(const char* shm_basename, size_t volume_index, size_t shm_size);\nint pack_with_schema_cpp(const void* mlc, const Schema* schema, char** mpk, size_t* mpk_size);\nint unpack_with_schema_cpp(const char* mgk, size_t mgk_size, const Schema* schema, void** mlcptr);\n\n\n// ============================================================\n// mpk_pack / mpk_unpack declarations\n// ============================================================\n\ntemplate<typename T>\nstd::vector<char> mpk_pack(const T& data, const std::string& schema_str);\n\ntemplate<typename T>\nT mpk_unpack(const std::vector<char>& packed_data, const std::string& schema_str);\n\n\n// ============================================================\n// schema_alignment (C++ mirror of the C function in schema.c)\n// ============================================================\n\ninline size_t schema_alignment_cpp(const Schema* schema) {\n    switch (schema->type) {\n        case MORLOC_NIL: case MORLOC_BOOL: case MORLOC_SINT8: case MORLOC_UINT8: return 1;\n        case MORLOC_SINT16: case MORLOC_UINT16: return 2;\n        case MORLOC_SINT32: case MORLOC_UINT32: case MORLOC_FLOAT32: return 4;\n        case MORLOC_SINT64: case MORLOC_UINT64: case MORLOC_FLOAT64:\n        case MORLOC_STRING: case MORLOC_ARRAY: case MORLOC_TENSOR: return alignof(size_t);\n        case MORLOC_TUPLE: case MORLOC_MAP: {\n            size_t max_align = 1;\n            for (size_t i = 0; i < schema->size; i++) {\n                size_t a = schema_alignment_cpp(schema->parameters[i]);\n                if (a > max_align) max_align = a;\n            }\n            return max_align;\n        }\n        case MORLOC_OPTIONAL: return schema_alignment_cpp(schema->parameters[0]);\n        default: return alignof(size_t);\n    }\n}\n\n\n// ============================================================\n// get_shm_size\n// ============================================================\n\n// Forward declaration\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const T& data);\n\nsize_t get_shm_size(const Schema* schema, const std::nullptr_t&) {\n    return sizeof(int8_t);\n}\n\n// Primitives\ntemplate<typename Primitive>\nsize_t get_shm_size(const Schema* schema, const Primitive& data) {\n    return schema->width;\n}\n\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::vector<T>& data) {\n    size_t total_size = schema->width;\n    // worst-case cursor alignment padding for element data\n    total_size += schema_alignment_cpp(schema->parameters[0]) - 1;\n    switch(schema->parameters[0]->type){\n        case MORLOC_NIL:\n        case MORLOC_BOOL:\n        case MORLOC_SINT8:\n        case MORLOC_SINT16:\n        case MORLOC_SINT32:\n        case MORLOC_SINT64:\n        case MORLOC_UINT8:\n        case MORLOC_UINT16:\n        case MORLOC_UINT32:\n        case MORLOC_UINT64:\n        case MORLOC_FLOAT32:\n        case MORLOC_FLOAT64:\n            total_size += data.size() * schema->parameters[0]->width;\n            break;\n        case MORLOC_STRING:\n        case MORLOC_ARRAY:\n        case MORLOC_TUPLE:\n        case MORLOC_MAP:\n        case MORLOC_OPTIONAL:\n            for(size_t i = 0; i < data.size(); i++){\n               total_size += get_shm_size(schema->parameters[0], data[i]);\n            }\n            break;\n    }\n    return total_size;\n}\n\n// Optional: tag byte + aligned inner value\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::optional<T>& data) {\n    if (!data.has_value()) {\n        return schema->width;\n    }\n    size_t inner_size = get_shm_size(schema->parameters[0], *data);\n    size_t extra = (inner_size > schema->parameters[0]->width) ? inner_size - schema->parameters[0]->width : 0;\n    return schema->width + extra;\n}\n\nsize_t get_shm_size(const Schema* schema, const std::string& data) {\n    return schema->width + data.size();\n}\n\nsize_t get_shm_size(void* dest, const Schema* schema, const char* data) {\n    return schema->width + strlen(data);\n}\n\ntemplate<typename Tuple, size_t... Is>\nsize_t createTupleShmSizeHelper(const Schema* schema, const Tuple& data, std::index_sequence<Is...>) {\n    size_t total_size = schema->width;\n    (void)std::initializer_list<int>{(\n        [&](){\n            size_t elem = get_shm_size(schema->parameters[Is], std::get<Is>(data));\n            if (elem > schema->parameters[Is]->width) {\n                total_size += elem - schema->parameters[Is]->width;\n            }\n        }(),\n        0\n    )...};\n    return total_size;\n}\n\ntemplate<typename... Args>\nsize_t get_shm_size(const Schema* schema, const std::tuple<Args...>& data) {\n    return createTupleShmSizeHelper(schema, data, std::index_sequence_for<Args...>{});\n}\n\n// Non-vector containers: convert to vector and delegate\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::list<T>& data) {\n    return get_shm_size(schema, to_vector(data));\n}\n\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::forward_list<T>& data) {\n    return get_shm_size(schema, to_vector(data));\n}\n\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::deque<T>& data) {\n    return get_shm_size(schema, to_vector(data));\n}\n\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::stack<T>& data) {\n    return get_shm_size(schema, to_vector(data));\n}\n\ntemplate<typename T>\nsize_t get_shm_size(const Schema* schema, const std::queue<T>& data) {\n    return get_shm_size(schema, to_vector(data));\n}\n\n// Tensor: header + shape array + contiguous data\ntemplate<typename T, int NDim>\nsize_t get_shm_size(const Schema* schema, const mlc::Tensor<T, NDim>& data) {\n    using S = mlc::tensor_storage_t<T>;\n    size_t total = sizeof(Tensor);\n    // alignment padding for shape array\n    total += alignof(int64_t) - 1;\n    total += NDim * sizeof(int64_t);\n    // alignment padding for element data\n    total += schema_alignment_cpp(schema->parameters[0]) - 1;\n    total += data.size() * sizeof(S);\n    return total;\n}\n\n\n// ============================================================\n// toAnything - top-level (allocating)\n// ============================================================\n\n// Generic top-level: compute size, allocate, serialize\ntemplate<typename T>\nvoid* toAnything(const Schema* schema, const T& data){\n    size_t total_size = get_shm_size(schema, data);\n    void* dest = shmalloc_cpp(total_size);\n    void* cursor = (void*)((char*)dest + schema->width);\n    try {\n        return toAnything(dest, &cursor, schema, data);\n    } catch (...) {\n        shfree_cpp(dest);\n        throw;\n    }\n}\n\n// Non-vector containers: convert to vector and delegate\ntemplate<typename T>\nvoid* toAnything(const Schema* schema, const std::stack<T>& data) {\n    return toAnything(schema, to_vector(data));\n}\n\ntemplate<typename T>\nvoid* toAnything(const Schema* schema, const std::forward_list<T>& data) {\n    return toAnything(schema, to_vector(data));\n}\n\ntemplate<typename T>\nvoid* toAnything(const Schema* schema, const std::queue<T>& data) {\n    return toAnything(schema, to_vector(data));\n}\n\ntemplate<typename T>\nvoid* toAnything(const Schema* schema, const std::deque<T>& data) {\n    return toAnything(schema, to_vector(data));\n}\n\ntemplate<typename T>\nvoid* toAnything(const Schema* schema, const std::list<T>& data) {\n    return toAnything(schema, to_vector(data));\n}\n\n\n// ============================================================\n// toAnything - cursor-based (recursive)\n// ============================================================\n\n// Forward declaration\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const T& data);\n\n// Write raw binary data as an array\nvoid* binarytoAnything(void* dest, void** cursor, const Schema* schema, const uint8_t* data, size_t size) {\n    Array* result = static_cast<Array*>(dest);\n    result->size = size;\n    if(size == 0){\n        result->data = RELNULL;\n        return dest;\n    }\n    absptr_t data_ptr = static_cast<absptr_t>(*cursor);\n    result->data = abs2rel_cpp(data_ptr);\n    *cursor = static_cast<char*>(*cursor) + size * schema->parameters[0]->width;\n    memcpy(data_ptr, data, size);\n    return dest;\n}\n\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::nullptr_t&) {\n    *((int8_t*)dest) = (int8_t)0;\n    return dest;\n}\n\n// Primitives\ntemplate<typename Primitive>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const Primitive& data) {\n    *((Primitive*)dest) = data;\n    return dest;\n}\n\n// Vector (primary array implementation)\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::vector<T>& data) {\n    Array* result = static_cast<Array*>(dest);\n    result->size = data.size();\n    if(data.size() == 0){\n        result->data = RELNULL;\n        return dest;\n    }\n    // align cursor for element data placement\n    *cursor = reinterpret_cast<void*>(ALIGN_UP(reinterpret_cast<uintptr_t>(*cursor), schema_alignment_cpp(schema->parameters[0])));\n    result->data = abs2rel_cpp(static_cast<absptr_t>(*cursor));\n    *cursor = static_cast<char*>(*cursor) + data.size() * schema->parameters[0]->width;\n    char* start = (char*)cpp_rel2abs(result->data);\n    size_t width = schema->parameters[0]->width;\n    for (size_t i = 0; i < data.size(); ++i) {\n         toAnything(start + width * i, cursor, schema->parameters[0], data[i]);\n    }\n    return dest;\n}\n\n// Shared helper for iterable containers (list, forward_list, deque)\ntemplate<typename Container>\nvoid* toAnything_seq(void* dest, void** cursor, const Schema* schema, const Container& data, size_t size) {\n    Array* result = static_cast<Array*>(dest);\n    result->size = size;\n    if(size == 0){\n        result->data = RELNULL;\n        return dest;\n    }\n    // align cursor for element data placement\n    *cursor = reinterpret_cast<void*>(ALIGN_UP(reinterpret_cast<uintptr_t>(*cursor), schema_alignment_cpp(schema->parameters[0])));\n    result->data = abs2rel_cpp(static_cast<absptr_t>(*cursor));\n    *cursor = static_cast<char*>(*cursor) + size * schema->parameters[0]->width;\n    char* start = (char*)cpp_rel2abs(result->data);\n    size_t width = schema->parameters[0]->width;\n    size_t i = 0;\n    for (const auto& item : data) {\n        toAnything(start + width * i, cursor, schema->parameters[0], item);\n        ++i;\n    }\n    return dest;\n}\n\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::list<T>& data) {\n    return toAnything_seq(dest, cursor, schema, data, data.size());\n}\n\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::forward_list<T>& data) {\n    return toAnything_seq(dest, cursor, schema, data, std::distance(data.begin(), data.end()));\n}\n\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::deque<T>& data) {\n    return toAnything_seq(dest, cursor, schema, data, data.size());\n}\n\n// Stack and queue: convert to vector and delegate\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::queue<T>& data) {\n    return toAnything(dest, cursor, schema, to_vector(data));\n}\n\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::stack<T>& data) {\n    return toAnything(dest, cursor, schema, to_vector(data));\n}\n\n// String and C string\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::string& data) {\n    return binarytoAnything(dest, cursor, schema, (const uint8_t*)data.c_str(), data.size());\n}\n\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const char* data) {\n    return binarytoAnything(dest, cursor, schema, (const uint8_t*)data, strlen(data));\n}\n\n// Tuple\ntemplate<typename Tuple, size_t... Is>\nvoid* createTupleAnythingHelper(void* dest, const Schema* schema, void** cursor, const Tuple& data, std::index_sequence<Is...>) {\n    (void)std::initializer_list<int>{(\n        toAnything((char*)dest + schema->offsets[Is], cursor, schema->parameters[Is], std::get<Is>(data)),\n        0\n    )...};\n    return dest;\n}\n\ntemplate<typename... Args>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::tuple<Args...>& data) {\n    return createTupleAnythingHelper(dest, schema, cursor, data, std::index_sequence_for<Args...>{});\n}\n\n// Pair (reuses tuple helper since std::pair supports std::get)\ntemplate<typename A, typename B>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::pair<A, B>& data) {\n    return createTupleAnythingHelper(dest, schema, cursor, data, std::index_sequence<0, 1>{});\n}\n\n// Optional\ntemplate<typename T>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const std::optional<T>& data) {\n    if (!data.has_value()) {\n        *((uint8_t*)dest) = 0;\n        memset((char*)dest + schema->offsets[0], 0, schema->parameters[0]->width);\n    } else {\n        *((uint8_t*)dest) = 1;\n        toAnything((char*)dest + schema->offsets[0], cursor, schema->parameters[0], *data);\n    }\n    return dest;\n}\n\n// Tensor: write Tensor header + shape array + contiguous data\ntemplate<typename T, int NDim>\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const mlc::Tensor<T, NDim>& data) {\n    Tensor* result = static_cast<Tensor*>(dest);\n    result->total_elements = data.size();\n    result->device_type = 0;\n    result->device_id = 0;\n\n    if (data.size() == 0) {\n        result->shape = RELNULL;\n        result->data = RELNULL;\n        return dest;\n    }\n\n    // Write shape array\n    *cursor = reinterpret_cast<void*>(ALIGN_UP(reinterpret_cast<uintptr_t>(*cursor), alignof(int64_t)));\n    result->shape = abs2rel_cpp(static_cast<absptr_t>(*cursor));\n    int64_t* shape_dst = (int64_t*)*cursor;\n    for (int i = 0; i < NDim; i++) shape_dst[i] = data.shape()[i];\n    *cursor = (char*)*cursor + NDim * sizeof(int64_t);\n\n    // Write data buffer (contiguous row-major)\n    using S = mlc::tensor_storage_t<T>;\n    size_t elem_align = schema_alignment_cpp(schema->parameters[0]);\n    *cursor = reinterpret_cast<void*>(ALIGN_UP(reinterpret_cast<uintptr_t>(*cursor), elem_align));\n    result->data = abs2rel_cpp(static_cast<absptr_t>(*cursor));\n    size_t data_bytes = data.size() * sizeof(S);\n    memcpy(*cursor, data.data(), data_bytes);\n    *cursor = (char*)*cursor + data_bytes;\n\n    return dest;\n}\n\n\n// ============================================================\n// fromAnything - single template with if constexpr dispatch\n// ============================================================\n\n// Forward declaration for recursive calls\ntemplate<typename T>\nT fromAnything(const Schema* schema, const void* data, T* = nullptr, const void* base_ptr = nullptr);\n\n// Tuple helper (needs forward declaration of fromAnything)\ntemplate<typename Tuple, size_t... Is>\nTuple fromTupleAnythingHelper(\n  const Schema* schema,\n  const void* anything,\n  std::index_sequence<Is...>,\n  Tuple* = nullptr,\n  const void* base_ptr = nullptr\n) {\n    return Tuple(fromAnything(schema->parameters[Is],\n                              (char*)anything + schema->offsets[Is],\n                              static_cast<std::tuple_element_t<Is, Tuple>*>(nullptr),\n                              base_ptr)...);\n}\n\ntemplate<typename T>\nT fromAnything(const Schema* schema, const void* data, T*, const void* base_ptr) {\n    if(data == NULL){\n        throw std::runtime_error(\"Void error in fromAnything\");\n    }\n\n    if constexpr (std::is_same_v<T, bool>) {\n        // NOTE: do NOT use bool here since its width is often not 1 byte\n        return *(uint8_t*)data == 1;\n    }\n    else if constexpr (std::is_same_v<T, std::string>) {\n        Array* array = (Array*)data;\n        if(array->size > 0){\n            return std::string((char*)resolve_relptr_cpp(array->data, base_ptr), array->size);\n        }\n        return std::string(\"\");\n    }\n    else if constexpr (is_std_vector<T>::value) {\n        using ElemT = typename T::value_type;\n        std::vector<ElemT> result;\n        Array* array = (Array*)data;\n        if(array->size == 0) return result;\n\n        // Fast path for primitive arrays\n        switch(schema->parameters[0]->type){\n            case MORLOC_NIL:\n            case MORLOC_BOOL:\n            case MORLOC_SINT8:\n            case MORLOC_SINT16:\n            case MORLOC_SINT32:\n            case MORLOC_SINT64:\n            case MORLOC_UINT8:\n            case MORLOC_UINT16:\n            case MORLOC_UINT32:\n            case MORLOC_UINT64:\n            case MORLOC_FLOAT32:\n            case MORLOC_FLOAT64: {\n                ElemT* arr_start = (ElemT*)resolve_relptr_cpp(array->data, base_ptr);\n                std::vector<ElemT> pv(arr_start, arr_start + array->size);\n                return pv;\n            }\n        }\n\n        // Complex element types\n        result.reserve(array->size);\n        const Schema* elem_schema = schema->parameters[0];\n        char* start = (char*)resolve_relptr_cpp(array->data, base_ptr);\n        for(size_t i = 0; i < array->size; i++){\n            result.push_back(fromAnything(elem_schema, (void*)(start + i * elem_schema->width), static_cast<ElemT*>(nullptr), base_ptr));\n        }\n        return result;\n    }\n    else if constexpr (is_non_vector_container_v<T>) {\n        using ElemT = typename T::value_type;\n        Array* array = (Array*)data;\n        T result;\n        if(array->size == 0) return result;\n\n        const Schema* elem_schema = schema->parameters[0];\n        char* start = (char*)resolve_relptr_cpp(array->data, base_ptr);\n\n        constexpr bool reverse = is_std_stack<T>::value || is_std_forward_list<T>::value;\n\n        if constexpr (reverse) {\n            for (size_t i = array->size; i > 0; --i) {\n                auto elem = fromAnything(elem_schema, (void*)(start + (i-1) * elem_schema->width), static_cast<ElemT*>(nullptr), base_ptr);\n                if constexpr (is_std_stack<T>::value) result.push(std::move(elem));\n                else result.push_front(std::move(elem));\n            }\n        } else {\n            for (size_t i = 0; i < array->size; ++i) {\n                auto elem = fromAnything(elem_schema, (void*)(start + i * elem_schema->width), static_cast<ElemT*>(nullptr), base_ptr);\n                if constexpr (is_std_queue<T>::value) result.push(std::move(elem));\n                else result.push_back(std::move(elem));\n            }\n        }\n        return result;\n    }\n    else if constexpr (is_std_tuple<T>::value) {\n        return fromTupleAnythingHelper(\n            schema, data,\n            std::make_index_sequence<std::tuple_size_v<T>>{},\n            static_cast<T*>(nullptr),\n            base_ptr\n        );\n    }\n    else if constexpr (is_std_pair<T>::value) {\n        return fromTupleAnythingHelper(\n            schema, data,\n            std::index_sequence<0, 1>{},\n            static_cast<T*>(nullptr),\n            base_ptr\n        );\n    }\n    else if constexpr (is_std_optional<T>::value) {\n        using InnerT = typename T::value_type;\n        uint8_t tag = *(const uint8_t*)data;\n        if (tag == 0) {\n            return std::nullopt;\n        }\n        return std::optional<InnerT>(fromAnything(schema->parameters[0], (const char*)data + schema->offsets[0], static_cast<InnerT*>(nullptr), base_ptr));\n    }\n    else if constexpr (mlc::is_mlc_tensor_v<T>) {\n        using ElemT = mlc::tensor_element_t<T>;\n        using StorageT = mlc::tensor_storage_t<ElemT>;\n        constexpr int NDim = mlc::tensor_ndim_v<T>;\n        const Tensor* tensor = (const Tensor*)data;\n\n        if (tensor->total_elements == 0) {\n            int64_t zero_shape[NDim] = {};\n            return T(zero_shape);\n        }\n\n        const int64_t* shape = (const int64_t*)resolve_relptr_cpp(tensor->shape, base_ptr);\n        StorageT* tdata = (StorageT*)resolve_relptr_cpp(tensor->data, base_ptr);\n        return T(tdata, shape, tensor->total_elements);\n    }\n    else {\n        // Primitives (int, double, float, etc.)\n        // Record types are handled by generated overloads which are preferred\n        // by overload resolution over this template.\n        return *(T*)data;\n    }\n}\n\n\n// ============================================================\n// mpk_pack / mpk_unpack\n// ============================================================\n\ntemplate<typename T>\nstd::vector<char> mpk_pack(const T& data, const std::string& schema_str) {\n    const char* schema_ptr = schema_str.c_str();\n    Schema* schema = parse_schema_cpp(schema_ptr);\n\n    void* voidstar = nullptr;\n    char* msgpack_data = NULL;\n    size_t msg_size = 0;\n\n    try {\n        voidstar = toAnything(schema, data);\n        pack_with_schema_cpp(voidstar, schema, &msgpack_data, &msg_size);\n    } catch (...) {\n        if (voidstar) shfree_cpp(voidstar);\n        free(msgpack_data);\n        free_schema(schema);\n        throw;\n    }\n\n    shfree_cpp(voidstar);\n\n    std::vector<char> result(msgpack_data, msgpack_data + msg_size);\n    free(msgpack_data);\n    free_schema(schema);\n\n    return result;\n}\n\ntemplate<typename T>\nT mpk_unpack(const std::vector<char>& packed_data, const std::string& schema_str) {\n    const char* schema_ptr = schema_str.c_str();\n    Schema* schema = parse_schema_cpp(schema_ptr);\n\n    void* voidstar = nullptr;\n    int unpack_result = unpack_with_schema_cpp(packed_data.data(), packed_data.size(), schema, &voidstar);\n    if (unpack_result != 0) {\n        free_schema(schema);\n        throw std::runtime_error(\"Unpacking failed\");\n    }\n\n    T x;\n    try {\n        x = fromAnything(schema, voidstar, static_cast<T*>(nullptr));\n    } catch (...) {\n        free_schema(schema);\n        shfree_cpp(voidstar);\n        throw;\n    }\n\n    free_schema(schema);\n    shfree_cpp(voidstar);\n\n    return x;\n}\n\n#endif\n"
  },
  {
    "path": "data/lang/cpp/init.sh",
    "content": "#!/bin/bash\nset -e\n\nMORLOC_HOME=\"$1\"\nBUILD_DIR=\"$2\"\nSANITIZE_FLAGS=\"$3\"\nINCLUDE_DIR=\"$MORLOC_HOME/include\"\nLIB_DIR=\"$MORLOC_HOME/lib\"\n\n# Install mlccpptypes if not present\nif [ ! -d \"$INCLUDE_DIR/mlccpptypes\" ]; then\n    git clone https://github.com/morloclib/mlccpptypes \"$INCLUDE_DIR/mlccpptypes\"\nfi\n\n# Install headers\ncp \"$BUILD_DIR/cppmorloc.hpp\" \"$INCLUDE_DIR/\"\ncp \"$BUILD_DIR/mlc_arrow.hpp\" \"$INCLUDE_DIR/\"\ncp \"$BUILD_DIR/mlc_tensor.hpp\" \"$INCLUDE_DIR/\"\n\n# Install nanoarrow headers\nmkdir -p \"$INCLUDE_DIR/nanoarrow\"\ncp \"$BUILD_DIR/nanoarrow.h\" \"$INCLUDE_DIR/nanoarrow/\"\n\n# Compile nanoarrow.c\ngcc -c -O2 -fPIC $SANITIZE_FLAGS -I\"$INCLUDE_DIR\" -o \"$BUILD_DIR/nanoarrow.o\" \"$BUILD_DIR/nanoarrow.c\"\n\n# Compile cppmorloc.cpp\ng++ -c --std=c++17 -O2 $SANITIZE_FLAGS -I\"$INCLUDE_DIR\" -o \"$BUILD_DIR/cppmorloc.o\" \"$BUILD_DIR/cppmorloc.cpp\"\n\n# Archive into libcppmorloc.a\nar rcs \"$LIB_DIR/libcppmorloc.a\" \"$BUILD_DIR/cppmorloc.o\" \"$BUILD_DIR/nanoarrow.o\"\n\n# Compile precompiled header\ncp \"$BUILD_DIR/morloc_pch.hpp\" \"$INCLUDE_DIR/\"\ng++ --std=c++17 -O2 $SANITIZE_FLAGS -I\"$INCLUDE_DIR\" -x c++-header \"$INCLUDE_DIR/morloc_pch.hpp\" -o \"$INCLUDE_DIR/morloc_pch.hpp.gch\"\n"
  },
  {
    "path": "data/lang/cpp/lang.yaml",
    "content": "# C++ language metadata for morloc compiler\nname: cpp\nextension: cpp\naliases: [\"c++\"]\nis_compiled: true\nrun_command: []\nserial_type: \"uint8_t*\"\ncost: 0\n"
  },
  {
    "path": "data/lang/cpp/mlc_arrow.hpp",
    "content": "#ifndef MLC_ARROW_HPP\n#define MLC_ARROW_HPP\n\n// mlc_arrow.hpp -- thin RAII wrapper around Arrow C Data Interface structs\n// for use in morloc C++ pools.  Holds ArrowSchema + ArrowArray as a single\n// move-only value.  The pool template dispatches arrow-hinted schemas to\n// arrow_to_shm / arrow_from_shm (in libmorloc.so) via this type.\n//\n// User code should include <nanoarrow/nanoarrow.h> to build and read columns.\n\n#include \"morloc.h\"\n#include <cstring>\n#include <stdexcept>\n#include <utility>\n\nnamespace mlc {\n\nclass ArrowTable {\npublic:\n    // Construct from moved-in C Data Interface structs.\n    // Takes ownership of release callbacks.\n    ArrowTable(struct ArrowSchema schema, struct ArrowArray array)\n        : schema_(schema), array_(array)\n    {\n        // Zero the source structs so the caller does not double-release\n        memset(&schema, 0, sizeof(schema));\n        memset(&array, 0, sizeof(array));\n    }\n\n    ~ArrowTable() {\n        if (array_.release) array_.release(&array_);\n        if (schema_.release) schema_.release(&schema_);\n    }\n\n    // Move-only\n    ArrowTable(ArrowTable&& other) noexcept\n        : schema_(other.schema_), array_(other.array_)\n    {\n        memset(&other.schema_, 0, sizeof(other.schema_));\n        memset(&other.array_, 0, sizeof(other.array_));\n    }\n\n    ArrowTable& operator=(ArrowTable&& other) noexcept {\n        if (this != &other) {\n            if (array_.release) array_.release(&array_);\n            if (schema_.release) schema_.release(&schema_);\n            schema_ = other.schema_;\n            array_ = other.array_;\n            memset(&other.schema_, 0, sizeof(other.schema_));\n            memset(&other.array_, 0, sizeof(other.array_));\n        }\n        return *this;\n    }\n\n    ArrowTable(const ArrowTable&) = delete;\n    ArrowTable& operator=(const ArrowTable&) = delete;\n\n    // Accessors (const -- arrow data is immutable)\n    const struct ArrowSchema* schema() const { return &schema_; }\n    const struct ArrowArray*  array()  const { return &array_; }\n    int64_t n_columns() const { return schema_.n_children; }\n    int64_t n_rows()    const { return array_.length; }\n\n    // Build from shared memory header (zero-copy import)\n    static ArrowTable from_shm(const arrow_shm_header_t* hdr) {\n        struct ArrowSchema as;\n        struct ArrowArray aa;\n        char* err = nullptr;\n        arrow_from_shm(hdr, &as, &aa, &err);\n        if (err) {\n            std::string msg(err);\n            free(err);\n            throw std::runtime_error(msg);\n        }\n        return ArrowTable(std::move(as), std::move(aa));\n    }\n\n    // Move table data to shared memory: copies buffers into a contiguous SHM\n    // block, frees the original heap buffers, then repoints this table's\n    // internal ArrowSchema/ArrowArray into the SHM block (zero-copy).\n    // After this call the table is still usable but backed by SHM.\n    // Returns relptr to the SHM block for use in packets.\n    relptr_t move_to_shm() {\n        // Step 1: copy all column data into contiguous SHM\n        char* copy_err = nullptr;\n        relptr_t rp = arrow_to_shm(&array_, &schema_, &copy_err);\n        if (copy_err) {\n            std::string msg(copy_err);\n            free(copy_err);\n            throw std::runtime_error(msg);\n        }\n\n        // Step 2: release heap-backed structs (frees all original buffers)\n        if (array_.release) array_.release(&array_);\n        if (schema_.release) schema_.release(&schema_);\n        memset(&schema_, 0, sizeof(schema_));\n        memset(&array_, 0, sizeof(array_));\n\n        // Step 3: resolve SHM pointer and rebuild structs pointing into it\n        char* resolve_err = nullptr;\n        void* abs = rel2abs(rp, &resolve_err);\n        if (resolve_err) {\n            std::string msg(resolve_err);\n            free(resolve_err);\n            throw std::runtime_error(msg);\n        }\n\n        char* shm_err = nullptr;\n        arrow_from_shm((const arrow_shm_header_t*)abs, &schema_, &array_, &shm_err);\n        if (shm_err) {\n            std::string msg(shm_err);\n            free(shm_err);\n            throw std::runtime_error(msg);\n        }\n\n        return rp;\n    }\n\nprivate:\n    struct ArrowSchema schema_;\n    struct ArrowArray array_;\n};\n\n} // namespace mlc\n\n#endif // MLC_ARROW_HPP\n"
  },
  {
    "path": "data/lang/cpp/mlc_tensor.hpp",
    "content": "#ifndef MLC_TENSOR_HPP\n#define MLC_TENSOR_HPP\n\n// mlc_tensor.hpp -- dense N-dimensional tensor for morloc C++ pools.\n// Data is always contiguous row-major (C order). The Tensor struct in\n// schema.h defines the voidstar layout; this header provides the C++\n// user-facing type that maps to it.\n\n#include \"morloc.h\"\n#include <cstring>\n#include <stdexcept>\n#include <numeric>\n#include <initializer_list>\n\nnamespace mlc {\n\n// Storage type trait: maps bool to uint8_t so that tensor memory layout\n// matches the voidstar format (MORLOC_BOOL = 1 byte) regardless of\n// sizeof(bool) on the target platform.\ntemplate<typename T> struct tensor_storage { using type = T; };\ntemplate<> struct tensor_storage<bool> { using type = uint8_t; };\ntemplate<typename T> using tensor_storage_t = typename tensor_storage<T>::type;\n\ntemplate<typename T, int NDim>\nclass Tensor {\n    using S = tensor_storage_t<T>;\npublic:\n    // Construct with given shape, allocating data on the heap\n    Tensor(const int64_t (&dims)[NDim]) : owns_data_(true) {\n        for (int i = 0; i < NDim; i++) shape_[i] = dims[i];\n        total_ = 1;\n        for (int i = 0; i < NDim; i++) total_ *= (size_t)shape_[i];\n        data_ = new S[total_]();\n    }\n\n    // Construct from initializer list of dims\n    Tensor(std::initializer_list<int64_t> dims) : owns_data_(true) {\n        if ((int)dims.size() != NDim) {\n            throw std::runtime_error(\"Tensor dimension mismatch\");\n        }\n        int i = 0;\n        for (auto d : dims) shape_[i++] = d;\n        total_ = 1;\n        for (i = 0; i < NDim; i++) total_ *= (size_t)shape_[i];\n        data_ = new S[total_]();\n    }\n\n    // Construct as a view over existing data (does not own)\n    Tensor(S* data, const int64_t* shape, size_t total)\n        : data_(data), total_(total), owns_data_(false) {\n        for (int i = 0; i < NDim; i++) shape_[i] = shape[i];\n    }\n\n    ~Tensor() {\n        if (owns_data_ && data_) delete[] data_;\n    }\n\n    // Move-only\n    Tensor(Tensor&& other) noexcept\n        : data_(other.data_), total_(other.total_), owns_data_(other.owns_data_) {\n        for (int i = 0; i < NDim; i++) shape_[i] = other.shape_[i];\n        other.data_ = nullptr;\n        other.owns_data_ = false;\n    }\n\n    Tensor& operator=(Tensor&& other) noexcept {\n        if (this != &other) {\n            if (owns_data_ && data_) delete[] data_;\n            data_ = other.data_;\n            total_ = other.total_;\n            owns_data_ = other.owns_data_;\n            for (int i = 0; i < NDim; i++) shape_[i] = other.shape_[i];\n            other.data_ = nullptr;\n            other.owns_data_ = false;\n        }\n        return *this;\n    }\n\n    Tensor(const Tensor&) = delete;\n    Tensor& operator=(const Tensor&) = delete;\n\n    // Accessors (S* for raw access; S == T for all types except bool)\n    const S* data() const { return data_; }\n    S* data() { return data_; }\n    constexpr int ndim() const { return NDim; }\n    const int64_t* shape() const { return shape_; }\n    int64_t shape(int d) const { return shape_[d]; }\n    size_t size() const { return total_; }\n\n    // Linear access (returns S& which is uint8_t& for bool tensors;\n    // implicit conversion to/from bool handles the difference)\n    const S& operator[](size_t i) const { return data_[i]; }\n    S& operator[](size_t i) { return data_[i]; }\n\n    // 1D access\n    template<int N = NDim, typename = std::enable_if_t<N == 1>>\n    const S& operator()(int64_t i) const { return data_[i]; }\n    template<int N = NDim, typename = std::enable_if_t<N == 1>>\n    S& operator()(int64_t i) { return data_[i]; }\n\n    // 2D access (row-major)\n    template<int N = NDim, typename = std::enable_if_t<N == 2>>\n    const S& operator()(int64_t i, int64_t j) const {\n        return data_[i * shape_[1] + j];\n    }\n    template<int N = NDim, typename = std::enable_if_t<N == 2>>\n    S& operator()(int64_t i, int64_t j) {\n        return data_[i * shape_[1] + j];\n    }\n\n    // 3D access (row-major)\n    template<int N = NDim, typename = std::enable_if_t<N == 3>>\n    const S& operator()(int64_t i, int64_t j, int64_t k) const {\n        return data_[(i * shape_[1] + j) * shape_[2] + k];\n    }\n    template<int N = NDim, typename = std::enable_if_t<N == 3>>\n    S& operator()(int64_t i, int64_t j, int64_t k) {\n        return data_[(i * shape_[1] + j) * shape_[2] + k];\n    }\n\nprivate:\n    S* data_ = nullptr;\n    int64_t shape_[NDim] = {};\n    size_t total_ = 0;\n    bool owns_data_ = false;\n};\n\n// Convenience aliases\ntemplate<typename T> using Tensor1 = Tensor<T, 1>;\ntemplate<typename T> using Tensor2 = Tensor<T, 2>;\ntemplate<typename T> using Tensor3 = Tensor<T, 3>;\ntemplate<typename T> using Tensor4 = Tensor<T, 4>;\ntemplate<typename T> using Tensor5 = Tensor<T, 5>;\n\n// Type trait for detecting mlc::Tensor\ntemplate<typename T> struct is_mlc_tensor : std::false_type {};\ntemplate<typename T, int N> struct is_mlc_tensor<Tensor<T, N>> : std::true_type {};\ntemplate<typename T>\ninline constexpr bool is_mlc_tensor_v = is_mlc_tensor<T>::value;\n\n// Extract element type from Tensor\ntemplate<typename T> struct tensor_element;\ntemplate<typename T, int N> struct tensor_element<Tensor<T, N>> { using type = T; };\ntemplate<typename T> using tensor_element_t = typename tensor_element<T>::type;\n\n// Extract ndim from Tensor\ntemplate<typename T> struct tensor_ndim;\ntemplate<typename T, int N> struct tensor_ndim<Tensor<T, N>>\n    { static constexpr int value = N; };\ntemplate<typename T>\ninline constexpr int tensor_ndim_v = tensor_ndim<T>::value;\n\n} // namespace mlc\n\n#endif // MLC_TENSOR_HPP\n"
  },
  {
    "path": "data/lang/cpp/morloc_pch.hpp",
    "content": "// Precompiled header for morloc C++ pools.\n// Compiled once during 'morloc init', reused for every pool compilation.\n\n#ifndef MORLOC_PCH_HPP\n#define MORLOC_PCH_HPP\n\n// STL containers\n#include <vector>\n#include <stack>\n#include <list>\n#include <forward_list>\n#include <queue>\n#include <deque>\n#include <unordered_map>\n\n// STL algorithms and utilities\n#include <algorithm>\n#include <tuple>\n#include <functional>\n#include <limits>\n#include <utility>\n#include <type_traits>\n\n// Strings and I/O\n#include <string>\n#include <iostream>\n#include <sstream>\n#include <fstream>\n\n// C standard library\n#include <cstring>\n#include <cstdlib>\n#include <cstdio>\n#include <cstdint>\n#include <stdexcept>\n#include <system_error>\n\n// POSIX headers\n#include <sys/stat.h>\n#include <sys/mman.h>\n#include <unistd.h>\n#include <pthread.h>\n#include <signal.h>\n\n// Morloc runtime\n#include \"morloc.h\"\n\n#endif\n"
  },
  {
    "path": "data/lang/cpp/nanoarrow/nanoarrow.c",
    "content": "// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#include <errno.h>\n#include <stdarg.h>\n#include <stddef.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n\n#include \"nanoarrow/nanoarrow.h\"\n\nconst char* ArrowNanoarrowVersion(void) { return NANOARROW_VERSION; }\n\nint ArrowNanoarrowVersionInt(void) { return NANOARROW_VERSION_INT; }\n\nArrowErrorCode ArrowErrorSet(struct ArrowError* error, const char* fmt, ...) {\n  if (error == NULL) {\n    return NANOARROW_OK;\n  }\n\n  memset(error->message, 0, sizeof(error->message));\n\n  va_list args;\n  va_start(args, fmt);\n  int chars_needed = vsnprintf(error->message, sizeof(error->message), fmt, args);\n  va_end(args);\n\n  if (chars_needed < 0) {\n    return EINVAL;\n  } else if (((size_t)chars_needed) >= sizeof(error->message)) {\n    return ERANGE;\n  } else {\n    return NANOARROW_OK;\n  }\n}\n\nvoid ArrowLayoutInit(struct ArrowLayout* layout, enum ArrowType storage_type) {\n  layout->buffer_type[0] = NANOARROW_BUFFER_TYPE_VALIDITY;\n  layout->buffer_data_type[0] = NANOARROW_TYPE_BOOL;\n  layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA;\n  layout->buffer_data_type[1] = storage_type;\n  layout->buffer_type[2] = NANOARROW_BUFFER_TYPE_NONE;\n  layout->buffer_data_type[2] = NANOARROW_TYPE_UNINITIALIZED;\n\n  layout->element_size_bits[0] = 1;\n  layout->element_size_bits[1] = 0;\n  layout->element_size_bits[2] = 0;\n\n  layout->child_size_elements = 0;\n\n  switch (storage_type) {\n    case NANOARROW_TYPE_UNINITIALIZED:\n    case NANOARROW_TYPE_NA:\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      layout->buffer_type[0] = NANOARROW_BUFFER_TYPE_NONE;\n      layout->buffer_data_type[0] = NANOARROW_TYPE_UNINITIALIZED;\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_NONE;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_UNINITIALIZED;\n      layout->element_size_bits[0] = 0;\n      break;\n\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_MAP:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT32;\n      layout->element_size_bits[1] = 32;\n      break;\n\n    case NANOARROW_TYPE_LARGE_LIST:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT64;\n      layout->element_size_bits[1] = 64;\n      break;\n\n    case NANOARROW_TYPE_STRUCT:\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_NONE;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_UNINITIALIZED;\n      break;\n\n    case NANOARROW_TYPE_BOOL:\n      layout->element_size_bits[1] = 1;\n      break;\n\n    case NANOARROW_TYPE_UINT8:\n    case NANOARROW_TYPE_INT8:\n      layout->element_size_bits[1] = 8;\n      break;\n\n    case NANOARROW_TYPE_UINT16:\n    case NANOARROW_TYPE_INT16:\n    case NANOARROW_TYPE_HALF_FLOAT:\n      layout->element_size_bits[1] = 16;\n      break;\n\n    case NANOARROW_TYPE_UINT32:\n    case NANOARROW_TYPE_INT32:\n    case NANOARROW_TYPE_FLOAT:\n    case NANOARROW_TYPE_DECIMAL32:\n      layout->element_size_bits[1] = 32;\n      break;\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT32;\n      layout->element_size_bits[1] = 32;\n      break;\n\n    case NANOARROW_TYPE_UINT64:\n    case NANOARROW_TYPE_INT64:\n    case NANOARROW_TYPE_DOUBLE:\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME:\n    case NANOARROW_TYPE_DECIMAL64:\n      layout->element_size_bits[1] = 64;\n      break;\n\n    case NANOARROW_TYPE_DECIMAL128:\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO:\n      layout->element_size_bits[1] = 128;\n      break;\n\n    case NANOARROW_TYPE_DECIMAL256:\n      layout->element_size_bits[1] = 256;\n      break;\n\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n      layout->buffer_data_type[1] = NANOARROW_TYPE_BINARY;\n      break;\n\n    case NANOARROW_TYPE_DENSE_UNION:\n      layout->buffer_type[0] = NANOARROW_BUFFER_TYPE_TYPE_ID;\n      layout->buffer_data_type[0] = NANOARROW_TYPE_INT8;\n      layout->element_size_bits[0] = 8;\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_UNION_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT32;\n      layout->element_size_bits[1] = 32;\n      break;\n\n    case NANOARROW_TYPE_SPARSE_UNION:\n      layout->buffer_type[0] = NANOARROW_BUFFER_TYPE_TYPE_ID;\n      layout->buffer_data_type[0] = NANOARROW_TYPE_INT8;\n      layout->element_size_bits[0] = 8;\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_NONE;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_UNINITIALIZED;\n      break;\n\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_BINARY:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT32;\n      layout->element_size_bits[1] = 32;\n      layout->buffer_type[2] = NANOARROW_BUFFER_TYPE_DATA;\n      layout->buffer_data_type[2] = storage_type;\n      break;\n\n    case NANOARROW_TYPE_LARGE_STRING:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT64;\n      layout->element_size_bits[1] = 64;\n      layout->buffer_type[2] = NANOARROW_BUFFER_TYPE_DATA;\n      layout->buffer_data_type[2] = NANOARROW_TYPE_STRING;\n      break;\n    case NANOARROW_TYPE_LARGE_BINARY:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT64;\n      layout->element_size_bits[1] = 64;\n      layout->buffer_type[2] = NANOARROW_BUFFER_TYPE_DATA;\n      layout->buffer_data_type[2] = NANOARROW_TYPE_BINARY;\n      break;\n\n    case NANOARROW_TYPE_BINARY_VIEW:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_BINARY_VIEW;\n      layout->element_size_bits[1] = 128;\n      break;\n    case NANOARROW_TYPE_STRING_VIEW:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_DATA;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_STRING_VIEW;\n      layout->element_size_bits[1] = 128;\n      break;\n\n    case NANOARROW_TYPE_LIST_VIEW:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_VIEW_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT32;\n      layout->element_size_bits[1] = 32;\n      layout->buffer_type[2] = NANOARROW_BUFFER_TYPE_SIZE;\n      layout->buffer_data_type[2] = NANOARROW_TYPE_INT32;\n      layout->element_size_bits[2] = 32;\n      break;\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      layout->buffer_type[1] = NANOARROW_BUFFER_TYPE_VIEW_OFFSET;\n      layout->buffer_data_type[1] = NANOARROW_TYPE_INT64;\n      layout->element_size_bits[1] = 64;\n      layout->buffer_type[2] = NANOARROW_BUFFER_TYPE_SIZE;\n      layout->buffer_data_type[2] = NANOARROW_TYPE_INT64;\n      layout->element_size_bits[2] = 64;\n      break;\n\n    default:\n      break;\n  }\n}\n\nvoid* ArrowMalloc(int64_t size) { return malloc(size); }\n\nvoid* ArrowRealloc(void* ptr, int64_t size) { return realloc(ptr, size); }\n\nvoid ArrowFree(void* ptr) { free(ptr); }\n\nstatic uint8_t* ArrowBufferAllocatorMallocReallocate(\n    struct ArrowBufferAllocator* allocator, uint8_t* ptr, int64_t old_size,\n    int64_t new_size) {\n  NANOARROW_UNUSED(allocator);\n  NANOARROW_UNUSED(old_size);\n  return (uint8_t*)ArrowRealloc(ptr, new_size);\n}\n\nstatic void ArrowBufferAllocatorMallocFree(struct ArrowBufferAllocator* allocator,\n                                           uint8_t* ptr, int64_t size) {\n  NANOARROW_UNUSED(allocator);\n  NANOARROW_UNUSED(size);\n  if (ptr != NULL) {\n    ArrowFree(ptr);\n  }\n}\n\nstatic struct ArrowBufferAllocator ArrowBufferAllocatorMalloc = {\n    &ArrowBufferAllocatorMallocReallocate, &ArrowBufferAllocatorMallocFree, NULL};\n\nstruct ArrowBufferAllocator ArrowBufferAllocatorDefault(void) {\n  return ArrowBufferAllocatorMalloc;\n}\n\nstatic uint8_t* ArrowBufferDeallocatorReallocate(struct ArrowBufferAllocator* allocator,\n                                                 uint8_t* ptr, int64_t old_size,\n                                                 int64_t new_size) {\n  NANOARROW_UNUSED(new_size);\n\n  // Attempting to reallocate a buffer with a custom deallocator is\n  // a programming error. In debug mode, crash here.\n#if defined(NANOARROW_DEBUG)\n  NANOARROW_PRINT_AND_DIE(ENOMEM,\n                          \"It is an error to reallocate a buffer whose allocator is \"\n                          \"ArrowBufferDeallocator()\");\n#endif\n\n  // In release mode, ensure the the deallocator is called exactly\n  // once using the pointer it was given and return NULL, which\n  // will trigger the caller to return ENOMEM.\n  allocator->free(allocator, ptr, old_size);\n  *allocator = ArrowBufferAllocatorDefault();\n  return NULL;\n}\n\nstruct ArrowBufferAllocator ArrowBufferDeallocator(\n    void (*custom_free)(struct ArrowBufferAllocator* allocator, uint8_t* ptr,\n                        int64_t size),\n    void* private_data) {\n  struct ArrowBufferAllocator allocator;\n  allocator.reallocate = &ArrowBufferDeallocatorReallocate;\n  allocator.free = custom_free;\n  allocator.private_data = private_data;\n  return allocator;\n}\n\nstatic const int kInt32DecimalDigits = 9;\n\nstatic const uint64_t kUInt32PowersOfTen[] = {\n    1ULL,      10ULL,      100ULL,      1000ULL,      10000ULL,\n    100000ULL, 1000000ULL, 10000000ULL, 100000000ULL, 1000000000ULL};\n\n// Adapted from Arrow C++ to use 32-bit words for better C portability\n// https://github.com/apache/arrow/blob/cd3321b28b0c9703e5d7105d6146c1270bbadd7f/cpp/src/arrow/util/decimal.cc#L524-L544\nstatic void ShiftAndAdd(struct ArrowStringView value, uint32_t* out, int64_t out_size) {\n  // We use strtoll for parsing, which needs input that is null-terminated\n  char chunk_string[16];\n\n  for (int64_t posn = 0; posn < value.size_bytes;) {\n    int64_t remaining = value.size_bytes - posn;\n\n    int64_t group_size;\n    if (remaining > kInt32DecimalDigits) {\n      group_size = kInt32DecimalDigits;\n    } else {\n      group_size = remaining;\n    }\n\n    const uint64_t multiple = kUInt32PowersOfTen[group_size];\n\n    memcpy(chunk_string, value.data + posn, group_size);\n    chunk_string[group_size] = '\\0';\n    uint32_t chunk = (uint32_t)strtoll(chunk_string, NULL, 10);\n\n    for (int64_t i = 0; i < out_size; i++) {\n      uint64_t tmp = out[i];\n      tmp *= multiple;\n      tmp += chunk;\n      out[i] = (uint32_t)(tmp & 0xFFFFFFFFULL);\n      chunk = (uint32_t)(tmp >> 32);\n    }\n    posn += group_size;\n  }\n}\n\nArrowErrorCode ArrowDecimalSetDigits(struct ArrowDecimal* decimal,\n                                     struct ArrowStringView value) {\n  // Check for sign\n  int is_negative = value.data[0] == '-';\n  int has_sign = is_negative || value.data[0] == '+';\n  value.data += has_sign;\n  value.size_bytes -= has_sign;\n\n  // Check all characters are digits that are not the negative sign\n  for (int64_t i = 0; i < value.size_bytes; i++) {\n    char c = value.data[i];\n    if (c < '0' || c > '9') {\n      return EINVAL;\n    }\n  }\n\n  // Skip over leading 0s\n  int64_t n_leading_zeroes = 0;\n  for (int64_t i = 0; i < value.size_bytes; i++) {\n    if (value.data[i] == '0') {\n      n_leading_zeroes++;\n    } else {\n      break;\n    }\n  }\n\n  value.data += n_leading_zeroes;\n  value.size_bytes -= n_leading_zeroes;\n\n  // Use 32-bit words for portability\n  uint32_t words32[8];\n  memset(words32, 0, sizeof(words32));\n  int n_words32 = decimal->n_words > 0 ? decimal->n_words * 2 : 1;\n  NANOARROW_DCHECK(n_words32 <= 8);\n  memset(words32, 0, sizeof(words32));\n\n  ShiftAndAdd(value, words32, n_words32);\n\n  if (_ArrowIsLittleEndian() || n_words32 == 1) {\n    memcpy(decimal->words, words32, sizeof(uint32_t) * n_words32);\n  } else {\n    uint64_t lo;\n    uint64_t hi;\n\n    for (int i = 0; i < decimal->n_words; i++) {\n      lo = (uint64_t)words32[i * 2];\n      hi = (uint64_t)words32[i * 2 + 1] << 32;\n      decimal->words[decimal->n_words - i - 1] = lo | hi;\n    }\n  }\n\n  if (is_negative) {\n    ArrowDecimalNegate(decimal);\n  }\n\n  return NANOARROW_OK;\n}\n\n// Adapted from Arrow C++ for C\n// https://github.com/apache/arrow/blob/cd3321b28b0c9703e5d7105d6146c1270bbadd7f/cpp/src/arrow/util/decimal.cc#L365\nArrowErrorCode ArrowDecimalAppendDigitsToBuffer(const struct ArrowDecimal* decimal,\n                                                struct ArrowBuffer* buffer) {\n  NANOARROW_DCHECK(decimal->n_words == 0 || decimal->n_words == 1 ||\n                   decimal->n_words == 2 || decimal->n_words == 4);\n\n  // For the 32-bit case, just use snprintf()\n  if (decimal->n_words == 0) {\n    int32_t value;\n    memcpy(&value, decimal->words, sizeof(int32_t));\n    NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(buffer, 16));\n    int n_chars = snprintf((char*)buffer->data + buffer->size_bytes,\n                           (buffer->capacity_bytes - buffer->size_bytes), \"%d\", value);\n    if (n_chars <= 0) {\n      return EINVAL;\n    }\n\n    buffer->size_bytes += n_chars;\n    return NANOARROW_OK;\n  }\n\n  int is_negative = ArrowDecimalSign(decimal) < 0;\n\n  uint64_t words_little_endian[4];\n  if (decimal->n_words == 0) {\n    words_little_endian[0] = 0;\n    memcpy(words_little_endian, decimal->words, sizeof(uint32_t));\n  } else if (decimal->low_word_index == 0) {\n    memcpy(words_little_endian, decimal->words, decimal->n_words * sizeof(uint64_t));\n  } else {\n    for (int i = 0; i < decimal->n_words; i++) {\n      words_little_endian[i] = decimal->words[decimal->n_words - i - 1];\n    }\n  }\n\n  // We've already made a copy, so negate that if needed\n  if (is_negative) {\n    if (decimal->n_words == 0) {\n      uint32_t elem = (uint32_t)words_little_endian[0];\n      elem = ~elem + 1;\n      words_little_endian[0] = (int32_t)elem;\n    } else {\n      uint64_t carry = 1;\n      for (int i = 0; i < decimal->n_words; i++) {\n        uint64_t elem = words_little_endian[i];\n        elem = ~elem + carry;\n        carry &= (elem == 0);\n        words_little_endian[i] = elem;\n      }\n    }\n  }\n\n  // Find the most significant word that is non-zero\n  int most_significant_elem_idx = -1;\n  if (decimal->n_words == 0) {\n    if (words_little_endian[0] != 0) {\n      most_significant_elem_idx = 0;\n    }\n  } else {\n    for (int i = decimal->n_words - 1; i >= 0; i--) {\n      if (words_little_endian[i] != 0) {\n        most_significant_elem_idx = i;\n        break;\n      }\n    }\n  }\n\n  // If they are all zero, the output is just '0'\n  if (most_significant_elem_idx == -1) {\n    NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt8(buffer, '0'));\n    return NANOARROW_OK;\n  }\n\n  // Define segments such that each segment represents 9 digits with the\n  // least significant group of 9 digits first. For example, if the input represents\n  // 9876543210123456789, then segments will be [123456789, 876543210, 9].\n  // We handle at most a signed 256 bit integer, whose maximum value occupies 77\n  // characters. Thus, we need at most 9 segments.\n  const uint32_t k1e9 = 1000000000U;\n  int num_segments = 0;\n  uint32_t segments[9];\n  memset(segments, 0, sizeof(segments));\n  uint64_t* most_significant_elem = words_little_endian + most_significant_elem_idx;\n\n  do {\n    // Compute remainder = words_little_endian % 1e9 and words_little_endian =\n    // words_little_endian / 1e9.\n    uint32_t remainder = 0;\n    uint64_t* elem = most_significant_elem;\n\n    do {\n      // Compute dividend = (remainder << 32) | *elem  (a virtual 96-bit integer);\n      // *elem = dividend / 1e9;\n      // remainder = dividend % 1e9.\n      uint32_t hi = (uint32_t)(*elem >> 32);\n      uint32_t lo = (uint32_t)(*elem & 0xFFFFFFFFULL);\n      uint64_t dividend_hi = ((uint64_t)(remainder) << 32) | hi;\n      uint64_t quotient_hi = dividend_hi / k1e9;\n      remainder = (uint32_t)(dividend_hi % k1e9);\n      uint64_t dividend_lo = ((uint64_t)(remainder) << 32) | lo;\n      uint64_t quotient_lo = dividend_lo / k1e9;\n      remainder = (uint32_t)(dividend_lo % k1e9);\n\n      *elem = (quotient_hi << 32) | quotient_lo;\n    } while (elem-- != words_little_endian);\n\n    segments[num_segments++] = remainder;\n  } while (*most_significant_elem != 0 || most_significant_elem-- != words_little_endian);\n\n  // We know our output has no more than 9 digits per segment, plus a negative sign,\n  // plus any further digits between our output of 9 digits plus enough\n  // extra characters to ensure that snprintf() with n = 21 (maximum length of %lu\n  // including a the null terminator) is bounded properly.\n  NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(buffer, num_segments * 9 + 1 + 21 - 9));\n  if (is_negative) {\n    buffer->data[buffer->size_bytes++] = '-';\n  }\n\n  // The most significant segment should have no leading zeroes\n  int n_chars = snprintf((char*)buffer->data + buffer->size_bytes, 21, \"%lu\",\n                         (unsigned long)segments[num_segments - 1]);\n\n  // Ensure that an encoding error from snprintf() does not result\n  // in an out-of-bounds access.\n  if (n_chars < 0) {\n    return ERANGE;\n  }\n\n  buffer->size_bytes += n_chars;\n\n  // Subsequent output needs to be left-padded with zeroes such that each segment\n  // takes up exactly 9 digits.\n  for (int i = num_segments - 2; i >= 0; i--) {\n    int n_chars = snprintf((char*)buffer->data + buffer->size_bytes, 21, \"%09lu\",\n                           (unsigned long)segments[i]);\n    buffer->size_bytes += n_chars;\n    NANOARROW_DCHECK(buffer->size_bytes <= buffer->capacity_bytes);\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowDecimalAppendStringToBuffer(const struct ArrowDecimal* decimal,\n                                                struct ArrowBuffer* buffer) {\n  int64_t buffer_size = buffer->size_bytes;\n  NANOARROW_RETURN_NOT_OK(ArrowDecimalAppendDigitsToBuffer(decimal, buffer));\n  int64_t digits_size = buffer->size_bytes - buffer_size;\n\n  if (decimal->scale <= 0) {\n    // e.g., digits are -12345 and scale is -2 -> -1234500\n    // Just add zeros to the end\n    for (int i = decimal->scale; i < 0; i++) {\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt8(buffer, '0'));\n    }\n    return NANOARROW_OK;\n  }\n\n  int is_negative = buffer->data[0] == '-';\n  int64_t num_digits = digits_size - is_negative;\n  if (num_digits <= decimal->scale) {\n    // e.g., digits are -12345 and scale is 6 -> -0.012345\n    // Insert \"0.<some zeros>\" between the (maybe) negative sign and the digits\n    int64_t num_zeros_after_decimal = decimal->scale - num_digits;\n    NANOARROW_RETURN_NOT_OK(\n        ArrowBufferResize(buffer, buffer->size_bytes + num_zeros_after_decimal + 2, 0));\n\n    uint8_t* digits_start = buffer->data + is_negative;\n    memmove(digits_start + num_zeros_after_decimal + 2, digits_start, num_digits);\n    *digits_start++ = '0';\n    *digits_start++ = '.';\n    for (int i = 0; i < num_zeros_after_decimal; i++) {\n      *digits_start++ = '0';\n    }\n\n  } else {\n    // e.g., digits are -12345 and scale is 4 -> -1.2345\n    // Insert a decimal point before scale digits of output\n    NANOARROW_RETURN_NOT_OK(ArrowBufferResize(buffer, buffer->size_bytes + 1, 0));\n    uint8_t* decimal_point_to_be = buffer->data + buffer->size_bytes - 1 - decimal->scale;\n    memmove(decimal_point_to_be + 1, decimal_point_to_be, decimal->scale);\n    *decimal_point_to_be = '.';\n  }\n\n  return NANOARROW_OK;\n}\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#include <errno.h>\n#include <inttypes.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n\n#include \"nanoarrow/nanoarrow.h\"\n\nstatic void ArrowSchemaReleaseInternal(struct ArrowSchema* schema) {\n  if (schema->format != NULL) ArrowFree((void*)schema->format);\n  if (schema->name != NULL) ArrowFree((void*)schema->name);\n  if (schema->metadata != NULL) ArrowFree((void*)schema->metadata);\n\n  // This object owns the memory for all the children, but those\n  // children may have been generated elsewhere and might have\n  // their own release() callback.\n  if (schema->children != NULL) {\n    for (int64_t i = 0; i < schema->n_children; i++) {\n      if (schema->children[i] != NULL) {\n        if (schema->children[i]->release != NULL) {\n          ArrowSchemaRelease(schema->children[i]);\n        }\n\n        ArrowFree(schema->children[i]);\n      }\n    }\n\n    ArrowFree(schema->children);\n  }\n\n  // This object owns the memory for the dictionary but it\n  // may have been generated somewhere else and have its own\n  // release() callback.\n  if (schema->dictionary != NULL) {\n    if (schema->dictionary->release != NULL) {\n      ArrowSchemaRelease(schema->dictionary);\n    }\n\n    ArrowFree(schema->dictionary);\n  }\n\n  // private data not currently used\n  if (schema->private_data != NULL) {\n    ArrowFree(schema->private_data);\n  }\n\n  schema->release = NULL;\n}\n\nstatic const char* ArrowSchemaFormatTemplate(enum ArrowType type) {\n  switch (type) {\n    case NANOARROW_TYPE_UNINITIALIZED:\n      return NULL;\n    case NANOARROW_TYPE_NA:\n      return \"n\";\n    case NANOARROW_TYPE_BOOL:\n      return \"b\";\n\n    case NANOARROW_TYPE_UINT8:\n      return \"C\";\n    case NANOARROW_TYPE_INT8:\n      return \"c\";\n    case NANOARROW_TYPE_UINT16:\n      return \"S\";\n    case NANOARROW_TYPE_INT16:\n      return \"s\";\n    case NANOARROW_TYPE_UINT32:\n      return \"I\";\n    case NANOARROW_TYPE_INT32:\n      return \"i\";\n    case NANOARROW_TYPE_UINT64:\n      return \"L\";\n    case NANOARROW_TYPE_INT64:\n      return \"l\";\n\n    case NANOARROW_TYPE_HALF_FLOAT:\n      return \"e\";\n    case NANOARROW_TYPE_FLOAT:\n      return \"f\";\n    case NANOARROW_TYPE_DOUBLE:\n      return \"g\";\n\n    case NANOARROW_TYPE_STRING:\n      return \"u\";\n    case NANOARROW_TYPE_LARGE_STRING:\n      return \"U\";\n    case NANOARROW_TYPE_STRING_VIEW:\n      return \"vu\";\n    case NANOARROW_TYPE_BINARY:\n      return \"z\";\n    case NANOARROW_TYPE_BINARY_VIEW:\n      return \"vz\";\n    case NANOARROW_TYPE_LARGE_BINARY:\n      return \"Z\";\n\n    case NANOARROW_TYPE_DATE32:\n      return \"tdD\";\n    case NANOARROW_TYPE_DATE64:\n      return \"tdm\";\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n      return \"tiM\";\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME:\n      return \"tiD\";\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO:\n      return \"tin\";\n\n    case NANOARROW_TYPE_LIST:\n      return \"+l\";\n    case NANOARROW_TYPE_LARGE_LIST:\n      return \"+L\";\n    case NANOARROW_TYPE_LIST_VIEW:\n      return \"+vl\";\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      return \"+vL\";\n    case NANOARROW_TYPE_STRUCT:\n      return \"+s\";\n    case NANOARROW_TYPE_MAP:\n      return \"+m\";\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      return \"+r\";\n\n    default:\n      return NULL;\n  }\n}\n\nstatic int ArrowSchemaInitChildrenIfNeeded(struct ArrowSchema* schema,\n                                           enum ArrowType type) {\n  switch (type) {\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_LARGE_LIST:\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n    case NANOARROW_TYPE_LIST_VIEW:\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaAllocateChildren(schema, 1));\n      ArrowSchemaInit(schema->children[0]);\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema->children[0], \"item\"));\n      break;\n    case NANOARROW_TYPE_MAP:\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaAllocateChildren(schema, 1));\n      NANOARROW_RETURN_NOT_OK(\n          ArrowSchemaInitFromType(schema->children[0], NANOARROW_TYPE_STRUCT));\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema->children[0], \"entries\"));\n      schema->children[0]->flags &= ~ARROW_FLAG_NULLABLE;\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaAllocateChildren(schema->children[0], 2));\n      ArrowSchemaInit(schema->children[0]->children[0]);\n      ArrowSchemaInit(schema->children[0]->children[1]);\n      NANOARROW_RETURN_NOT_OK(\n          ArrowSchemaSetName(schema->children[0]->children[0], \"key\"));\n      schema->children[0]->children[0]->flags &= ~ARROW_FLAG_NULLABLE;\n      NANOARROW_RETURN_NOT_OK(\n          ArrowSchemaSetName(schema->children[0]->children[1], \"value\"));\n      break;\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaAllocateChildren(schema, 2));\n      ArrowSchemaInit(schema->children[0]);\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema->children[0], \"run_ends\"));\n      schema->children[0]->flags &= ~ARROW_FLAG_NULLABLE;\n      ArrowSchemaInit(schema->children[1]);\n      NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema->children[1], \"values\"));\n    default:\n      break;\n  }\n\n  return NANOARROW_OK;\n}\n\nvoid ArrowSchemaInit(struct ArrowSchema* schema) {\n  schema->format = NULL;\n  schema->name = NULL;\n  schema->metadata = NULL;\n  schema->flags = ARROW_FLAG_NULLABLE;\n  schema->n_children = 0;\n  schema->children = NULL;\n  schema->dictionary = NULL;\n  schema->private_data = NULL;\n  schema->release = &ArrowSchemaReleaseInternal;\n}\n\nArrowErrorCode ArrowSchemaSetType(struct ArrowSchema* schema, enum ArrowType type) {\n  // We don't allocate the dictionary because it has to be nullptr\n  // for non-dictionary-encoded arrays.\n\n  // Set the format to a valid format string for type\n  const char* template_format = ArrowSchemaFormatTemplate(type);\n\n  // If type isn't recognized and not explicitly unset\n  if (template_format == NULL && type != NANOARROW_TYPE_UNINITIALIZED) {\n    return EINVAL;\n  }\n\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaSetFormat(schema, template_format));\n\n  // For types with an umabiguous child structure, allocate children\n  return ArrowSchemaInitChildrenIfNeeded(schema, type);\n}\n\nArrowErrorCode ArrowSchemaSetTypeStruct(struct ArrowSchema* schema, int64_t n_children) {\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaSetType(schema, NANOARROW_TYPE_STRUCT));\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaAllocateChildren(schema, n_children));\n  for (int64_t i = 0; i < n_children; i++) {\n    ArrowSchemaInit(schema->children[i]);\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaInitFromType(struct ArrowSchema* schema, enum ArrowType type) {\n  ArrowSchemaInit(schema);\n\n  int result = ArrowSchemaSetType(schema, type);\n  if (result != NANOARROW_OK) {\n    ArrowSchemaRelease(schema);\n    return result;\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaSetTypeFixedSize(struct ArrowSchema* schema,\n                                           enum ArrowType type, int32_t fixed_size) {\n  if (fixed_size <= 0) {\n    return EINVAL;\n  }\n\n  char buffer[64];\n  int n_chars;\n  switch (type) {\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n      n_chars = snprintf(buffer, sizeof(buffer), \"w:%\" PRId32, fixed_size);\n      break;\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      n_chars = snprintf(buffer, sizeof(buffer), \"+w:%\" PRId32, fixed_size);\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (((size_t)n_chars) >= sizeof(buffer) || n_chars < 0) {\n    return ERANGE;\n  }\n\n  buffer[n_chars] = '\\0';\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaSetFormat(schema, buffer));\n\n  if (type == NANOARROW_TYPE_FIXED_SIZE_LIST) {\n    NANOARROW_RETURN_NOT_OK(ArrowSchemaInitChildrenIfNeeded(schema, type));\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaSetTypeDecimal(struct ArrowSchema* schema, enum ArrowType type,\n                                         int32_t decimal_precision,\n                                         int32_t decimal_scale) {\n  if (decimal_precision <= 0) {\n    return EINVAL;\n  }\n\n  char buffer[64];\n  int n_chars;\n  switch (type) {\n    case NANOARROW_TYPE_DECIMAL32:\n      if (decimal_precision > 9) {\n        return EINVAL;\n      }\n\n      n_chars = snprintf(buffer, sizeof(buffer), \"d:%d,%d,32\", decimal_precision,\n                         decimal_scale);\n      break;\n    case NANOARROW_TYPE_DECIMAL64:\n      if (decimal_precision > 18) {\n        return EINVAL;\n      }\n\n      n_chars = snprintf(buffer, sizeof(buffer), \"d:%d,%d,64\", decimal_precision,\n                         decimal_scale);\n      break;\n    case NANOARROW_TYPE_DECIMAL128:\n      if (decimal_precision > 38) {\n        return EINVAL;\n      }\n\n      n_chars =\n          snprintf(buffer, sizeof(buffer), \"d:%d,%d\", decimal_precision, decimal_scale);\n      break;\n    case NANOARROW_TYPE_DECIMAL256:\n      if (decimal_precision > 76) {\n        return EINVAL;\n      }\n\n      n_chars = snprintf(buffer, sizeof(buffer), \"d:%d,%d,256\", decimal_precision,\n                         decimal_scale);\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (((size_t)n_chars) >= sizeof(buffer) || n_chars < 0) {\n    return ERANGE;\n  }\n\n  buffer[n_chars] = '\\0';\n  return ArrowSchemaSetFormat(schema, buffer);\n}\n\nArrowErrorCode ArrowSchemaSetTypeRunEndEncoded(struct ArrowSchema* schema,\n                                               enum ArrowType run_end_type) {\n  switch (run_end_type) {\n    case NANOARROW_TYPE_INT16:\n    case NANOARROW_TYPE_INT32:\n    case NANOARROW_TYPE_INT64:\n      break;\n    default:\n      return EINVAL;\n  }\n\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaSetFormat(\n      schema, ArrowSchemaFormatTemplate(NANOARROW_TYPE_RUN_END_ENCODED)));\n  NANOARROW_RETURN_NOT_OK(\n      ArrowSchemaInitChildrenIfNeeded(schema, NANOARROW_TYPE_RUN_END_ENCODED));\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaSetType(schema->children[0], run_end_type));\n  NANOARROW_RETURN_NOT_OK(\n      ArrowSchemaSetType(schema->children[1], NANOARROW_TYPE_UNINITIALIZED));\n\n  return NANOARROW_OK;\n}\n\nstatic const char* ArrowTimeUnitFormatString(enum ArrowTimeUnit time_unit) {\n  switch (time_unit) {\n    case NANOARROW_TIME_UNIT_SECOND:\n      return \"s\";\n    case NANOARROW_TIME_UNIT_MILLI:\n      return \"m\";\n    case NANOARROW_TIME_UNIT_MICRO:\n      return \"u\";\n    case NANOARROW_TIME_UNIT_NANO:\n      return \"n\";\n    default:\n      return NULL;\n  }\n}\n\nArrowErrorCode ArrowSchemaSetTypeDateTime(struct ArrowSchema* schema, enum ArrowType type,\n                                          enum ArrowTimeUnit time_unit,\n                                          const char* timezone) {\n  const char* time_unit_str = ArrowTimeUnitFormatString(time_unit);\n  if (time_unit_str == NULL) {\n    return EINVAL;\n  }\n\n  char buffer[128];\n  int n_chars;\n  switch (type) {\n    case NANOARROW_TYPE_TIME32:\n      if (timezone != NULL) {\n        return EINVAL;\n      }\n\n      switch (time_unit) {\n        case NANOARROW_TIME_UNIT_MICRO:\n        case NANOARROW_TIME_UNIT_NANO:\n          return EINVAL;\n        default:\n          break;\n      }\n\n      n_chars = snprintf(buffer, sizeof(buffer), \"tt%s\", time_unit_str);\n      break;\n    case NANOARROW_TYPE_TIME64:\n      if (timezone != NULL) {\n        return EINVAL;\n      }\n\n      switch (time_unit) {\n        case NANOARROW_TIME_UNIT_SECOND:\n        case NANOARROW_TIME_UNIT_MILLI:\n          return EINVAL;\n        default:\n          break;\n      }\n\n      n_chars = snprintf(buffer, sizeof(buffer), \"tt%s\", time_unit_str);\n      break;\n    case NANOARROW_TYPE_TIMESTAMP:\n      if (timezone == NULL) {\n        timezone = \"\";\n      }\n      n_chars = snprintf(buffer, sizeof(buffer), \"ts%s:%s\", time_unit_str, timezone);\n      break;\n    case NANOARROW_TYPE_DURATION:\n      if (timezone != NULL) {\n        return EINVAL;\n      }\n      n_chars = snprintf(buffer, sizeof(buffer), \"tD%s\", time_unit_str);\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (((size_t)n_chars) >= sizeof(buffer) || n_chars < 0) {\n    return ERANGE;\n  }\n\n  buffer[n_chars] = '\\0';\n\n  return ArrowSchemaSetFormat(schema, buffer);\n}\n\nArrowErrorCode ArrowSchemaSetTypeUnion(struct ArrowSchema* schema, enum ArrowType type,\n                                       int64_t n_children) {\n  if (n_children < 0 || n_children > 127) {\n    return EINVAL;\n  }\n\n  // Max valid size would be +ud:0,1,...126 = 401 characters + null terminator\n  char format_out[512];\n  int64_t format_out_size = 512;\n  memset(format_out, 0, format_out_size);\n  int n_chars;\n  char* format_cursor = format_out;\n\n  switch (type) {\n    case NANOARROW_TYPE_SPARSE_UNION:\n      n_chars = snprintf(format_cursor, format_out_size, \"+us:\");\n      format_cursor += n_chars;\n      format_out_size -= n_chars;\n      break;\n    case NANOARROW_TYPE_DENSE_UNION:\n      n_chars = snprintf(format_cursor, format_out_size, \"+ud:\");\n      format_cursor += n_chars;\n      format_out_size -= n_chars;\n      break;\n    default:\n      return EINVAL;\n  }\n\n  // Ensure that an encoding error from snprintf() does not result\n  // in an out-of-bounds access.\n  if (n_chars < 0) {\n    return ERANGE;\n  }\n\n  if (n_children > 0) {\n    n_chars = snprintf(format_cursor, format_out_size, \"0\");\n    format_cursor += n_chars;\n    format_out_size -= n_chars;\n\n    for (int64_t i = 1; i < n_children; i++) {\n      n_chars = snprintf(format_cursor, format_out_size, \",%\" PRId64, i);\n      format_cursor += n_chars;\n      format_out_size -= n_chars;\n    }\n  }\n\n  // Ensure that an encoding error from snprintf() does not result\n  // in an out-of-bounds access.\n  if (n_chars < 0) {\n    return ERANGE;\n  }\n\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaSetFormat(schema, format_out));\n\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaAllocateChildren(schema, n_children));\n  for (int64_t i = 0; i < n_children; i++) {\n    ArrowSchemaInit(schema->children[i]);\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaSetFormat(struct ArrowSchema* schema, const char* format) {\n  if (schema->format != NULL) {\n    ArrowFree((void*)schema->format);\n  }\n\n  if (format != NULL) {\n    size_t format_size = strlen(format) + 1;\n    schema->format = (const char*)ArrowMalloc(format_size);\n    if (schema->format == NULL) {\n      return ENOMEM;\n    }\n\n    memcpy((void*)schema->format, format, format_size);\n  } else {\n    schema->format = NULL;\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaSetName(struct ArrowSchema* schema, const char* name) {\n  if (schema->name != NULL) {\n    ArrowFree((void*)schema->name);\n  }\n\n  if (name != NULL) {\n    size_t name_size = strlen(name) + 1;\n    schema->name = (const char*)ArrowMalloc(name_size);\n    if (schema->name == NULL) {\n      return ENOMEM;\n    }\n\n    memcpy((void*)schema->name, name, name_size);\n  } else {\n    schema->name = NULL;\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaSetMetadata(struct ArrowSchema* schema, const char* metadata) {\n  if (schema->metadata != NULL) {\n    ArrowFree((void*)schema->metadata);\n  }\n\n  if (metadata != NULL) {\n    size_t metadata_size = ArrowMetadataSizeOf(metadata);\n    schema->metadata = (const char*)ArrowMalloc(metadata_size);\n    if (schema->metadata == NULL) {\n      return ENOMEM;\n    }\n\n    memcpy((void*)schema->metadata, metadata, metadata_size);\n  } else {\n    schema->metadata = NULL;\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaAllocateChildren(struct ArrowSchema* schema,\n                                           int64_t n_children) {\n  if (schema->children != NULL) {\n    return EEXIST;\n  }\n\n  if (n_children > 0) {\n    schema->children =\n        (struct ArrowSchema**)ArrowMalloc(n_children * sizeof(struct ArrowSchema*));\n\n    if (schema->children == NULL) {\n      return ENOMEM;\n    }\n\n    schema->n_children = n_children;\n\n    memset(schema->children, 0, n_children * sizeof(struct ArrowSchema*));\n\n    for (int64_t i = 0; i < n_children; i++) {\n      schema->children[i] = (struct ArrowSchema*)ArrowMalloc(sizeof(struct ArrowSchema));\n\n      if (schema->children[i] == NULL) {\n        return ENOMEM;\n      }\n\n      schema->children[i]->release = NULL;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaAllocateDictionary(struct ArrowSchema* schema) {\n  if (schema->dictionary != NULL) {\n    return EEXIST;\n  }\n\n  schema->dictionary = (struct ArrowSchema*)ArrowMalloc(sizeof(struct ArrowSchema));\n  if (schema->dictionary == NULL) {\n    return ENOMEM;\n  }\n\n  schema->dictionary->release = NULL;\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaDeepCopy(const struct ArrowSchema* schema,\n                                   struct ArrowSchema* schema_out) {\n  ArrowSchemaInit(schema_out);\n\n  int result = ArrowSchemaSetFormat(schema_out, schema->format);\n  if (result != NANOARROW_OK) {\n    ArrowSchemaRelease(schema_out);\n    return result;\n  }\n\n  schema_out->flags = schema->flags;\n\n  result = ArrowSchemaSetName(schema_out, schema->name);\n  if (result != NANOARROW_OK) {\n    ArrowSchemaRelease(schema_out);\n    return result;\n  }\n\n  result = ArrowSchemaSetMetadata(schema_out, schema->metadata);\n  if (result != NANOARROW_OK) {\n    ArrowSchemaRelease(schema_out);\n    return result;\n  }\n\n  result = ArrowSchemaAllocateChildren(schema_out, schema->n_children);\n  if (result != NANOARROW_OK) {\n    ArrowSchemaRelease(schema_out);\n    return result;\n  }\n\n  for (int64_t i = 0; i < schema->n_children; i++) {\n    result = ArrowSchemaDeepCopy(schema->children[i], schema_out->children[i]);\n    if (result != NANOARROW_OK) {\n      ArrowSchemaRelease(schema_out);\n      return result;\n    }\n  }\n\n  if (schema->dictionary != NULL) {\n    result = ArrowSchemaAllocateDictionary(schema_out);\n    if (result != NANOARROW_OK) {\n      ArrowSchemaRelease(schema_out);\n      return result;\n    }\n\n    result = ArrowSchemaDeepCopy(schema->dictionary, schema_out->dictionary);\n    if (result != NANOARROW_OK) {\n      ArrowSchemaRelease(schema_out);\n      return result;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic void ArrowSchemaViewSetPrimitive(struct ArrowSchemaView* schema_view,\n                                        enum ArrowType type) {\n  schema_view->type = type;\n  schema_view->storage_type = type;\n}\n\nstatic ArrowErrorCode ArrowSchemaViewParse(struct ArrowSchemaView* schema_view,\n                                           const char* format,\n                                           const char** format_end_out,\n                                           struct ArrowError* error) {\n  *format_end_out = format;\n\n  // needed for decimal parsing\n  const char* parse_start;\n  char* parse_end;\n\n  switch (format[0]) {\n    case 'n':\n      schema_view->type = NANOARROW_TYPE_NA;\n      schema_view->storage_type = NANOARROW_TYPE_NA;\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'b':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_BOOL);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'c':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT8);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'C':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_UINT8);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 's':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT16);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'S':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_UINT16);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'i':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT32);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'I':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_UINT32);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'l':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'L':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_UINT64);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'e':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_HALF_FLOAT);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'f':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_FLOAT);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'g':\n      ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_DOUBLE);\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n\n    // decimal\n    case 'd':\n      if (format[1] != ':' || format[2] == '\\0') {\n        ArrowErrorSet(error, \"Expected ':precision,scale[,bitwidth]' following 'd'\");\n        return EINVAL;\n      }\n\n      parse_start = format + 2;\n      schema_view->decimal_precision = (int32_t)strtol(parse_start, &parse_end, 10);\n      if (parse_end == parse_start || parse_end[0] != ',') {\n        ArrowErrorSet(error, \"Expected 'precision,scale[,bitwidth]' following 'd:'\");\n        return EINVAL;\n      }\n\n      parse_start = parse_end + 1;\n      schema_view->decimal_scale = (int32_t)strtol(parse_start, &parse_end, 10);\n      if (parse_end == parse_start) {\n        ArrowErrorSet(error, \"Expected 'scale[,bitwidth]' following 'd:precision,'\");\n        return EINVAL;\n      } else if (parse_end[0] != ',') {\n        schema_view->decimal_bitwidth = 128;\n      } else {\n        parse_start = parse_end + 1;\n        schema_view->decimal_bitwidth = (int32_t)strtol(parse_start, &parse_end, 10);\n        if (parse_start == parse_end) {\n          ArrowErrorSet(error, \"Expected precision following 'd:precision,scale,'\");\n          return EINVAL;\n        }\n      }\n\n      *format_end_out = parse_end;\n\n      switch (schema_view->decimal_bitwidth) {\n        case 32:\n          ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_DECIMAL32);\n          return NANOARROW_OK;\n        case 64:\n          ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_DECIMAL64);\n          return NANOARROW_OK;\n        case 128:\n          ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_DECIMAL128);\n          return NANOARROW_OK;\n        case 256:\n          ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_DECIMAL256);\n          return NANOARROW_OK;\n        default:\n          ArrowErrorSet(error,\n                        \"Expected decimal bitwidth of 128 or 256 but found %\" PRId32,\n                        schema_view->decimal_bitwidth);\n          return EINVAL;\n      }\n\n    // validity + data\n    case 'w':\n      schema_view->type = NANOARROW_TYPE_FIXED_SIZE_BINARY;\n      schema_view->storage_type = NANOARROW_TYPE_FIXED_SIZE_BINARY;\n      if (format[1] != ':' || format[2] == '\\0') {\n        ArrowErrorSet(error, \"Expected ':<width>' following 'w'\");\n        return EINVAL;\n      }\n\n      schema_view->fixed_size = (int32_t)strtol(format + 2, (char**)format_end_out, 10);\n      return NANOARROW_OK;\n\n    // validity + offset + data\n    case 'z':\n      schema_view->type = NANOARROW_TYPE_BINARY;\n      schema_view->storage_type = NANOARROW_TYPE_BINARY;\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'u':\n      schema_view->type = NANOARROW_TYPE_STRING;\n      schema_view->storage_type = NANOARROW_TYPE_STRING;\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n\n    // validity + large_offset + data\n    case 'Z':\n      schema_view->type = NANOARROW_TYPE_LARGE_BINARY;\n      schema_view->storage_type = NANOARROW_TYPE_LARGE_BINARY;\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n    case 'U':\n      schema_view->type = NANOARROW_TYPE_LARGE_STRING;\n      schema_view->storage_type = NANOARROW_TYPE_LARGE_STRING;\n      *format_end_out = format + 1;\n      return NANOARROW_OK;\n\n    // nested types\n    case '+':\n      switch (format[1]) {\n        // list has validity + offset or offset\n        case 'l':\n          schema_view->storage_type = NANOARROW_TYPE_LIST;\n          schema_view->type = NANOARROW_TYPE_LIST;\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n\n        // large list has validity + large_offset or large_offset\n        case 'L':\n          schema_view->storage_type = NANOARROW_TYPE_LARGE_LIST;\n          schema_view->type = NANOARROW_TYPE_LARGE_LIST;\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n\n        // run end encoded has no buffer at all\n        case 'r':\n          schema_view->storage_type = NANOARROW_TYPE_RUN_END_ENCODED;\n          schema_view->type = NANOARROW_TYPE_RUN_END_ENCODED;\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n\n        // just validity buffer\n        case 'w':\n          if (format[2] != ':' || format[3] == '\\0') {\n            ArrowErrorSet(error, \"Expected ':<width>' following '+w'\");\n            return EINVAL;\n          }\n\n          schema_view->storage_type = NANOARROW_TYPE_FIXED_SIZE_LIST;\n          schema_view->type = NANOARROW_TYPE_FIXED_SIZE_LIST;\n          schema_view->fixed_size =\n              (int32_t)strtol(format + 3, (char**)format_end_out, 10);\n          return NANOARROW_OK;\n        case 's':\n          schema_view->storage_type = NANOARROW_TYPE_STRUCT;\n          schema_view->type = NANOARROW_TYPE_STRUCT;\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n        case 'm':\n          schema_view->storage_type = NANOARROW_TYPE_MAP;\n          schema_view->type = NANOARROW_TYPE_MAP;\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n\n        // unions\n        case 'u':\n          switch (format[2]) {\n            case 'd':\n              schema_view->storage_type = NANOARROW_TYPE_DENSE_UNION;\n              schema_view->type = NANOARROW_TYPE_DENSE_UNION;\n              break;\n            case 's':\n              schema_view->storage_type = NANOARROW_TYPE_SPARSE_UNION;\n              schema_view->type = NANOARROW_TYPE_SPARSE_UNION;\n              break;\n            default:\n              ArrowErrorSet(error,\n                            \"Expected union format string +us:<type_ids> or \"\n                            \"+ud:<type_ids> but found '%s'\",\n                            format);\n              return EINVAL;\n          }\n\n          if (format[3] == ':') {\n            schema_view->union_type_ids = format + 4;\n            int64_t n_type_ids =\n                _ArrowParseUnionTypeIds(schema_view->union_type_ids, NULL);\n            if (n_type_ids != schema_view->schema->n_children) {\n              ArrowErrorSet(error,\n                            \"Expected union type_ids parameter to be a comma-separated \"\n                            \"list of %\" PRId64 \" values between 0 and 127 but found '%s'\",\n                            schema_view->schema->n_children, schema_view->union_type_ids);\n              return EINVAL;\n            }\n            *format_end_out = format + strlen(format);\n            return NANOARROW_OK;\n          } else {\n            ArrowErrorSet(error,\n                          \"Expected union format string +us:<type_ids> or +ud:<type_ids> \"\n                          \"but found '%s'\",\n                          format);\n            return EINVAL;\n          }\n\n        // views\n        case 'v':\n          switch (format[2]) {\n            case 'l':\n              schema_view->storage_type = NANOARROW_TYPE_LIST_VIEW;\n              schema_view->type = NANOARROW_TYPE_LIST_VIEW;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'L':\n              schema_view->storage_type = NANOARROW_TYPE_LARGE_LIST_VIEW;\n              schema_view->type = NANOARROW_TYPE_LARGE_LIST_VIEW;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            default:\n              ArrowErrorSet(\n                  error, \"Expected view format string +vl or +vL but found '%s'\", format);\n              return EINVAL;\n          }\n        default:\n          ArrowErrorSet(error, \"Expected nested type format string but found '%s'\",\n                        format);\n          return EINVAL;\n      }\n\n    // date/time types\n    case 't':\n      switch (format[1]) {\n        // date\n        case 'd':\n          switch (format[2]) {\n            case 'D':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT32);\n              schema_view->type = NANOARROW_TYPE_DATE32;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'm':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_DATE64;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            default:\n              ArrowErrorSet(error, \"Expected 'D' or 'm' following 'td' but found '%s'\",\n                            format + 2);\n              return EINVAL;\n          }\n\n        // time of day\n        case 't':\n          switch (format[2]) {\n            case 's':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT32);\n              schema_view->type = NANOARROW_TYPE_TIME32;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_SECOND;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'm':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT32);\n              schema_view->type = NANOARROW_TYPE_TIME32;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_MILLI;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'u':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_TIME64;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_MICRO;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'n':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_TIME64;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_NANO;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            default:\n              ArrowErrorSet(\n                  error, \"Expected 's', 'm', 'u', or 'n' following 'tt' but found '%s'\",\n                  format + 2);\n              return EINVAL;\n          }\n\n        // timestamp\n        case 's':\n          switch (format[2]) {\n            case 's':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_TIMESTAMP;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_SECOND;\n              break;\n            case 'm':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_TIMESTAMP;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_MILLI;\n              break;\n            case 'u':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_TIMESTAMP;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_MICRO;\n              break;\n            case 'n':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_TIMESTAMP;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_NANO;\n              break;\n            default:\n              ArrowErrorSet(\n                  error, \"Expected 's', 'm', 'u', or 'n' following 'ts' but found '%s'\",\n                  format + 2);\n              return EINVAL;\n          }\n\n          if (format[3] != ':') {\n            ArrowErrorSet(error, \"Expected ':' following '%.3s' but found '%s'\", format,\n                          format + 3);\n            return EINVAL;\n          }\n\n          schema_view->timezone = format + 4;\n          *format_end_out = format + strlen(format);\n          return NANOARROW_OK;\n\n        // duration\n        case 'D':\n          switch (format[2]) {\n            case 's':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_DURATION;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_SECOND;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'm':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_DURATION;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_MILLI;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'u':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_DURATION;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_MICRO;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'n':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INT64);\n              schema_view->type = NANOARROW_TYPE_DURATION;\n              schema_view->time_unit = NANOARROW_TIME_UNIT_NANO;\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            default:\n              ArrowErrorSet(error,\n                            \"Expected 's', 'm', u', or 'n' following 'tD' but found '%s'\",\n                            format + 2);\n              return EINVAL;\n          }\n\n        // interval\n        case 'i':\n          switch (format[2]) {\n            case 'M':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INTERVAL_MONTHS);\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'D':\n              ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_INTERVAL_DAY_TIME);\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            case 'n':\n              ArrowSchemaViewSetPrimitive(schema_view,\n                                          NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO);\n              *format_end_out = format + 3;\n              return NANOARROW_OK;\n            default:\n              ArrowErrorSet(error,\n                            \"Expected 'M', 'D', or 'n' following 'ti' but found '%s'\",\n                            format + 2);\n              return EINVAL;\n          }\n\n        default:\n          ArrowErrorSet(\n              error, \"Expected 'd', 't', 's', 'D', or 'i' following 't' but found '%s'\",\n              format + 1);\n          return EINVAL;\n      }\n\n    // view types\n    case 'v': {\n      switch (format[1]) {\n        case 'u':\n          ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_STRING_VIEW);\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n        case 'z':\n          ArrowSchemaViewSetPrimitive(schema_view, NANOARROW_TYPE_BINARY_VIEW);\n          *format_end_out = format + 2;\n          return NANOARROW_OK;\n        default:\n          ArrowErrorSet(error, \"Expected 'u', or 'z' following 'v' but found '%s'\",\n                        format + 1);\n          return EINVAL;\n      }\n    }\n\n    default:\n      ArrowErrorSet(error, \"Unknown format: '%s'\", format);\n      return EINVAL;\n  }\n}\n\nstatic ArrowErrorCode ArrowSchemaViewValidateNChildren(\n    struct ArrowSchemaView* schema_view, int64_t n_children, struct ArrowError* error) {\n  if (n_children != -1 && schema_view->schema->n_children != n_children) {\n    ArrowErrorSet(\n        error, \"Expected schema with %\" PRId64 \" children but found %\" PRId64 \" children\",\n        n_children, schema_view->schema->n_children);\n    return EINVAL;\n  }\n\n  // Don't do a full validation of children but do check that they won't\n  // segfault if inspected\n  struct ArrowSchema* child;\n  for (int64_t i = 0; i < schema_view->schema->n_children; i++) {\n    child = schema_view->schema->children[i];\n    if (child == NULL) {\n      ArrowErrorSet(\n          error, \"Expected valid schema at schema->children[%\" PRId64 \"] but found NULL\",\n          i);\n      return EINVAL;\n    } else if (child->release == NULL) {\n      ArrowErrorSet(error,\n                    \"Expected valid schema at schema->children[%\" PRId64\n                    \"] but found a released schema\",\n                    i);\n      return EINVAL;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowSchemaViewValidateUnion(struct ArrowSchemaView* schema_view,\n                                                   struct ArrowError* error) {\n  return ArrowSchemaViewValidateNChildren(schema_view, -1, error);\n}\n\nstatic ArrowErrorCode ArrowSchemaViewValidateMap(struct ArrowSchemaView* schema_view,\n                                                 struct ArrowError* error) {\n  NANOARROW_RETURN_NOT_OK(ArrowSchemaViewValidateNChildren(schema_view, 1, error));\n\n  if (schema_view->schema->children[0]->n_children != 2) {\n    ArrowErrorSet(error,\n                  \"Expected child of map type to have 2 children but found %\" PRId64,\n                  schema_view->schema->children[0]->n_children);\n    return EINVAL;\n  }\n\n  if (strcmp(schema_view->schema->children[0]->format, \"+s\") != 0) {\n    ArrowErrorSet(error, \"Expected format of child of map type to be '+s' but found '%s'\",\n                  schema_view->schema->children[0]->format);\n    return EINVAL;\n  }\n\n  if (schema_view->schema->children[0]->flags & ARROW_FLAG_NULLABLE) {\n    ArrowErrorSet(error,\n                  \"Expected child of map type to be non-nullable but was nullable\");\n    return EINVAL;\n  }\n\n  if (schema_view->schema->children[0]->children[0]->flags & ARROW_FLAG_NULLABLE) {\n    ArrowErrorSet(error, \"Expected key of map type to be non-nullable but was nullable\");\n    return EINVAL;\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowSchemaViewValidateDictionary(\n    struct ArrowSchemaView* schema_view, struct ArrowError* error) {\n  // check for valid index type\n  switch (schema_view->storage_type) {\n    case NANOARROW_TYPE_UINT8:\n    case NANOARROW_TYPE_INT8:\n    case NANOARROW_TYPE_UINT16:\n    case NANOARROW_TYPE_INT16:\n    case NANOARROW_TYPE_UINT32:\n    case NANOARROW_TYPE_INT32:\n    case NANOARROW_TYPE_UINT64:\n    case NANOARROW_TYPE_INT64:\n      break;\n    default:\n      ArrowErrorSet(\n          error,\n          \"Expected dictionary schema index type to be an integral type but found '%s'\",\n          schema_view->schema->format);\n      return EINVAL;\n  }\n\n  struct ArrowSchemaView dictionary_schema_view;\n  return ArrowSchemaViewInit(&dictionary_schema_view, schema_view->schema->dictionary,\n                             error);\n}\n\nstatic ArrowErrorCode ArrowSchemaViewValidate(struct ArrowSchemaView* schema_view,\n                                              enum ArrowType type,\n                                              struct ArrowError* error) {\n  switch (type) {\n    case NANOARROW_TYPE_NA:\n    case NANOARROW_TYPE_BOOL:\n    case NANOARROW_TYPE_UINT8:\n    case NANOARROW_TYPE_INT8:\n    case NANOARROW_TYPE_UINT16:\n    case NANOARROW_TYPE_INT16:\n    case NANOARROW_TYPE_UINT32:\n    case NANOARROW_TYPE_INT32:\n    case NANOARROW_TYPE_UINT64:\n    case NANOARROW_TYPE_INT64:\n    case NANOARROW_TYPE_HALF_FLOAT:\n    case NANOARROW_TYPE_FLOAT:\n    case NANOARROW_TYPE_DOUBLE:\n    case NANOARROW_TYPE_DECIMAL32:\n    case NANOARROW_TYPE_DECIMAL64:\n    case NANOARROW_TYPE_DECIMAL128:\n    case NANOARROW_TYPE_DECIMAL256:\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_LARGE_STRING:\n    case NANOARROW_TYPE_BINARY:\n    case NANOARROW_TYPE_LARGE_BINARY:\n    case NANOARROW_TYPE_DATE32:\n    case NANOARROW_TYPE_DATE64:\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME:\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO:\n    case NANOARROW_TYPE_TIMESTAMP:\n    case NANOARROW_TYPE_TIME32:\n    case NANOARROW_TYPE_TIME64:\n    case NANOARROW_TYPE_DURATION:\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      return ArrowSchemaViewValidateNChildren(schema_view, 0, error);\n\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n      if (schema_view->fixed_size <= 0) {\n        ArrowErrorSet(error, \"Expected size > 0 for fixed size binary but found size %d\",\n                      schema_view->fixed_size);\n        return EINVAL;\n      }\n      return ArrowSchemaViewValidateNChildren(schema_view, 0, error);\n\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_LIST_VIEW:\n    case NANOARROW_TYPE_LARGE_LIST:\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      return ArrowSchemaViewValidateNChildren(schema_view, 1, error);\n\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      return ArrowSchemaViewValidateNChildren(schema_view, 2, error);\n\n    case NANOARROW_TYPE_STRUCT:\n      return ArrowSchemaViewValidateNChildren(schema_view, -1, error);\n\n    case NANOARROW_TYPE_SPARSE_UNION:\n    case NANOARROW_TYPE_DENSE_UNION:\n      return ArrowSchemaViewValidateUnion(schema_view, error);\n\n    case NANOARROW_TYPE_MAP:\n      return ArrowSchemaViewValidateMap(schema_view, error);\n\n    case NANOARROW_TYPE_DICTIONARY:\n      return ArrowSchemaViewValidateDictionary(schema_view, error);\n\n    default:\n      ArrowErrorSet(error, \"Expected a valid enum ArrowType value but found %d\",\n                    schema_view->type);\n      return EINVAL;\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowSchemaViewInit(struct ArrowSchemaView* schema_view,\n                                   const struct ArrowSchema* schema,\n                                   struct ArrowError* error) {\n  if (schema == NULL) {\n    ArrowErrorSet(error, \"Expected non-NULL schema\");\n    return EINVAL;\n  }\n\n  if (schema->release == NULL) {\n    ArrowErrorSet(error, \"Expected non-released schema\");\n    return EINVAL;\n  }\n\n  schema_view->schema = schema;\n\n  const char* format = schema->format;\n  if (format == NULL) {\n    ArrowErrorSet(\n        error,\n        \"Error parsing schema->format: Expected a null-terminated string but found NULL\");\n    return EINVAL;\n  }\n\n  size_t format_len = strlen(format);\n  if (format_len == 0) {\n    ArrowErrorSet(error, \"Error parsing schema->format: Expected a string with size > 0\");\n    return EINVAL;\n  }\n\n  const char* format_end_out;\n  int result = ArrowSchemaViewParse(schema_view, format, &format_end_out, error);\n\n  if (result != NANOARROW_OK) {\n    if (error != NULL) {\n      char child_error[1024];\n      memcpy(child_error, ArrowErrorMessage(error), 1024);\n      ArrowErrorSet(error, \"Error parsing schema->format: %s\", child_error);\n    }\n\n    return result;\n  }\n\n  if ((format + format_len) != format_end_out) {\n    ArrowErrorSet(error, \"Error parsing schema->format '%s': parsed %d/%zu characters\",\n                  format, (int)(format_end_out - format), format_len);\n    return EINVAL;\n  }\n\n  if (schema->dictionary != NULL) {\n    schema_view->type = NANOARROW_TYPE_DICTIONARY;\n  }\n\n  NANOARROW_RETURN_NOT_OK(\n      ArrowSchemaViewValidate(schema_view, schema_view->storage_type, error));\n\n  if (schema_view->storage_type != schema_view->type) {\n    NANOARROW_RETURN_NOT_OK(\n        ArrowSchemaViewValidate(schema_view, schema_view->type, error));\n  }\n\n  int64_t unknown_flags = schema->flags & ~NANOARROW_FLAG_ALL_SUPPORTED;\n  if (unknown_flags != 0) {\n    ArrowErrorSet(error, \"Unknown ArrowSchema flag\");\n    return EINVAL;\n  }\n\n  if (schema->flags & ARROW_FLAG_DICTIONARY_ORDERED &&\n      schema_view->type != NANOARROW_TYPE_DICTIONARY) {\n    ArrowErrorSet(error,\n                  \"ARROW_FLAG_DICTIONARY_ORDERED is only relevant for dictionaries\");\n    return EINVAL;\n  }\n\n  if (schema->flags & ARROW_FLAG_MAP_KEYS_SORTED &&\n      schema_view->type != NANOARROW_TYPE_MAP) {\n    ArrowErrorSet(error, \"ARROW_FLAG_MAP_KEYS_SORTED is only relevant for a map type\");\n    return EINVAL;\n  }\n\n  ArrowLayoutInit(&schema_view->layout, schema_view->storage_type);\n  if (schema_view->storage_type == NANOARROW_TYPE_FIXED_SIZE_BINARY) {\n    schema_view->layout.element_size_bits[1] = (int64_t)schema_view->fixed_size * 8;\n  } else if (schema_view->storage_type == NANOARROW_TYPE_FIXED_SIZE_LIST) {\n    schema_view->layout.child_size_elements = schema_view->fixed_size;\n  }\n\n  schema_view->extension_name = ArrowCharView(NULL);\n  schema_view->extension_metadata = ArrowCharView(NULL);\n  NANOARROW_RETURN_NOT_OK(ArrowMetadataGetValue(schema->metadata,\n                                                ArrowCharView(\"ARROW:extension:name\"),\n                                                &schema_view->extension_name));\n  NANOARROW_RETURN_NOT_OK(ArrowMetadataGetValue(schema->metadata,\n                                                ArrowCharView(\"ARROW:extension:metadata\"),\n                                                &schema_view->extension_metadata));\n\n  return NANOARROW_OK;\n}\n\nstatic int64_t ArrowSchemaTypeToStringInternal(struct ArrowSchemaView* schema_view,\n                                               char* out, int64_t n) {\n  const char* type_string = ArrowTypeString(schema_view->type);\n  switch (schema_view->type) {\n    case NANOARROW_TYPE_DECIMAL32:\n    case NANOARROW_TYPE_DECIMAL64:\n    case NANOARROW_TYPE_DECIMAL128:\n    case NANOARROW_TYPE_DECIMAL256:\n      return snprintf(out, n, \"%s(%\" PRId32 \", %\" PRId32 \")\", type_string,\n                      schema_view->decimal_precision, schema_view->decimal_scale);\n    case NANOARROW_TYPE_TIMESTAMP:\n      return snprintf(out, n, \"%s('%s', '%s')\", type_string,\n                      ArrowTimeUnitString(schema_view->time_unit), schema_view->timezone);\n    case NANOARROW_TYPE_TIME32:\n    case NANOARROW_TYPE_TIME64:\n    case NANOARROW_TYPE_DURATION:\n      return snprintf(out, n, \"%s('%s')\", type_string,\n                      ArrowTimeUnitString(schema_view->time_unit));\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      return snprintf(out, n, \"%s(%\" PRId32 \")\", type_string, schema_view->fixed_size);\n    case NANOARROW_TYPE_SPARSE_UNION:\n    case NANOARROW_TYPE_DENSE_UNION:\n      return snprintf(out, n, \"%s([%s])\", type_string, schema_view->union_type_ids);\n    default:\n      return snprintf(out, n, \"%s\", type_string);\n  }\n}\n\n// Helper for bookkeeping to emulate sprintf()-like behaviour spread\n// among multiple sprintf calls.\nstatic inline void ArrowToStringLogChars(char** out, int64_t n_chars_last,\n                                         int64_t* n_remaining, int64_t* n_chars) {\n  // In the unlikely snprintf() returning a negative value (encoding error),\n  // ensure the result won't cause an out-of-bounds access.\n  if (n_chars_last < 0) {\n    n_chars_last = 0;\n  }\n\n  *n_chars += n_chars_last;\n  *n_remaining -= n_chars_last;\n\n  // n_remaining is never less than 0\n  if (*n_remaining < 0) {\n    *n_remaining = 0;\n  }\n\n  // Can't do math on a NULL pointer\n  if (*out != NULL) {\n    *out += n_chars_last;\n  }\n}\n\nint64_t ArrowSchemaToString(const struct ArrowSchema* schema, char* out, int64_t n,\n                            char recursive) {\n  if (schema == NULL) {\n    return snprintf(out, n, \"[invalid: pointer is null]\");\n  }\n\n  if (schema->release == NULL) {\n    return snprintf(out, n, \"[invalid: schema is released]\");\n  }\n\n  struct ArrowSchemaView schema_view;\n  struct ArrowError error;\n\n  if (ArrowSchemaViewInit(&schema_view, schema, &error) != NANOARROW_OK) {\n    return snprintf(out, n, \"[invalid: %s]\", ArrowErrorMessage(&error));\n  }\n\n  // Extension type and dictionary should include both the top-level type\n  // and the storage type.\n  int is_extension = schema_view.extension_name.size_bytes > 0;\n  int is_dictionary = schema->dictionary != NULL;\n  int64_t n_chars = 0;\n  int64_t n_chars_last = 0;\n\n  // Uncommon but not technically impossible that both are true\n  if (is_extension && is_dictionary) {\n    n_chars_last = snprintf(\n        out, n, \"%.*s{dictionary(%s)<\", (int)schema_view.extension_name.size_bytes,\n        schema_view.extension_name.data, ArrowTypeString(schema_view.storage_type));\n  } else if (is_extension) {\n    n_chars_last = snprintf(out, n, \"%.*s{\", (int)schema_view.extension_name.size_bytes,\n                            schema_view.extension_name.data);\n  } else if (is_dictionary) {\n    n_chars_last =\n        snprintf(out, n, \"dictionary(%s)<\", ArrowTypeString(schema_view.storage_type));\n  }\n\n  ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n\n  if (!is_dictionary) {\n    n_chars_last = ArrowSchemaTypeToStringInternal(&schema_view, out, n);\n  } else {\n    n_chars_last = ArrowSchemaToString(schema->dictionary, out, n, recursive);\n  }\n\n  ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n\n  if (recursive && schema->format[0] == '+') {\n    n_chars_last = snprintf(out, n, \"<\");\n    ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n\n    for (int64_t i = 0; i < schema->n_children; i++) {\n      if (i > 0) {\n        n_chars_last = snprintf(out, n, \", \");\n        ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n      }\n\n      // ArrowSchemaToStringInternal() will validate the child and print the error,\n      // but we need the name first\n      if (schema->children[i] != NULL && schema->children[i]->release != NULL &&\n          schema->children[i]->name != NULL) {\n        n_chars_last = snprintf(out, n, \"%s: \", schema->children[i]->name);\n        ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n      }\n\n      n_chars_last = ArrowSchemaToString(schema->children[i], out, n, recursive);\n      ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n    }\n\n    n_chars_last = snprintf(out, n, \">\");\n    ArrowToStringLogChars(&out, n_chars_last, &n, &n_chars);\n  }\n\n  if (is_extension && is_dictionary) {\n    n_chars += snprintf(out, n, \">}\");\n  } else if (is_extension) {\n    n_chars += snprintf(out, n, \"}\");\n  } else if (is_dictionary) {\n    n_chars += snprintf(out, n, \">\");\n  }\n\n  // Ensure that we always return a positive result\n  if (n_chars > 0) {\n    return n_chars;\n  } else {\n    return 0;\n  }\n}\n\nArrowErrorCode ArrowMetadataReaderInit(struct ArrowMetadataReader* reader,\n                                       const char* metadata) {\n  reader->metadata = metadata;\n\n  if (reader->metadata == NULL) {\n    reader->offset = 0;\n    reader->remaining_keys = 0;\n  } else {\n    memcpy(&reader->remaining_keys, reader->metadata, sizeof(int32_t));\n    reader->offset = sizeof(int32_t);\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowMetadataReaderRead(struct ArrowMetadataReader* reader,\n                                       struct ArrowStringView* key_out,\n                                       struct ArrowStringView* value_out) {\n  if (reader->remaining_keys <= 0) {\n    return EINVAL;\n  }\n\n  int64_t pos = 0;\n\n  int32_t key_size;\n  memcpy(&key_size, reader->metadata + reader->offset + pos, sizeof(int32_t));\n  pos += sizeof(int32_t);\n\n  key_out->data = reader->metadata + reader->offset + pos;\n  key_out->size_bytes = key_size;\n  pos += key_size;\n\n  int32_t value_size;\n  memcpy(&value_size, reader->metadata + reader->offset + pos, sizeof(int32_t));\n  pos += sizeof(int32_t);\n\n  value_out->data = reader->metadata + reader->offset + pos;\n  value_out->size_bytes = value_size;\n  pos += value_size;\n\n  reader->offset += pos;\n  reader->remaining_keys--;\n  return NANOARROW_OK;\n}\n\nint64_t ArrowMetadataSizeOf(const char* metadata) {\n  if (metadata == NULL) {\n    return 0;\n  }\n\n  struct ArrowMetadataReader reader;\n  struct ArrowStringView key;\n  struct ArrowStringView value;\n  if (ArrowMetadataReaderInit(&reader, metadata) != NANOARROW_OK) {\n    return 0;\n  }\n\n  int64_t size = sizeof(int32_t);\n  while (ArrowMetadataReaderRead(&reader, &key, &value) == NANOARROW_OK) {\n    size += sizeof(int32_t) + key.size_bytes + sizeof(int32_t) + value.size_bytes;\n  }\n\n  return size;\n}\n\nstatic ArrowErrorCode ArrowMetadataGetValueInternal(const char* metadata,\n                                                    struct ArrowStringView* key,\n                                                    struct ArrowStringView* value_out) {\n  struct ArrowMetadataReader reader;\n  struct ArrowStringView existing_key;\n  struct ArrowStringView existing_value;\n  NANOARROW_RETURN_NOT_OK(ArrowMetadataReaderInit(&reader, metadata));\n\n  while (ArrowMetadataReaderRead(&reader, &existing_key, &existing_value) ==\n         NANOARROW_OK) {\n    int key_equal = key->size_bytes == existing_key.size_bytes &&\n                    strncmp(key->data, existing_key.data, existing_key.size_bytes) == 0;\n    if (key_equal) {\n      value_out->data = existing_value.data;\n      value_out->size_bytes = existing_value.size_bytes;\n      break;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowMetadataGetValue(const char* metadata, struct ArrowStringView key,\n                                     struct ArrowStringView* value_out) {\n  if (value_out == NULL) {\n    return EINVAL;\n  }\n\n  return ArrowMetadataGetValueInternal(metadata, &key, value_out);\n}\n\nchar ArrowMetadataHasKey(const char* metadata, struct ArrowStringView key) {\n  struct ArrowStringView value = ArrowCharView(NULL);\n  if (ArrowMetadataGetValue(metadata, key, &value) != NANOARROW_OK) {\n    return 0;\n  }\n\n  return value.data != NULL;\n}\n\nArrowErrorCode ArrowMetadataBuilderInit(struct ArrowBuffer* buffer,\n                                        const char* metadata) {\n  ArrowBufferInit(buffer);\n  return ArrowBufferAppend(buffer, metadata, ArrowMetadataSizeOf(metadata));\n}\n\nstatic ArrowErrorCode ArrowMetadataBuilderAppendInternal(struct ArrowBuffer* buffer,\n                                                         struct ArrowStringView* key,\n                                                         struct ArrowStringView* value) {\n  if (value == NULL) {\n    return NANOARROW_OK;\n  }\n\n  if (buffer->capacity_bytes == 0) {\n    NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(buffer, 0));\n  }\n\n  if (((size_t)buffer->capacity_bytes) < sizeof(int32_t)) {\n    return EINVAL;\n  }\n\n  int32_t n_keys;\n  memcpy(&n_keys, buffer->data, sizeof(int32_t));\n\n  int32_t key_size = (int32_t)key->size_bytes;\n  int32_t value_size = (int32_t)value->size_bytes;\n  NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(\n      buffer, sizeof(int32_t) + key_size + sizeof(int32_t) + value_size));\n\n  ArrowBufferAppendUnsafe(buffer, &key_size, sizeof(int32_t));\n  ArrowBufferAppendUnsafe(buffer, key->data, key_size);\n  ArrowBufferAppendUnsafe(buffer, &value_size, sizeof(int32_t));\n  ArrowBufferAppendUnsafe(buffer, value->data, value_size);\n\n  n_keys++;\n  memcpy(buffer->data, &n_keys, sizeof(int32_t));\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowMetadataBuilderSetInternal(struct ArrowBuffer* buffer,\n                                                      struct ArrowStringView* key,\n                                                      struct ArrowStringView* value) {\n  // Inspect the current value to see if we can avoid copying the buffer\n  struct ArrowStringView current_value = ArrowCharView(NULL);\n  NANOARROW_RETURN_NOT_OK(\n      ArrowMetadataGetValueInternal((const char*)buffer->data, key, &current_value));\n\n  // The key should be removed but no key exists\n  if (value == NULL && current_value.data == NULL) {\n    return NANOARROW_OK;\n  }\n\n  // The key/value can be appended because no key exists\n  if (value != NULL && current_value.data == NULL) {\n    return ArrowMetadataBuilderAppendInternal(buffer, key, value);\n  }\n\n  struct ArrowMetadataReader reader;\n  struct ArrowStringView existing_key;\n  struct ArrowStringView existing_value;\n  NANOARROW_RETURN_NOT_OK(ArrowMetadataReaderInit(&reader, (const char*)buffer->data));\n\n  struct ArrowBuffer new_buffer;\n  NANOARROW_RETURN_NOT_OK(ArrowMetadataBuilderInit(&new_buffer, NULL));\n\n  while (reader.remaining_keys > 0) {\n    int result = ArrowMetadataReaderRead(&reader, &existing_key, &existing_value);\n    if (result != NANOARROW_OK) {\n      ArrowBufferReset(&new_buffer);\n      return result;\n    }\n\n    if (key->size_bytes == existing_key.size_bytes &&\n        strncmp((const char*)key->data, (const char*)existing_key.data,\n                existing_key.size_bytes) == 0) {\n      result = ArrowMetadataBuilderAppendInternal(&new_buffer, key, value);\n      value = NULL;\n    } else {\n      result =\n          ArrowMetadataBuilderAppendInternal(&new_buffer, &existing_key, &existing_value);\n    }\n\n    if (result != NANOARROW_OK) {\n      ArrowBufferReset(&new_buffer);\n      return result;\n    }\n  }\n\n  ArrowBufferReset(buffer);\n  ArrowBufferMove(&new_buffer, buffer);\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowMetadataBuilderAppend(struct ArrowBuffer* buffer,\n                                          struct ArrowStringView key,\n                                          struct ArrowStringView value) {\n  return ArrowMetadataBuilderAppendInternal(buffer, &key, &value);\n}\n\nArrowErrorCode ArrowMetadataBuilderSet(struct ArrowBuffer* buffer,\n                                       struct ArrowStringView key,\n                                       struct ArrowStringView value) {\n  return ArrowMetadataBuilderSetInternal(buffer, &key, &value);\n}\n\nArrowErrorCode ArrowMetadataBuilderRemove(struct ArrowBuffer* buffer,\n                                          struct ArrowStringView key) {\n  return ArrowMetadataBuilderSetInternal(buffer, &key, NULL);\n}\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#include <errno.h>\n#include <inttypes.h>\n#include <stdarg.h>\n#include <stdbool.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n\n#include \"nanoarrow/nanoarrow.h\"\n\nstatic void ArrowArrayReleaseInternal(struct ArrowArray* array) {\n  // Release buffers held by this array\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  if (private_data != NULL) {\n    ArrowBitmapReset(&private_data->bitmap);\n    ArrowBufferReset(&private_data->buffers[0]);\n    ArrowBufferReset(&private_data->buffers[1]);\n    ArrowFree(private_data->buffer_data);\n    for (int32_t i = 0; i < private_data->n_variadic_buffers; ++i) {\n      ArrowBufferReset(&private_data->variadic_buffers[i]);\n    }\n    ArrowFree(private_data->variadic_buffers);\n    ArrowFree(private_data);\n  }\n\n  // This object owns the memory for all the children, but those\n  // children may have been generated elsewhere and might have\n  // their own release() callback.\n  if (array->children != NULL) {\n    for (int64_t i = 0; i < array->n_children; i++) {\n      if (array->children[i] != NULL) {\n        if (array->children[i]->release != NULL) {\n          ArrowArrayRelease(array->children[i]);\n        }\n\n        ArrowFree(array->children[i]);\n      }\n    }\n\n    ArrowFree(array->children);\n  }\n\n  // This object owns the memory for the dictionary but it\n  // may have been generated somewhere else and have its own\n  // release() callback.\n  if (array->dictionary != NULL) {\n    if (array->dictionary->release != NULL) {\n      ArrowArrayRelease(array->dictionary);\n    }\n\n    ArrowFree(array->dictionary);\n  }\n\n  // Mark released\n  array->release = NULL;\n}\n\nstatic int ArrowArrayIsInternal(struct ArrowArray* array) {\n  return array->release == &ArrowArrayReleaseInternal;\n}\n\nstatic ArrowErrorCode ArrowArraySetStorageType(struct ArrowArray* array,\n                                               enum ArrowType storage_type) {\n  switch (storage_type) {\n    case NANOARROW_TYPE_UNINITIALIZED:\n    case NANOARROW_TYPE_NA:\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      array->n_buffers = 0;\n      break;\n\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n    case NANOARROW_TYPE_STRUCT:\n    case NANOARROW_TYPE_SPARSE_UNION:\n      array->n_buffers = 1;\n      break;\n\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_LARGE_LIST:\n    case NANOARROW_TYPE_MAP:\n    case NANOARROW_TYPE_BOOL:\n    case NANOARROW_TYPE_UINT8:\n    case NANOARROW_TYPE_INT8:\n    case NANOARROW_TYPE_UINT16:\n    case NANOARROW_TYPE_INT16:\n    case NANOARROW_TYPE_UINT32:\n    case NANOARROW_TYPE_INT32:\n    case NANOARROW_TYPE_UINT64:\n    case NANOARROW_TYPE_INT64:\n    case NANOARROW_TYPE_HALF_FLOAT:\n    case NANOARROW_TYPE_FLOAT:\n    case NANOARROW_TYPE_DOUBLE:\n    case NANOARROW_TYPE_DECIMAL32:\n    case NANOARROW_TYPE_DECIMAL64:\n    case NANOARROW_TYPE_DECIMAL128:\n    case NANOARROW_TYPE_DECIMAL256:\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME:\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO:\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n    case NANOARROW_TYPE_DENSE_UNION:\n      array->n_buffers = 2;\n      break;\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      array->n_buffers = NANOARROW_BINARY_VIEW_FIXED_BUFFERS + 1;\n      break;\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_LARGE_STRING:\n    case NANOARROW_TYPE_BINARY:\n    case NANOARROW_TYPE_LARGE_BINARY:\n    case NANOARROW_TYPE_LIST_VIEW:\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      array->n_buffers = 3;\n      break;\n\n    default:\n      return EINVAL;\n\n      return NANOARROW_OK;\n  }\n\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  private_data->storage_type = storage_type;\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayInitFromType(struct ArrowArray* array,\n                                      enum ArrowType storage_type) {\n  array->length = 0;\n  array->null_count = 0;\n  array->offset = 0;\n  array->n_buffers = 0;\n  array->n_children = 0;\n  array->buffers = NULL;\n  array->children = NULL;\n  array->dictionary = NULL;\n  array->release = &ArrowArrayReleaseInternal;\n  array->private_data = NULL;\n\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)ArrowMalloc(sizeof(struct ArrowArrayPrivateData));\n  if (private_data == NULL) {\n    array->release = NULL;\n    return ENOMEM;\n  }\n\n  ArrowBitmapInit(&private_data->bitmap);\n  ArrowBufferInit(&private_data->buffers[0]);\n  ArrowBufferInit(&private_data->buffers[1]);\n  private_data->buffer_data =\n      (const void**)ArrowMalloc(sizeof(void*) * NANOARROW_MAX_FIXED_BUFFERS);\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; ++i) {\n    private_data->buffer_data[i] = NULL;\n  }\n  private_data->n_variadic_buffers = 0;\n  private_data->variadic_buffers = NULL;\n  private_data->list_view_offset = 0;\n\n  array->private_data = private_data;\n  array->buffers = (const void**)(private_data->buffer_data);\n\n  // These are not technically \"storage\" in the sense that they do not appear\n  // in the ArrowSchemaView's storage_type member; however, allowing them here\n  // is helpful to maximize the number of types that can avoid going through\n  // ArrowArrayInitFromSchema().\n  switch (storage_type) {\n    case NANOARROW_TYPE_DURATION:\n    case NANOARROW_TYPE_TIMESTAMP:\n    case NANOARROW_TYPE_TIME64:\n    case NANOARROW_TYPE_DATE64:\n      storage_type = NANOARROW_TYPE_INT64;\n      break;\n    case NANOARROW_TYPE_TIME32:\n    case NANOARROW_TYPE_DATE32:\n      storage_type = NANOARROW_TYPE_INT32;\n      break;\n    default:\n      break;\n  }\n\n  int result = ArrowArraySetStorageType(array, storage_type);\n  if (result != NANOARROW_OK) {\n    ArrowArrayRelease(array);\n    return result;\n  }\n\n  ArrowLayoutInit(&private_data->layout, storage_type);\n  // We can only know this not to be true when initializing based on a schema\n  // so assume this to be true.\n  private_data->union_type_id_is_child_index = 1;\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayInitFromArrayView(struct ArrowArray* array,\n                                           const struct ArrowArrayView* array_view,\n                                           struct ArrowError* error) {\n  NANOARROW_RETURN_NOT_OK_WITH_ERROR(\n      ArrowArrayInitFromType(array, array_view->storage_type), error);\n  int result;\n\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  private_data->layout = array_view->layout;\n\n  if (array_view->n_children > 0) {\n    result = ArrowArrayAllocateChildren(array, array_view->n_children);\n    if (result != NANOARROW_OK) {\n      ArrowArrayRelease(array);\n      return result;\n    }\n\n    for (int64_t i = 0; i < array_view->n_children; i++) {\n      result =\n          ArrowArrayInitFromArrayView(array->children[i], array_view->children[i], error);\n      if (result != NANOARROW_OK) {\n        ArrowArrayRelease(array);\n        return result;\n      }\n    }\n  }\n\n  if (array_view->dictionary != NULL) {\n    result = ArrowArrayAllocateDictionary(array);\n    if (result != NANOARROW_OK) {\n      ArrowArrayRelease(array);\n      return result;\n    }\n\n    result =\n        ArrowArrayInitFromArrayView(array->dictionary, array_view->dictionary, error);\n    if (result != NANOARROW_OK) {\n      ArrowArrayRelease(array);\n      return result;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayInitFromSchema(struct ArrowArray* array,\n                                        const struct ArrowSchema* schema,\n                                        struct ArrowError* error) {\n  struct ArrowArrayView array_view;\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewInitFromSchema(&array_view, schema, error));\n  NANOARROW_RETURN_NOT_OK(ArrowArrayInitFromArrayView(array, &array_view, error));\n  if (array_view.storage_type == NANOARROW_TYPE_DENSE_UNION ||\n      array_view.storage_type == NANOARROW_TYPE_SPARSE_UNION) {\n    struct ArrowArrayPrivateData* private_data =\n        (struct ArrowArrayPrivateData*)array->private_data;\n    // We can still build arrays if this isn't true; however, the append\n    // functions won't work. Instead, we store this value and error only\n    // when StartAppending is called.\n    private_data->union_type_id_is_child_index =\n        _ArrowUnionTypeIdsWillEqualChildIndices(schema->format + 4, schema->n_children);\n  }\n\n  ArrowArrayViewReset(&array_view);\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayAllocateChildren(struct ArrowArray* array, int64_t n_children) {\n  if (array->children != NULL) {\n    return EINVAL;\n  }\n\n  if (n_children == 0) {\n    return NANOARROW_OK;\n  }\n\n  array->children =\n      (struct ArrowArray**)ArrowMalloc(n_children * sizeof(struct ArrowArray*));\n  if (array->children == NULL) {\n    return ENOMEM;\n  }\n\n  memset(array->children, 0, n_children * sizeof(struct ArrowArray*));\n\n  for (int64_t i = 0; i < n_children; i++) {\n    array->children[i] = (struct ArrowArray*)ArrowMalloc(sizeof(struct ArrowArray));\n    if (array->children[i] == NULL) {\n      return ENOMEM;\n    }\n    array->children[i]->release = NULL;\n  }\n\n  array->n_children = n_children;\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayAllocateDictionary(struct ArrowArray* array) {\n  if (array->dictionary != NULL) {\n    return EINVAL;\n  }\n\n  array->dictionary = (struct ArrowArray*)ArrowMalloc(sizeof(struct ArrowArray));\n  if (array->dictionary == NULL) {\n    return ENOMEM;\n  }\n\n  array->dictionary->release = NULL;\n  return NANOARROW_OK;\n}\n\nvoid ArrowArraySetValidityBitmap(struct ArrowArray* array, struct ArrowBitmap* bitmap) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  ArrowBufferMove(&bitmap->buffer, &private_data->bitmap.buffer);\n  private_data->bitmap.size_bits = bitmap->size_bits;\n  bitmap->size_bits = 0;\n  private_data->buffer_data[0] = private_data->bitmap.buffer.data;\n  array->null_count = -1;\n}\n\nArrowErrorCode ArrowArraySetBuffer(struct ArrowArray* array, int64_t i,\n                                   struct ArrowBuffer* buffer) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  if (i >= array->n_buffers || i < 0) {\n    return EINVAL;\n  }\n\n  // Find the `i`th buffer, release what is currently there, and move the\n  // supplied buffer into that slot.\n  struct ArrowBuffer* dst = ArrowArrayBuffer(array, i);\n  ArrowBufferReset(dst);\n  ArrowBufferMove(buffer, dst);\n\n  // Flush the pointer into array->buffers. In theory clients should call\n  // ArrowArrayFinishBuilding() to flush the pointer values before passing\n  // this array elsewhere; however, in early nanoarrow versions this was not\n  // needed and some code may depend on this being true.\n  private_data->buffer_data[i] = dst->data;\n  array->buffers = private_data->buffer_data;\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowArrayViewInitFromArray(struct ArrowArrayView* array_view,\n                                                  struct ArrowArray* array,\n                                                  struct ArrowError* error) {\n  if (!ArrowArrayIsInternal(array)) {\n    ArrowErrorSet(error,\n                  \"Can't initialize internal ArrowArrayView from external ArrowArray\");\n    return EINVAL;\n  }\n\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  ArrowArrayViewInitFromType(array_view, private_data->storage_type);\n  array_view->layout = private_data->layout;\n  array_view->array = array;\n  array_view->length = array->length;\n  array_view->offset = array->offset;\n  array_view->null_count = array->null_count;\n\n  array_view->buffer_views[0].data.as_uint8 = private_data->bitmap.buffer.data;\n  array_view->buffer_views[0].size_bytes = private_data->bitmap.buffer.size_bytes;\n  array_view->buffer_views[1].data.as_uint8 = private_data->buffers[0].data;\n  array_view->buffer_views[1].size_bytes = private_data->buffers[0].size_bytes;\n  array_view->buffer_views[2].data.as_uint8 = private_data->buffers[1].data;\n  array_view->buffer_views[2].size_bytes = private_data->buffers[1].size_bytes;\n\n  int result = ArrowArrayViewAllocateChildren(array_view, array->n_children);\n  if (result != NANOARROW_OK) {\n    ArrowArrayViewReset(array_view);\n    return result;\n  }\n\n  for (int64_t i = 0; i < array->n_children; i++) {\n    result =\n        ArrowArrayViewInitFromArray(array_view->children[i], array->children[i], error);\n    if (result != NANOARROW_OK) {\n      ArrowArrayViewReset(array_view);\n      return result;\n    }\n  }\n\n  if (array->dictionary != NULL) {\n    result = ArrowArrayViewAllocateDictionary(array_view);\n    if (result != NANOARROW_OK) {\n      ArrowArrayViewReset(array_view);\n      return result;\n    }\n\n    result =\n        ArrowArrayViewInitFromArray(array_view->dictionary, array->dictionary, error);\n    if (result != NANOARROW_OK) {\n      ArrowArrayViewReset(array_view);\n      return result;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowArrayReserveInternal(struct ArrowArray* array,\n                                                struct ArrowArrayView* array_view) {\n  // Loop through buffers and reserve the extra space that we know about\n  for (int64_t i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    // Don't reserve on a validity buffer that hasn't been allocated yet\n    if (array_view->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_VALIDITY &&\n        ArrowArrayBuffer(array, i)->data == NULL) {\n      continue;\n    }\n\n    int64_t additional_size_bytes =\n        array_view->buffer_views[i].size_bytes - ArrowArrayBuffer(array, i)->size_bytes;\n\n    if (additional_size_bytes > 0) {\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferReserve(ArrowArrayBuffer(array, i), additional_size_bytes));\n    }\n  }\n\n  // Recursively reserve children\n  for (int64_t i = 0; i < array->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(\n        ArrowArrayReserveInternal(array->children[i], array_view->children[i]));\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayReserve(struct ArrowArray* array,\n                                 int64_t additional_size_elements) {\n  struct ArrowArrayView array_view;\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewInitFromArray(&array_view, array, NULL));\n\n  // Calculate theoretical buffer sizes (recursively)\n  ArrowArrayViewSetLength(&array_view, array->length + additional_size_elements);\n\n  // Walk the structure (recursively)\n  int result = ArrowArrayReserveInternal(array, &array_view);\n  ArrowArrayViewReset(&array_view);\n  if (result != NANOARROW_OK) {\n    return result;\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowArrayFinalizeBuffers(struct ArrowArray* array) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    if (private_data->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_VALIDITY ||\n        private_data->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_NONE) {\n      continue;\n    }\n\n    struct ArrowBuffer* buffer = ArrowArrayBuffer(array, i);\n    if (buffer->data == NULL) {\n      NANOARROW_RETURN_NOT_OK((ArrowBufferReserve(buffer, 1)));\n    }\n  }\n\n  for (int64_t i = 0; i < array->n_children; i++) {\n    if (ArrowArrayIsInternal(array->children[i])) {\n      NANOARROW_RETURN_NOT_OK(ArrowArrayFinalizeBuffers(array->children[i]));\n    }\n  }\n\n  if (array->dictionary != NULL && ArrowArrayIsInternal(array->dictionary)) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayFinalizeBuffers(array->dictionary));\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic ArrowErrorCode ArrowArrayFlushInternalPointers(struct ArrowArray* array) {\n  NANOARROW_DCHECK(ArrowArrayIsInternal(array));\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  if (array->n_buffers > NANOARROW_MAX_FIXED_BUFFERS) {\n    // If the variadic sizes buffer was not set and there is at least one variadic\n    // buffer, populate it now (if there are no variadic buffers there will be exactly\n    // three total buffers and we don't need to do anything special here). Notably, this\n    // will occur when building a BinaryView/StringView array by element using the\n    // appender.\n    struct ArrowBuffer* sizes_buffer = ArrowArrayBuffer(array, array->n_buffers - 1);\n    if (sizes_buffer->data == NULL && sizes_buffer->size_bytes == 0) {\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferReserve(sizes_buffer, private_data->n_variadic_buffers));\n      for (int64_t i = 0; i < private_data->n_variadic_buffers; i++) {\n        struct ArrowBuffer* variadic_buffer =\n            ArrowArrayBuffer(array, i + NANOARROW_BINARY_VIEW_FIXED_BUFFERS);\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppendInt64(sizes_buffer, variadic_buffer->size_bytes));\n      }\n    }\n  }\n\n  for (int32_t i = 0; i < array->n_buffers; i++) {\n    private_data->buffer_data[i] = ArrowArrayBuffer(array, i)->data;\n  }\n\n  array->buffers = (const void**)(private_data->buffer_data);\n\n  // Flush internal pointers for child/dictionary arrays if we allocated them. Clients\n  // building arrays by buffer might have moved arrays from some other source (e.g.,\n  // to create a record batch) and calling this function in that case will cause a crash.\n  for (int64_t i = 0; i < array->n_children; i++) {\n    if (ArrowArrayIsInternal(array->children[i])) {\n      NANOARROW_RETURN_NOT_OK(ArrowArrayFlushInternalPointers(array->children[i]));\n    }\n  }\n\n  if (array->dictionary != NULL && ArrowArrayIsInternal(array->dictionary)) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayFlushInternalPointers(array->dictionary));\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayFinishBuilding(struct ArrowArray* array,\n                                        enum ArrowValidationLevel validation_level,\n                                        struct ArrowError* error) {\n  // Even if the data buffer is size zero, the pointer value needed to be non-null\n  // in some implementations (at least one version of Arrow C++ at the time this\n  // was added and C# as later discovered). Only do this fix if we can assume\n  // CPU data access.\n  if (validation_level >= NANOARROW_VALIDATION_LEVEL_DEFAULT) {\n    NANOARROW_RETURN_NOT_OK_WITH_ERROR(ArrowArrayFinalizeBuffers(array), error);\n  }\n\n  // Make sure the value we get with array->buffers[i] is set to the actual\n  // pointer (which may have changed from the original due to reallocation)\n  NANOARROW_RETURN_NOT_OK_WITH_ERROR(ArrowArrayFlushInternalPointers(array), error);\n\n  if (validation_level == NANOARROW_VALIDATION_LEVEL_NONE) {\n    return NANOARROW_OK;\n  }\n\n  // For validation, initialize an ArrowArrayView with our known buffer sizes\n  struct ArrowArrayView array_view;\n  NANOARROW_RETURN_NOT_OK_WITH_ERROR(\n      ArrowArrayViewInitFromArray(&array_view, array, error), error);\n  int result = ArrowArrayViewValidate(&array_view, validation_level, error);\n  ArrowArrayViewReset(&array_view);\n  return result;\n}\n\nArrowErrorCode ArrowArrayFinishBuildingDefault(struct ArrowArray* array,\n                                               struct ArrowError* error) {\n  return ArrowArrayFinishBuilding(array, NANOARROW_VALIDATION_LEVEL_DEFAULT, error);\n}\n\nvoid ArrowArrayViewInitFromType(struct ArrowArrayView* array_view,\n                                enum ArrowType storage_type) {\n  memset(array_view, 0, sizeof(struct ArrowArrayView));\n  array_view->storage_type = storage_type;\n  ArrowLayoutInit(&array_view->layout, storage_type);\n}\n\nArrowErrorCode ArrowArrayViewAllocateChildren(struct ArrowArrayView* array_view,\n                                              int64_t n_children) {\n  if (array_view->children != NULL) {\n    return EINVAL;\n  }\n\n  if (n_children == 0) {\n    array_view->n_children = 0;\n    return NANOARROW_OK;\n  }\n\n  array_view->children =\n      (struct ArrowArrayView**)ArrowMalloc(n_children * sizeof(struct ArrowArrayView*));\n  if (array_view->children == NULL) {\n    return ENOMEM;\n  }\n\n  for (int64_t i = 0; i < n_children; i++) {\n    array_view->children[i] = NULL;\n  }\n\n  array_view->n_children = n_children;\n\n  for (int64_t i = 0; i < n_children; i++) {\n    array_view->children[i] =\n        (struct ArrowArrayView*)ArrowMalloc(sizeof(struct ArrowArrayView));\n    if (array_view->children[i] == NULL) {\n      return ENOMEM;\n    }\n    ArrowArrayViewInitFromType(array_view->children[i], NANOARROW_TYPE_UNINITIALIZED);\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayViewAllocateDictionary(struct ArrowArrayView* array_view) {\n  if (array_view->dictionary != NULL) {\n    return EINVAL;\n  }\n\n  array_view->dictionary =\n      (struct ArrowArrayView*)ArrowMalloc(sizeof(struct ArrowArrayView));\n  if (array_view->dictionary == NULL) {\n    return ENOMEM;\n  }\n\n  ArrowArrayViewInitFromType(array_view->dictionary, NANOARROW_TYPE_UNINITIALIZED);\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayViewInitFromSchema(struct ArrowArrayView* array_view,\n                                            const struct ArrowSchema* schema,\n                                            struct ArrowError* error) {\n  struct ArrowSchemaView schema_view;\n  int result = ArrowSchemaViewInit(&schema_view, schema, error);\n  if (result != NANOARROW_OK) {\n    return result;\n  }\n\n  ArrowArrayViewInitFromType(array_view, schema_view.storage_type);\n  array_view->layout = schema_view.layout;\n\n  result = ArrowArrayViewAllocateChildren(array_view, schema->n_children);\n  if (result != NANOARROW_OK) {\n    ArrowErrorSet(error, \"ArrowArrayViewAllocateChildren() failed\");\n    ArrowArrayViewReset(array_view);\n    return result;\n  }\n\n  for (int64_t i = 0; i < schema->n_children; i++) {\n    result =\n        ArrowArrayViewInitFromSchema(array_view->children[i], schema->children[i], error);\n    if (result != NANOARROW_OK) {\n      ArrowArrayViewReset(array_view);\n      return result;\n    }\n  }\n\n  if (schema->dictionary != NULL) {\n    result = ArrowArrayViewAllocateDictionary(array_view);\n    if (result != NANOARROW_OK) {\n      ArrowArrayViewReset(array_view);\n      return result;\n    }\n\n    result =\n        ArrowArrayViewInitFromSchema(array_view->dictionary, schema->dictionary, error);\n    if (result != NANOARROW_OK) {\n      ArrowArrayViewReset(array_view);\n      return result;\n    }\n  }\n\n  if (array_view->storage_type == NANOARROW_TYPE_SPARSE_UNION ||\n      array_view->storage_type == NANOARROW_TYPE_DENSE_UNION) {\n    array_view->union_type_id_map = (int8_t*)ArrowMalloc(256 * sizeof(int8_t));\n    if (array_view->union_type_id_map == NULL) {\n      return ENOMEM;\n    }\n\n    memset(array_view->union_type_id_map, -1, 256);\n    int32_t n_type_ids = _ArrowParseUnionTypeIds(schema_view.union_type_ids,\n                                                 array_view->union_type_id_map + 128);\n    for (int8_t child_index = 0; child_index < n_type_ids; child_index++) {\n      int8_t type_id = array_view->union_type_id_map[128 + child_index];\n      array_view->union_type_id_map[type_id] = child_index;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nvoid ArrowArrayViewReset(struct ArrowArrayView* array_view) {\n  if (array_view->children != NULL) {\n    for (int64_t i = 0; i < array_view->n_children; i++) {\n      if (array_view->children[i] != NULL) {\n        ArrowArrayViewReset(array_view->children[i]);\n        ArrowFree(array_view->children[i]);\n      }\n    }\n\n    ArrowFree(array_view->children);\n  }\n\n  if (array_view->dictionary != NULL) {\n    ArrowArrayViewReset(array_view->dictionary);\n    ArrowFree(array_view->dictionary);\n  }\n\n  if (array_view->union_type_id_map != NULL) {\n    ArrowFree(array_view->union_type_id_map);\n  }\n\n  ArrowArrayViewInitFromType(array_view, NANOARROW_TYPE_UNINITIALIZED);\n}\n\nvoid ArrowArrayViewSetLength(struct ArrowArrayView* array_view, int64_t length) {\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    int64_t element_size_bytes = array_view->layout.element_size_bits[i] / 8;\n\n    switch (array_view->layout.buffer_type[i]) {\n      case NANOARROW_BUFFER_TYPE_VALIDITY:\n        array_view->buffer_views[i].size_bytes = _ArrowBytesForBits(length);\n        continue;\n      case NANOARROW_BUFFER_TYPE_DATA_OFFSET:\n        // Probably don't want/need to rely on the producer to have allocated an\n        // offsets buffer of length 1 for a zero-size array\n        array_view->buffer_views[i].size_bytes =\n            (length != 0) * element_size_bytes * (length + 1);\n        continue;\n      case NANOARROW_BUFFER_TYPE_DATA:\n        array_view->buffer_views[i].size_bytes =\n            _ArrowRoundUpToMultipleOf8(array_view->layout.element_size_bits[i] * length) /\n            8;\n        continue;\n      case NANOARROW_BUFFER_TYPE_TYPE_ID:\n      case NANOARROW_BUFFER_TYPE_UNION_OFFSET:\n      case NANOARROW_BUFFER_TYPE_VIEW_OFFSET:\n      case NANOARROW_BUFFER_TYPE_SIZE:\n        array_view->buffer_views[i].size_bytes = element_size_bytes * length;\n        continue;\n      case NANOARROW_BUFFER_TYPE_VARIADIC_DATA:\n      case NANOARROW_BUFFER_TYPE_VARIADIC_SIZE:\n      case NANOARROW_BUFFER_TYPE_NONE:\n        array_view->buffer_views[i].size_bytes = 0;\n        continue;\n    }\n  }\n\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_STRUCT:\n    case NANOARROW_TYPE_SPARSE_UNION:\n      for (int64_t i = 0; i < array_view->n_children; i++) {\n        ArrowArrayViewSetLength(array_view->children[i], length);\n      }\n      break;\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      if (array_view->n_children >= 1) {\n        ArrowArrayViewSetLength(array_view->children[0],\n                                length * array_view->layout.child_size_elements);\n      }\n    default:\n      break;\n  }\n}\n\n// This version recursively extracts information from the array and stores it\n// in the array view, performing any checks that require the original array.\nstatic int ArrowArrayViewSetArrayInternal(struct ArrowArrayView* array_view,\n                                          const struct ArrowArray* array,\n                                          struct ArrowError* error) {\n  array_view->array = array;\n  array_view->offset = array->offset;\n  array_view->length = array->length;\n  array_view->null_count = array->null_count;\n  array_view->variadic_buffer_sizes = NULL;\n  array_view->variadic_buffers = NULL;\n  array_view->n_variadic_buffers = 0;\n\n  int64_t buffers_required = 0;\n  const int nfixed_buf = array_view->storage_type == NANOARROW_TYPE_STRING_VIEW ||\n                                 array_view->storage_type == NANOARROW_TYPE_BINARY_VIEW\n                             ? NANOARROW_BINARY_VIEW_FIXED_BUFFERS\n                             : NANOARROW_MAX_FIXED_BUFFERS;\n  for (int i = 0; i < nfixed_buf; i++) {\n    if (array_view->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_NONE) {\n      break;\n    }\n\n    buffers_required++;\n\n    // Set buffer pointer\n    array_view->buffer_views[i].data.data = array->buffers[i];\n\n    // If non-null, set buffer size to unknown.\n    if (array->buffers[i] == NULL) {\n      array_view->buffer_views[i].size_bytes = 0;\n    } else {\n      array_view->buffer_views[i].size_bytes = -1;\n    }\n  }\n\n  if (array_view->storage_type == NANOARROW_TYPE_STRING_VIEW ||\n      array_view->storage_type == NANOARROW_TYPE_BINARY_VIEW) {\n    const int64_t n_buffers = array->n_buffers;\n    const int32_t nfixed_buf = NANOARROW_BINARY_VIEW_FIXED_BUFFERS;\n\n    const int32_t nvariadic_buf = (int32_t)(n_buffers - nfixed_buf - 1);\n    array_view->n_variadic_buffers = nvariadic_buf;\n    buffers_required += nvariadic_buf + 1;\n    array_view->variadic_buffers = array->buffers + NANOARROW_BINARY_VIEW_FIXED_BUFFERS;\n    array_view->variadic_buffer_sizes = (int64_t*)array->buffers[n_buffers - 1];\n  }\n\n  if (buffers_required != array->n_buffers) {\n    ArrowErrorSet(error,\n                  \"Expected array with %\" PRId64 \" buffer(s) but found %\" PRId64\n                  \" buffer(s)\",\n                  buffers_required, array->n_buffers);\n    return EINVAL;\n  }\n\n  // Check number of children\n  if (array_view->n_children != array->n_children) {\n    ArrowErrorSet(error, \"Expected %\" PRId64 \" children but found %\" PRId64 \" children\",\n                  array_view->n_children, array->n_children);\n    return EINVAL;\n  }\n\n  // Recurse for children\n  for (int64_t i = 0; i < array_view->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayViewSetArrayInternal(array_view->children[i],\n                                                           array->children[i], error));\n  }\n\n  // Check dictionary\n  if (array->dictionary == NULL && array_view->dictionary != NULL) {\n    ArrowErrorSet(error, \"Expected dictionary but found NULL\");\n    return EINVAL;\n  }\n\n  if (array->dictionary != NULL && array_view->dictionary == NULL) {\n    ArrowErrorSet(error, \"Expected NULL dictionary but found dictionary member\");\n    return EINVAL;\n  }\n\n  if (array->dictionary != NULL) {\n    NANOARROW_RETURN_NOT_OK(\n        ArrowArrayViewSetArrayInternal(array_view->dictionary, array->dictionary, error));\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowArrayViewValidateMinimal(struct ArrowArrayView* array_view,\n                                         struct ArrowError* error) {\n  if (array_view->length < 0) {\n    ArrowErrorSet(error, \"Expected length >= 0 but found length %\" PRId64,\n                  array_view->length);\n    return EINVAL;\n  }\n\n  if (array_view->offset < 0) {\n    ArrowErrorSet(error, \"Expected offset >= 0 but found offset %\" PRId64,\n                  array_view->offset);\n    return EINVAL;\n  }\n\n  // Ensure that offset + length fits within an int64 before a possible overflow\n  if ((uint64_t)array_view->offset + (uint64_t)array_view->length > (uint64_t)INT64_MAX) {\n    ArrowErrorSet(error, \"Offset + length is > INT64_MAX\");\n    return EINVAL;\n  }\n\n  // Calculate buffer sizes that do not require buffer access. If marked as\n  // unknown, assign the buffer size; otherwise, validate it.\n  int64_t offset_plus_length = array_view->offset + array_view->length;\n\n  // Only loop over the first two buffers because the size of the third buffer\n  // is always data dependent for all current Arrow types.\n  for (int i = 0; i < 2; i++) {\n    int64_t element_size_bytes = array_view->layout.element_size_bits[i] / 8;\n    // Initialize with a value that will cause an error if accidentally used uninitialized\n    // Need to suppress the clang-tidy warning because gcc warns for possible use\n    int64_t min_buffer_size_bytes =  // NOLINT(clang-analyzer-deadcode.DeadStores)\n        array_view->buffer_views[i].size_bytes + 1;\n\n    switch (array_view->layout.buffer_type[i]) {\n      case NANOARROW_BUFFER_TYPE_VALIDITY:\n        if (array_view->null_count == 0 && array_view->buffer_views[i].size_bytes == 0) {\n          continue;\n        }\n\n        min_buffer_size_bytes = _ArrowBytesForBits(offset_plus_length);\n        break;\n      case NANOARROW_BUFFER_TYPE_SIZE:\n        min_buffer_size_bytes = element_size_bytes * offset_plus_length;\n        break;\n      case NANOARROW_BUFFER_TYPE_DATA_OFFSET:\n        // Probably don't want/need to rely on the producer to have allocated an\n        // offsets buffer of length 1 for a zero-size array\n        min_buffer_size_bytes =\n            (offset_plus_length != 0) * element_size_bytes * (offset_plus_length + 1);\n        break;\n      case NANOARROW_BUFFER_TYPE_VIEW_OFFSET:\n        min_buffer_size_bytes =\n            (offset_plus_length != 0) * element_size_bytes * offset_plus_length;\n        break;\n      case NANOARROW_BUFFER_TYPE_DATA:\n        min_buffer_size_bytes =\n            _ArrowRoundUpToMultipleOf8(array_view->layout.element_size_bits[i] *\n                                       offset_plus_length) /\n            8;\n        break;\n      case NANOARROW_BUFFER_TYPE_TYPE_ID:\n      case NANOARROW_BUFFER_TYPE_UNION_OFFSET:\n        min_buffer_size_bytes = element_size_bytes * offset_plus_length;\n        break;\n      case NANOARROW_BUFFER_TYPE_VARIADIC_DATA:\n      case NANOARROW_BUFFER_TYPE_VARIADIC_SIZE:\n      case NANOARROW_BUFFER_TYPE_NONE:\n        continue;\n    }\n\n    // Assign or validate buffer size\n    if (array_view->buffer_views[i].size_bytes == -1) {\n      array_view->buffer_views[i].size_bytes = min_buffer_size_bytes;\n    } else if (array_view->buffer_views[i].size_bytes < min_buffer_size_bytes) {\n      ArrowErrorSet(error,\n                    \"Expected %s array buffer %d to have size >= %\" PRId64\n                    \" bytes but found \"\n                    \"buffer with %\" PRId64 \" bytes\",\n                    ArrowTypeString(array_view->storage_type), i, min_buffer_size_bytes,\n                    array_view->buffer_views[i].size_bytes);\n      return EINVAL;\n    }\n  }\n\n  // For list, fixed-size list and map views, we can validate the number of children\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_LARGE_LIST:\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n    case NANOARROW_TYPE_MAP:\n    case NANOARROW_TYPE_LIST_VIEW:\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      if (array_view->n_children != 1) {\n        ArrowErrorSet(error,\n                      \"Expected 1 child of %s array but found %\" PRId64 \" child arrays\",\n                      ArrowTypeString(array_view->storage_type), array_view->n_children);\n        return EINVAL;\n      }\n      break;\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      if (array_view->n_children != 2) {\n        ArrowErrorSet(\n            error, \"Expected 2 children for %s array but found %\" PRId64 \" child arrays\",\n            ArrowTypeString(array_view->storage_type), array_view->n_children);\n        return EINVAL;\n      }\n      break;\n    default:\n      break;\n  }\n\n  // For struct, the sparse union, and the fixed-size list views, we can validate child\n  // lengths.\n  int64_t child_min_length;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_SPARSE_UNION:\n    case NANOARROW_TYPE_STRUCT:\n      child_min_length = (array_view->offset + array_view->length);\n      for (int64_t i = 0; i < array_view->n_children; i++) {\n        if (array_view->children[i]->length < child_min_length) {\n          ArrowErrorSet(error,\n                        \"Expected struct child %\" PRId64 \" to have length >= %\" PRId64\n                        \" but found child with \"\n                        \"length %\" PRId64,\n                        i + 1, child_min_length, array_view->children[i]->length);\n          return EINVAL;\n        }\n      }\n      break;\n\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      child_min_length = (array_view->offset + array_view->length) *\n                         array_view->layout.child_size_elements;\n      if (array_view->children[0]->length < child_min_length) {\n        ArrowErrorSet(error,\n                      \"Expected child of fixed_size_list array to have length >= %\" PRId64\n                      \" but \"\n                      \"found array with length %\" PRId64,\n                      child_min_length, array_view->children[0]->length);\n        return EINVAL;\n      }\n      break;\n\n    case NANOARROW_TYPE_RUN_END_ENCODED: {\n      if (array_view->n_children != 2) {\n        ArrowErrorSet(error,\n                      \"Expected 2 children for run-end encoded array but found %\" PRId64,\n                      array_view->n_children);\n        return EINVAL;\n      }\n      struct ArrowArrayView* run_ends_view = array_view->children[0];\n      struct ArrowArrayView* values_view = array_view->children[1];\n      int64_t max_length;\n      switch (run_ends_view->storage_type) {\n        case NANOARROW_TYPE_INT16:\n          max_length = INT16_MAX;\n          break;\n        case NANOARROW_TYPE_INT32:\n          max_length = INT32_MAX;\n          break;\n        case NANOARROW_TYPE_INT64:\n          max_length = INT64_MAX;\n          break;\n        default:\n          ArrowErrorSet(\n              error,\n              \"Run-end encoded array only supports INT16, INT32 or INT64 run-ends \"\n              \"but found run-ends type %s\",\n              ArrowTypeString(run_ends_view->storage_type));\n          return EINVAL;\n      }\n\n      // There is already a check above that offset_plus_length < INT64_MAX\n      if (offset_plus_length > max_length) {\n        ArrowErrorSet(error,\n                      \"Offset + length of a run-end encoded array must fit in a value\"\n                      \" of the run end type %s but is %\" PRId64 \" + %\" PRId64,\n                      ArrowTypeString(run_ends_view->storage_type), array_view->offset,\n                      array_view->length);\n        return EINVAL;\n      }\n\n      if (run_ends_view->length > values_view->length) {\n        ArrowErrorSet(error,\n                      \"Length of run_ends is greater than the length of values: %\" PRId64\n                      \" > %\" PRId64,\n                      run_ends_view->length, values_view->length);\n        return EINVAL;\n      }\n\n      if (run_ends_view->length == 0 && values_view->length != 0) {\n        ArrowErrorSet(error,\n                      \"Run-end encoded array has zero length %\" PRId64\n                      \", but values array has \"\n                      \"non-zero length\",\n                      values_view->length);\n        return EINVAL;\n      }\n\n      if (run_ends_view->null_count != 0) {\n        ArrowErrorSet(error, \"Null count must be 0 for run ends array, but is %\" PRId64,\n                      run_ends_view->null_count);\n        return EINVAL;\n      }\n      break;\n    }\n\n    default:\n      break;\n  }\n\n  // Recurse for children\n  for (int64_t i = 0; i < array_view->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(\n        ArrowArrayViewValidateMinimal(array_view->children[i], error));\n  }\n\n  // Recurse for dictionary\n  if (array_view->dictionary != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateMinimal(array_view->dictionary, error));\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowArrayViewValidateDefault(struct ArrowArrayView* array_view,\n                                         struct ArrowError* error) {\n  // Perform minimal validation. This will validate or assign\n  // buffer sizes as long as buffer access is not required.\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateMinimal(array_view, error));\n\n  // Calculate buffer sizes or child lengths that require accessing the offsets\n  // buffer. Where appropriate, validate that the first offset is >= 0.\n  // If a buffer size is marked as unknown, assign it; otherwise, validate it.\n  int64_t offset_plus_length = array_view->offset + array_view->length;\n\n  int64_t first_offset;\n  int64_t last_offset;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_BINARY:\n      if (array_view->buffer_views[1].size_bytes != 0) {\n        first_offset = array_view->buffer_views[1].data.as_int32[array_view->offset];\n        if (first_offset < 0) {\n          ArrowErrorSet(error, \"Expected first offset >= 0 but found %\" PRId64,\n                        first_offset);\n          return EINVAL;\n        }\n\n        last_offset = array_view->buffer_views[1].data.as_int32[offset_plus_length];\n        if (last_offset < 0) {\n          ArrowErrorSet(error, \"Expected last offset >= 0 but found %\" PRId64,\n                        last_offset);\n          return EINVAL;\n        }\n\n        // If the data buffer size is unknown, assign it; otherwise, check it\n        if (array_view->buffer_views[2].size_bytes == -1) {\n          array_view->buffer_views[2].size_bytes = last_offset;\n        } else if (array_view->buffer_views[2].size_bytes < last_offset) {\n          ArrowErrorSet(error,\n                        \"Expected %s array buffer 2 to have size >= %\" PRId64\n                        \" bytes but found \"\n                        \"buffer with %\" PRId64 \" bytes\",\n                        ArrowTypeString(array_view->storage_type), last_offset,\n                        array_view->buffer_views[2].size_bytes);\n          return EINVAL;\n        }\n      } else if (array_view->buffer_views[2].size_bytes == -1) {\n        // If the data buffer size is unknown and there are no bytes in the offset buffer,\n        // set the data buffer size to 0.\n        array_view->buffer_views[2].size_bytes = 0;\n      }\n      break;\n\n    case NANOARROW_TYPE_LARGE_STRING:\n    case NANOARROW_TYPE_LARGE_BINARY:\n      if (array_view->buffer_views[1].size_bytes != 0) {\n        first_offset = array_view->buffer_views[1].data.as_int64[array_view->offset];\n        if (first_offset < 0) {\n          ArrowErrorSet(error, \"Expected first offset >= 0 but found %\" PRId64,\n                        first_offset);\n          return EINVAL;\n        }\n\n        last_offset = array_view->buffer_views[1].data.as_int64[offset_plus_length];\n        if (last_offset < 0) {\n          ArrowErrorSet(error, \"Expected last offset >= 0 but found %\" PRId64,\n                        last_offset);\n          return EINVAL;\n        }\n\n        // If the data buffer size is unknown, assign it; otherwise, check it\n        if (array_view->buffer_views[2].size_bytes == -1) {\n          array_view->buffer_views[2].size_bytes = last_offset;\n        } else if (array_view->buffer_views[2].size_bytes < last_offset) {\n          ArrowErrorSet(error,\n                        \"Expected %s array buffer 2 to have size >= %\" PRId64\n                        \" bytes but found \"\n                        \"buffer with %\" PRId64 \" bytes\",\n                        ArrowTypeString(array_view->storage_type), last_offset,\n                        array_view->buffer_views[2].size_bytes);\n          return EINVAL;\n        }\n      } else if (array_view->buffer_views[2].size_bytes == -1) {\n        // If the data buffer size is unknown and there are no bytes in the offset\n        // buffer, set the data buffer size to 0.\n        array_view->buffer_views[2].size_bytes = 0;\n      }\n      break;\n\n    case NANOARROW_TYPE_STRUCT:\n      for (int64_t i = 0; i < array_view->n_children; i++) {\n        if (array_view->children[i]->length < offset_plus_length) {\n          ArrowErrorSet(error,\n                        \"Expected struct child %\" PRId64 \" to have length >= %\" PRId64\n                        \" but found child with \"\n                        \"length %\" PRId64,\n                        i + 1, offset_plus_length, array_view->children[i]->length);\n          return EINVAL;\n        }\n      }\n      break;\n\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_MAP:\n      if (array_view->buffer_views[1].size_bytes != 0) {\n        first_offset = array_view->buffer_views[1].data.as_int32[array_view->offset];\n        if (first_offset < 0) {\n          ArrowErrorSet(error, \"Expected first offset >= 0 but found %\" PRId64,\n                        first_offset);\n          return EINVAL;\n        }\n\n        last_offset = array_view->buffer_views[1].data.as_int32[offset_plus_length];\n        if (last_offset < 0) {\n          ArrowErrorSet(error, \"Expected last offset >= 0 but found %\" PRId64,\n                        last_offset);\n          return EINVAL;\n        }\n\n        if (array_view->children[0]->length < last_offset) {\n          ArrowErrorSet(error,\n                        \"Expected child of %s array to have length >= %\" PRId64\n                        \" but found array with \"\n                        \"length %\" PRId64,\n                        ArrowTypeString(array_view->storage_type), last_offset,\n                        array_view->children[0]->length);\n          return EINVAL;\n        }\n      }\n      break;\n\n    case NANOARROW_TYPE_LARGE_LIST:\n      if (array_view->buffer_views[1].size_bytes != 0) {\n        first_offset = array_view->buffer_views[1].data.as_int64[array_view->offset];\n        if (first_offset < 0) {\n          ArrowErrorSet(error, \"Expected first offset >= 0 but found %\" PRId64,\n                        first_offset);\n          return EINVAL;\n        }\n\n        last_offset = array_view->buffer_views[1].data.as_int64[offset_plus_length];\n        if (last_offset < 0) {\n          ArrowErrorSet(error, \"Expected last offset >= 0 but found %\" PRId64,\n                        last_offset);\n          return EINVAL;\n        }\n\n        if (array_view->children[0]->length < last_offset) {\n          ArrowErrorSet(error,\n                        \"Expected child of %s array to have length >= %\" PRId64\n                        \" but found array \"\n                        \"with length %\" PRId64,\n                        ArrowTypeString(array_view->storage_type), last_offset,\n                        array_view->children[0]->length);\n          return EINVAL;\n        }\n      }\n      break;\n\n    case NANOARROW_TYPE_RUN_END_ENCODED: {\n      struct ArrowArrayView* run_ends_view = array_view->children[0];\n      if (run_ends_view->length == 0) {\n        break;\n      }\n\n      int64_t first_run_end = ArrowArrayViewGetIntUnsafe(run_ends_view, 0);\n      if (first_run_end < 1) {\n        ArrowErrorSet(\n            error,\n            \"All run ends must be greater than 0 but the first run end is %\" PRId64,\n            first_run_end);\n        return EINVAL;\n      }\n\n      // offset + length < INT64_MAX is checked in ArrowArrayViewValidateMinimal()\n      int64_t last_run_end =\n          ArrowArrayViewGetIntUnsafe(run_ends_view, run_ends_view->length - 1);\n      if (last_run_end < offset_plus_length) {\n        ArrowErrorSet(error,\n                      \"Last run end is %\" PRId64 \" but it should be >= (%\" PRId64\n                      \" + %\" PRId64 \")\",\n                      last_run_end, array_view->offset, array_view->length);\n        return EINVAL;\n      }\n      break;\n    }\n    default:\n      break;\n  }\n\n  // Recurse for children\n  for (int64_t i = 0; i < array_view->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(\n        ArrowArrayViewValidateDefault(array_view->children[i], error));\n  }\n\n  // Recurse for dictionary\n  if (array_view->dictionary != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateDefault(array_view->dictionary, error));\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayViewSetArray(struct ArrowArrayView* array_view,\n                                      const struct ArrowArray* array,\n                                      struct ArrowError* error) {\n  // Extract information from the array into the array view\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewSetArrayInternal(array_view, array, error));\n\n  // Run default validation. Because we've marked all non-NULL buffers as having unknown\n  // size, validation will also update the buffer sizes as it goes.\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateDefault(array_view, error));\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayViewSetArrayMinimal(struct ArrowArrayView* array_view,\n                                             const struct ArrowArray* array,\n                                             struct ArrowError* error) {\n  // Extract information from the array into the array view\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewSetArrayInternal(array_view, array, error));\n\n  // Run default validation. Because we've marked all non-NULL buffers as having unknown\n  // size, validation will also update the buffer sizes as it goes.\n  NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateMinimal(array_view, error));\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowAssertIncreasingInt32(struct ArrowBufferView view,\n                                      struct ArrowError* error) {\n  if (view.size_bytes <= (int64_t)sizeof(int32_t)) {\n    return NANOARROW_OK;\n  }\n\n  for (int64_t i = 1; i < view.size_bytes / (int64_t)sizeof(int32_t); i++) {\n    if (view.data.as_int32[i] < view.data.as_int32[i - 1]) {\n      ArrowErrorSet(error, \"[%\" PRId64 \"] Expected element size >= 0\", i);\n      return EINVAL;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowAssertIncreasingInt64(struct ArrowBufferView view,\n                                      struct ArrowError* error) {\n  if (view.size_bytes <= (int64_t)sizeof(int64_t)) {\n    return NANOARROW_OK;\n  }\n\n  for (int64_t i = 1; i < view.size_bytes / (int64_t)sizeof(int64_t); i++) {\n    if (view.data.as_int64[i] < view.data.as_int64[i - 1]) {\n      ArrowErrorSet(error, \"[%\" PRId64 \"] Expected element size >= 0\", i);\n      return EINVAL;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowAssertRangeInt8(struct ArrowBufferView view, int8_t min_value,\n                                int8_t max_value, struct ArrowError* error) {\n  for (int64_t i = 0; i < view.size_bytes; i++) {\n    if (view.data.as_int8[i] < min_value || view.data.as_int8[i] > max_value) {\n      ArrowErrorSet(error,\n                    \"[%\" PRId64 \"] Expected buffer value between %\" PRId8 \" and %\" PRId8\n                    \" but found value %\" PRId8,\n                    i, min_value, max_value, view.data.as_int8[i]);\n      return EINVAL;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowAssertInt8In(struct ArrowBufferView view, const int8_t* values,\n                             int64_t n_values, struct ArrowError* error) {\n  for (int64_t i = 0; i < view.size_bytes; i++) {\n    int item_found = 0;\n    for (int64_t j = 0; j < n_values; j++) {\n      if (view.data.as_int8[i] == values[j]) {\n        item_found = 1;\n        break;\n      }\n    }\n\n    if (!item_found) {\n      ArrowErrorSet(error, \"[%\" PRId64 \"] Unexpected buffer value %\" PRId8, i,\n                    view.data.as_int8[i]);\n      return EINVAL;\n    }\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic int ArrowArrayViewValidateFull(struct ArrowArrayView* array_view,\n                                      struct ArrowError* error) {\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    switch (array_view->layout.buffer_type[i]) {\n      // Only validate the portion of the buffer that is strictly required,\n      // which includes not validating the offset buffer of a zero-length array.\n      case NANOARROW_BUFFER_TYPE_DATA_OFFSET:\n        if (array_view->length == 0) {\n          continue;\n        }\n        if (array_view->layout.element_size_bits[i] == 32) {\n          struct ArrowBufferView sliced_offsets;\n          sliced_offsets.data.as_int32 =\n              array_view->buffer_views[i].data.as_int32 + array_view->offset;\n          sliced_offsets.size_bytes = (array_view->length + 1) * sizeof(int32_t);\n          NANOARROW_RETURN_NOT_OK(ArrowAssertIncreasingInt32(sliced_offsets, error));\n        } else {\n          struct ArrowBufferView sliced_offsets;\n          sliced_offsets.data.as_int64 =\n              array_view->buffer_views[i].data.as_int64 + array_view->offset;\n          sliced_offsets.size_bytes = (array_view->length + 1) * sizeof(int64_t);\n          NANOARROW_RETURN_NOT_OK(ArrowAssertIncreasingInt64(sliced_offsets, error));\n        }\n        break;\n      default:\n        break;\n    }\n  }\n\n  if (array_view->storage_type == NANOARROW_TYPE_DENSE_UNION ||\n      array_view->storage_type == NANOARROW_TYPE_SPARSE_UNION) {\n    struct ArrowBufferView sliced_type_ids;\n    sliced_type_ids.size_bytes = array_view->length * sizeof(int8_t);\n    if (array_view->length > 0) {\n      sliced_type_ids.data.as_int8 =\n          array_view->buffer_views[0].data.as_int8 + array_view->offset;\n    } else {\n      sliced_type_ids.data.as_int8 = NULL;\n    }\n\n    if (array_view->union_type_id_map == NULL) {\n      // If the union_type_id map is NULL (e.g., when using ArrowArrayInitFromType() +\n      // ArrowArrayAllocateChildren() + ArrowArrayFinishBuilding()), we don't have enough\n      // information to validate this buffer.\n      ArrowErrorSet(error,\n                    \"Insufficient information provided for validation of union array\");\n      return EINVAL;\n    } else if (_ArrowParsedUnionTypeIdsWillEqualChildIndices(\n                   array_view->union_type_id_map, array_view->n_children,\n                   array_view->n_children)) {\n      NANOARROW_RETURN_NOT_OK(ArrowAssertRangeInt8(\n          sliced_type_ids, 0, (int8_t)(array_view->n_children - 1), error));\n    } else {\n      NANOARROW_RETURN_NOT_OK(ArrowAssertInt8In(sliced_type_ids,\n                                                array_view->union_type_id_map + 128,\n                                                array_view->n_children, error));\n    }\n  }\n\n  if (array_view->storage_type == NANOARROW_TYPE_DENSE_UNION &&\n      array_view->union_type_id_map != NULL) {\n    // Check that offsets refer to child elements that actually exist\n    for (int64_t i = 0; i < array_view->length; i++) {\n      int8_t child_id = ArrowArrayViewUnionChildIndex(array_view, i);\n      int64_t offset = ArrowArrayViewUnionChildOffset(array_view, i);\n      int64_t child_length = array_view->children[child_id]->length;\n      if (offset < 0 || offset > child_length) {\n        ArrowErrorSet(error,\n                      \"[%\" PRId64 \"] Expected union offset for child id %\" PRId8\n                      \" to be between 0 and %\" PRId64\n                      \" but \"\n                      \"found offset value %\" PRId64,\n                      i, child_id, child_length, offset);\n        return EINVAL;\n      }\n    }\n  }\n\n  if (array_view->storage_type == NANOARROW_TYPE_RUN_END_ENCODED) {\n    struct ArrowArrayView* run_ends_view = array_view->children[0];\n    if (run_ends_view->length > 0) {\n      int64_t last_run_end = ArrowArrayViewGetIntUnsafe(run_ends_view, 0);\n      for (int64_t i = 1; i < run_ends_view->length; i++) {\n        const int64_t run_end = ArrowArrayViewGetIntUnsafe(run_ends_view, i);\n        if (run_end <= last_run_end) {\n          ArrowErrorSet(\n              error,\n              \"Every run end must be strictly greater than the previous run end, \"\n              \"but run_ends[%\" PRId64 \" is %\" PRId64 \" and run_ends[%\" PRId64\n              \"] is %\" PRId64,\n              i, run_end, i - 1, last_run_end);\n          return EINVAL;\n        }\n        last_run_end = run_end;\n      }\n    }\n  }\n\n  if (array_view->storage_type == NANOARROW_TYPE_LIST_VIEW ||\n      array_view->storage_type == NANOARROW_TYPE_LARGE_LIST_VIEW) {\n    int64_t child_len = array_view->children[0]->length;\n\n    struct ArrowBufferView offsets, sizes;\n    offsets.data.data = array_view->buffer_views[1].data.data;\n    sizes.data.data = array_view->buffer_views[2].data.data;\n\n    for (int64_t i = array_view->offset; i < array_view->length + array_view->offset;\n         i++) {\n      int64_t offset, size;\n      if (array_view->storage_type == NANOARROW_TYPE_LIST_VIEW) {\n        offset = offsets.data.as_int32[i];\n        size = sizes.data.as_int32[i];\n      } else {\n        offset = offsets.data.as_int64[i];\n        size = sizes.data.as_int64[i];\n      }\n\n      if (offset < 0) {\n        ArrowErrorSet(error, \"Invalid negative offset %\" PRId64 \" at index %\" PRId64,\n                      offset, i);\n        return EINVAL;\n      }\n\n      if (size < 0) {\n        ArrowErrorSet(error, \"Invalid negative size %\" PRId64 \" at index %\" PRId64, size,\n                      i);\n        return EINVAL;\n      }\n\n      if ((offset + size) > child_len) {\n        ArrowErrorSet(error,\n                      \"Offset: %\" PRId64 \" + size: %\" PRId64 \" at index: %\" PRId64\n                      \" exceeds length of child view: %\" PRId64,\n                      offset, size, i, child_len);\n        return EINVAL;\n      }\n    }\n  }\n\n  // Recurse for children\n  for (int64_t i = 0; i < array_view->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateFull(array_view->children[i], error));\n  }\n\n  // Dictionary validation not implemented\n  if (array_view->dictionary != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateFull(array_view->dictionary, error));\n    // TODO: validate the indices\n  }\n\n  return NANOARROW_OK;\n}\n\nArrowErrorCode ArrowArrayViewValidate(struct ArrowArrayView* array_view,\n                                      enum ArrowValidationLevel validation_level,\n                                      struct ArrowError* error) {\n  switch (validation_level) {\n    case NANOARROW_VALIDATION_LEVEL_NONE:\n      return NANOARROW_OK;\n    case NANOARROW_VALIDATION_LEVEL_MINIMAL:\n      return ArrowArrayViewValidateMinimal(array_view, error);\n    case NANOARROW_VALIDATION_LEVEL_DEFAULT:\n      return ArrowArrayViewValidateDefault(array_view, error);\n    case NANOARROW_VALIDATION_LEVEL_FULL:\n      NANOARROW_RETURN_NOT_OK(ArrowArrayViewValidateDefault(array_view, error));\n      return ArrowArrayViewValidateFull(array_view, error);\n  }\n\n  ArrowErrorSet(error, \"validation_level not recognized\");\n  return EINVAL;\n}\n\nstruct ArrowComparisonInternalState {\n  enum ArrowCompareLevel level;\n  int is_equal;\n  struct ArrowError* reason;\n};\n\nNANOARROW_CHECK_PRINTF_ATTRIBUTE static void ArrowComparePrependPath(\n    struct ArrowError* out, const char* fmt, ...) {\n  if (out == NULL) {\n    return;\n  }\n\n  char prefix[128];\n  prefix[0] = '\\0';\n  va_list args;\n  va_start(args, fmt);\n  int prefix_len = vsnprintf(prefix, sizeof(prefix), fmt, args);\n  va_end(args);\n\n  if (prefix_len <= 0) {\n    return;\n  }\n\n  size_t out_len = strlen(out->message);\n  size_t out_len_to_move = sizeof(struct ArrowError) - prefix_len - 1;\n  if (out_len_to_move > out_len) {\n    out_len_to_move = out_len;\n  }\n\n  memmove(out->message + prefix_len, out->message, out_len_to_move);\n  memcpy(out->message, prefix, prefix_len);\n  out->message[out_len + prefix_len] = '\\0';\n}\n\n#define SET_NOT_EQUAL_AND_RETURN_IF_IMPL(cond_, state_, reason_) \\\n  do {                                                           \\\n    if (cond_) {                                                 \\\n      ArrowErrorSet(state_->reason, \": %s\", reason_);            \\\n      state_->is_equal = 0;                                      \\\n      return;                                                    \\\n    }                                                            \\\n  } while (0)\n\n#define SET_NOT_EQUAL_AND_RETURN_IF(condition_, state_) \\\n  SET_NOT_EQUAL_AND_RETURN_IF_IMPL(condition_, state_, #condition_)\n\nstatic void ArrowArrayViewCompareBuffer(const struct ArrowArrayView* actual,\n                                        const struct ArrowArrayView* expected, int i,\n                                        struct ArrowComparisonInternalState* state) {\n  SET_NOT_EQUAL_AND_RETURN_IF(\n      actual->buffer_views[i].size_bytes != expected->buffer_views[i].size_bytes, state);\n\n  int64_t buffer_size = actual->buffer_views[i].size_bytes;\n  if (buffer_size > 0) {\n    SET_NOT_EQUAL_AND_RETURN_IF(\n        memcmp(actual->buffer_views[i].data.data, expected->buffer_views[i].data.data,\n               buffer_size) != 0,\n        state);\n  }\n}\n\nstatic void ArrowArrayViewCompareIdentical(const struct ArrowArrayView* actual,\n                                           const struct ArrowArrayView* expected,\n                                           struct ArrowComparisonInternalState* state) {\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->storage_type != expected->storage_type, state);\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->n_children != expected->n_children, state);\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->dictionary == NULL && expected->dictionary != NULL,\n                              state);\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->dictionary != NULL && expected->dictionary == NULL,\n                              state);\n\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->length != expected->length, state);\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->offset != expected->offset, state);\n  SET_NOT_EQUAL_AND_RETURN_IF(actual->null_count != expected->null_count, state);\n\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    ArrowArrayViewCompareBuffer(actual, expected, i, state);\n    if (!state->is_equal) {\n      ArrowComparePrependPath(state->reason, \".buffers[%d]\", i);\n      return;\n    }\n  }\n\n  for (int64_t i = 0; i < actual->n_children; i++) {\n    ArrowArrayViewCompareIdentical(actual->children[i], expected->children[i], state);\n    if (!state->is_equal) {\n      ArrowComparePrependPath(state->reason, \".children[%\" PRId64 \"]\", i);\n      return;\n    }\n  }\n\n  if (actual->dictionary != NULL) {\n    ArrowArrayViewCompareIdentical(actual->dictionary, expected->dictionary, state);\n    if (!state->is_equal) {\n      ArrowComparePrependPath(state->reason, \".dictionary\");\n      return;\n    }\n  }\n}\n\n// Top-level entry point to take care of creating, cleaning up, and\n// propagating the ArrowComparisonInternalState to the caller\nArrowErrorCode ArrowArrayViewCompare(const struct ArrowArrayView* actual,\n                                     const struct ArrowArrayView* expected,\n                                     enum ArrowCompareLevel level, int* out,\n                                     struct ArrowError* reason) {\n  struct ArrowComparisonInternalState state;\n  state.level = level;\n  state.is_equal = 1;\n  state.reason = reason;\n\n  switch (level) {\n    case NANOARROW_COMPARE_IDENTICAL:\n      ArrowArrayViewCompareIdentical(actual, expected, &state);\n      break;\n    default:\n      return EINVAL;\n  }\n\n  *out = state.is_equal;\n  if (!state.is_equal) {\n    ArrowComparePrependPath(state.reason, \"root\");\n  }\n\n  return NANOARROW_OK;\n}\n\n#undef SET_NOT_EQUAL_AND_RETURN_IF\n#undef SET_NOT_EQUAL_AND_RETURN_IF_IMPL\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#include <errno.h>\n\n#include \"nanoarrow/nanoarrow.h\"\n\nstruct BasicArrayStreamPrivate {\n  struct ArrowSchema schema;\n  int64_t n_arrays;\n  struct ArrowArray* arrays;\n  int64_t arrays_i;\n};\n\nstatic int ArrowBasicArrayStreamGetSchema(struct ArrowArrayStream* array_stream,\n                                          struct ArrowSchema* schema) {\n  if (array_stream == NULL || array_stream->release == NULL) {\n    return EINVAL;\n  }\n\n  struct BasicArrayStreamPrivate* private_data =\n      (struct BasicArrayStreamPrivate*)array_stream->private_data;\n  return ArrowSchemaDeepCopy(&private_data->schema, schema);\n}\n\nstatic int ArrowBasicArrayStreamGetNext(struct ArrowArrayStream* array_stream,\n                                        struct ArrowArray* array) {\n  if (array_stream == NULL || array_stream->release == NULL) {\n    return EINVAL;\n  }\n\n  struct BasicArrayStreamPrivate* private_data =\n      (struct BasicArrayStreamPrivate*)array_stream->private_data;\n\n  if (private_data->arrays_i == private_data->n_arrays) {\n    array->release = NULL;\n    return NANOARROW_OK;\n  }\n\n  ArrowArrayMove(&private_data->arrays[private_data->arrays_i++], array);\n  return NANOARROW_OK;\n}\n\nstatic const char* ArrowBasicArrayStreamGetLastError(\n    struct ArrowArrayStream* array_stream) {\n  NANOARROW_UNUSED(array_stream);\n  return NULL;\n}\n\nstatic void ArrowBasicArrayStreamRelease(struct ArrowArrayStream* array_stream) {\n  if (array_stream == NULL || array_stream->release == NULL) {\n    return;\n  }\n\n  struct BasicArrayStreamPrivate* private_data =\n      (struct BasicArrayStreamPrivate*)array_stream->private_data;\n\n  if (private_data->schema.release != NULL) {\n    ArrowSchemaRelease(&private_data->schema);\n  }\n\n  for (int64_t i = 0; i < private_data->n_arrays; i++) {\n    if (private_data->arrays[i].release != NULL) {\n      ArrowArrayRelease(&private_data->arrays[i]);\n    }\n  }\n\n  if (private_data->arrays != NULL) {\n    ArrowFree(private_data->arrays);\n  }\n\n  ArrowFree(private_data);\n  array_stream->release = NULL;\n}\n\nArrowErrorCode ArrowBasicArrayStreamInit(struct ArrowArrayStream* array_stream,\n                                         struct ArrowSchema* schema, int64_t n_arrays) {\n  struct BasicArrayStreamPrivate* private_data =\n      (struct BasicArrayStreamPrivate*)ArrowMalloc(\n          sizeof(struct BasicArrayStreamPrivate));\n  if (private_data == NULL) {\n    return ENOMEM;\n  }\n\n  ArrowSchemaMove(schema, &private_data->schema);\n\n  private_data->n_arrays = n_arrays;\n  private_data->arrays = NULL;\n  private_data->arrays_i = 0;\n\n  if (n_arrays > 0) {\n    private_data->arrays =\n        (struct ArrowArray*)ArrowMalloc(n_arrays * sizeof(struct ArrowArray));\n    if (private_data->arrays == NULL) {\n      ArrowBasicArrayStreamRelease(array_stream);\n      ArrowFree(private_data);\n      return ENOMEM;\n    }\n  }\n\n  for (int64_t i = 0; i < private_data->n_arrays; i++) {\n    private_data->arrays[i].release = NULL;\n  }\n\n  array_stream->get_schema = &ArrowBasicArrayStreamGetSchema;\n  array_stream->get_next = &ArrowBasicArrayStreamGetNext;\n  array_stream->get_last_error = ArrowBasicArrayStreamGetLastError;\n  array_stream->release = ArrowBasicArrayStreamRelease;\n  array_stream->private_data = private_data;\n  return NANOARROW_OK;\n}\n\nvoid ArrowBasicArrayStreamSetArray(struct ArrowArrayStream* array_stream, int64_t i,\n                                   struct ArrowArray* array) {\n  struct BasicArrayStreamPrivate* private_data =\n      (struct BasicArrayStreamPrivate*)array_stream->private_data;\n  ArrowArrayMove(array, &private_data->arrays[i]);\n}\n\nArrowErrorCode ArrowBasicArrayStreamValidate(const struct ArrowArrayStream* array_stream,\n                                             struct ArrowError* error) {\n  struct BasicArrayStreamPrivate* private_data =\n      (struct BasicArrayStreamPrivate*)array_stream->private_data;\n\n  struct ArrowArrayView array_view;\n  NANOARROW_RETURN_NOT_OK(\n      ArrowArrayViewInitFromSchema(&array_view, &private_data->schema, error));\n\n  for (int64_t i = 0; i < private_data->n_arrays; i++) {\n    if (private_data->arrays[i].release != NULL) {\n      int result = ArrowArrayViewSetArray(&array_view, &private_data->arrays[i], error);\n      if (result != NANOARROW_OK) {\n        ArrowArrayViewReset(&array_view);\n        return result;\n      }\n    }\n  }\n\n  ArrowArrayViewReset(&array_view);\n  return NANOARROW_OK;\n}\n"
  },
  {
    "path": "data/lang/cpp/nanoarrow/nanoarrow.h",
    "content": "// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#ifndef NANOARROW_CONFIG_H_INCLUDED\n#define NANOARROW_CONFIG_H_INCLUDED\n\n#define NANOARROW_VERSION_MAJOR 0\n#define NANOARROW_VERSION_MINOR 8\n#define NANOARROW_VERSION_PATCH 0\n#define NANOARROW_VERSION \"0.8.0\"\n\n#define NANOARROW_VERSION_INT                                        \\\n  (NANOARROW_VERSION_MAJOR * 10000 + NANOARROW_VERSION_MINOR * 100 + \\\n   NANOARROW_VERSION_PATCH)\n\n// #define NANOARROW_NAMESPACE YourNamespaceHere\n\n#if !defined(NANOARROW_CXX_NAMESPACE)\n#define NANOARROW_CXX_NAMESPACE nanoarrow\n#endif\n\n#define NANOARROW_CXX_NAMESPACE_BEGIN namespace NANOARROW_CXX_NAMESPACE {\n#define NANOARROW_CXX_NAMESPACE_END }\n\n#endif\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#ifndef NANOARROW_NANOARROW_TYPES_H_INCLUDED\n#define NANOARROW_NANOARROW_TYPES_H_INCLUDED\n\n#include <stdint.h>\n#include <string.h>\n\n\n\n#if defined(NANOARROW_DEBUG) && !defined(NANOARROW_PRINT_AND_DIE)\n#include <stdio.h>\n#include <stdlib.h>\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n// Extra guard for versions of Arrow without the canonical guard\n#ifndef ARROW_FLAG_DICTIONARY_ORDERED\n\n/// \\defgroup nanoarrow-arrow-cdata Arrow C Data interface\n///\n/// The Arrow C Data (https://arrow.apache.org/docs/format/CDataInterface.html)\n/// and Arrow C Stream (https://arrow.apache.org/docs/format/CStreamInterface.html)\n/// interfaces are part of the\n/// Arrow Columnar Format specification\n/// (https://arrow.apache.org/docs/format/Columnar.html). See the Arrow documentation for\n/// documentation of these structures.\n///\n/// @{\n\n#ifndef ARROW_C_DATA_INTERFACE\n#define ARROW_C_DATA_INTERFACE\n\n#define ARROW_FLAG_DICTIONARY_ORDERED 1\n#define ARROW_FLAG_NULLABLE 2\n#define ARROW_FLAG_MAP_KEYS_SORTED 4\n\nstruct ArrowSchema {\n  // Array type description\n  const char* format;\n  const char* name;\n  const char* metadata;\n  int64_t flags;\n  int64_t n_children;\n  struct ArrowSchema** children;\n  struct ArrowSchema* dictionary;\n\n  // Release callback\n  void (*release)(struct ArrowSchema*);\n  // Opaque producer-specific data\n  void* private_data;\n};\n\nstruct ArrowArray {\n  // Array data description\n  int64_t length;\n  int64_t null_count;\n  int64_t offset;\n  int64_t n_buffers;\n  int64_t n_children;\n  const void** buffers;\n  struct ArrowArray** children;\n  struct ArrowArray* dictionary;\n\n  // Release callback\n  void (*release)(struct ArrowArray*);\n  // Opaque producer-specific data\n  void* private_data;\n};\n\n#endif  // ARROW_C_DATA_INTERFACE\n\n#ifndef ARROW_C_STREAM_INTERFACE\n#define ARROW_C_STREAM_INTERFACE\n\nstruct ArrowArrayStream {\n  // Callback to get the stream type\n  // (will be the same for all arrays in the stream).\n  //\n  // Return value: 0 if successful, an `errno`-compatible error code otherwise.\n  //\n  // If successful, the ArrowSchema must be released independently from the stream.\n  int (*get_schema)(struct ArrowArrayStream*, struct ArrowSchema* out);\n\n  // Callback to get the next array\n  // (if no error and the array is released, the stream has ended)\n  //\n  // Return value: 0 if successful, an `errno`-compatible error code otherwise.\n  //\n  // If successful, the ArrowArray must be released independently from the stream.\n  int (*get_next)(struct ArrowArrayStream*, struct ArrowArray* out);\n\n  // Callback to get optional detailed error information.\n  // This must only be called if the last stream operation failed\n  // with a non-0 return code.\n  //\n  // Return value: pointer to a null-terminated character array describing\n  // the last error, or NULL if no description is available.\n  //\n  // The returned pointer is only valid until the next operation on this stream\n  // (including release).\n  const char* (*get_last_error)(struct ArrowArrayStream*);\n\n  // Release callback: release the stream's own resources.\n  // Note that arrays returned by `get_next` must be individually released.\n  void (*release)(struct ArrowArrayStream*);\n\n  // Opaque producer-specific data\n  void* private_data;\n};\n\n#endif  // ARROW_C_STREAM_INTERFACE\n#endif  // ARROW_FLAG_DICTIONARY_ORDERED\n\n/// @}\n\n// Utility macros\n#define _NANOARROW_CONCAT(x, y) x##y\n#define _NANOARROW_MAKE_NAME(x, y) _NANOARROW_CONCAT(x, y)\n\n#define _NANOARROW_RETURN_NOT_OK_IMPL(NAME, EXPR) \\\n  do {                                            \\\n    const int NAME = (EXPR);                      \\\n    if (NAME) return NAME;                        \\\n  } while (0)\n\n#define _NANOARROW_CHECK_RANGE(x_, min_, max_) \\\n  NANOARROW_RETURN_NOT_OK((x_ >= min_ && x_ <= max_) ? NANOARROW_OK : EINVAL)\n\n#define _NANOARROW_CHECK_UPPER_LIMIT(x_, max_) \\\n  NANOARROW_RETURN_NOT_OK((x_ <= max_) ? NANOARROW_OK : EINVAL)\n\n#if defined(NANOARROW_DEBUG)\n#define _NANOARROW_RETURN_NOT_OK_WITH_ERROR_IMPL(NAME, EXPR, ERROR_PTR_EXPR, EXPR_STR) \\\n  do {                                                                                 \\\n    const int NAME = (EXPR);                                                           \\\n    if (NAME) {                                                                        \\\n      ArrowErrorSet((ERROR_PTR_EXPR), \"%s failed with errno %d\\n* %s:%d\", EXPR_STR,    \\\n                    NAME, __FILE__, __LINE__);                                         \\\n      return NAME;                                                                     \\\n    }                                                                                  \\\n  } while (0)\n#else\n#define _NANOARROW_RETURN_NOT_OK_WITH_ERROR_IMPL(NAME, EXPR, ERROR_PTR_EXPR, EXPR_STR) \\\n  do {                                                                                 \\\n    const int NAME = (EXPR);                                                           \\\n    if (NAME) {                                                                        \\\n      ArrowErrorSet((ERROR_PTR_EXPR), \"%s failed with errno %d\", EXPR_STR, NAME);      \\\n      return NAME;                                                                     \\\n    }                                                                                  \\\n  } while (0)\n#endif\n\n#if defined(NANOARROW_DEBUG)\n// For checking ArrowErrorSet() calls for valid printf format strings/arguments\n// If using mingw's c99-compliant printf, we need a different format-checking attribute\n#if defined(__USE_MINGW_ANSI_STDIO) && defined(__MINGW_PRINTF_FORMAT)\n#define NANOARROW_CHECK_PRINTF_ATTRIBUTE \\\n  __attribute__((format(__MINGW_PRINTF_FORMAT, 2, 3)))\n#elif defined(__GNUC__)\n#define NANOARROW_CHECK_PRINTF_ATTRIBUTE __attribute__((format(printf, 2, 3)))\n#else\n#define NANOARROW_CHECK_PRINTF_ATTRIBUTE\n#endif\n\n// For checking calls to functions that return ArrowErrorCode\n#if defined(__GNUC__) && (__GNUC__ >= 4)\n#define NANOARROW_CHECK_RETURN_ATTRIBUTE __attribute__((warn_unused_result))\n#elif defined(_MSC_VER) && (_MSC_VER >= 1700)\n#define NANOARROW_CHECK_RETURN_ATTRIBUTE _Check_return_\n#else\n#define NANOARROW_CHECK_RETURN_ATTRIBUTE\n#endif\n\n#else\n#define NANOARROW_CHECK_RETURN_ATTRIBUTE\n#define NANOARROW_CHECK_PRINTF_ATTRIBUTE\n#endif\n\n#define NANOARROW_UNUSED(x) (void)(x)\n\n/// \\brief Return code for success.\n/// \\ingroup nanoarrow-errors\n#define NANOARROW_OK 0\n\n/// \\brief Represents an errno-compatible error code\n/// \\ingroup nanoarrow-errors\ntypedef int ArrowErrorCode;\n\n#if defined(NANOARROW_DEBUG)\n#define ArrowErrorCode NANOARROW_CHECK_RETURN_ATTRIBUTE ArrowErrorCode\n#endif\n\n/// \\brief Flags supported by ArrowSchemaViewInit()\n/// \\ingroup nanoarrow-schema-view\n#define NANOARROW_FLAG_ALL_SUPPORTED \\\n  (ARROW_FLAG_DICTIONARY_ORDERED | ARROW_FLAG_NULLABLE | ARROW_FLAG_MAP_KEYS_SORTED)\n\n/// \\brief Error type containing a UTF-8 encoded message.\n/// \\ingroup nanoarrow-errors\nstruct ArrowError {\n  /// \\brief A character buffer with space for an error message.\n  char message[1024];\n};\n\n/// \\brief Ensure an ArrowError is null-terminated by zeroing the first character.\n/// \\ingroup nanoarrow-errors\n///\n/// If error is NULL, this function does nothing.\nstatic inline void ArrowErrorInit(struct ArrowError* error) {\n  if (error != NULL) {\n    error->message[0] = '\\0';\n  }\n}\n\n/// \\brief Get the contents of an error\n/// \\ingroup nanoarrow-errors\n///\n/// If error is NULL, returns \"\", or returns the contents of the error message\n/// otherwise.\nstatic inline const char* ArrowErrorMessage(struct ArrowError* error) {\n  if (error == NULL) {\n    return \"\";\n  } else {\n    return error->message;\n  }\n}\n\n/// \\brief Set the contents of an error from an existing null-terminated string\n/// \\ingroup nanoarrow-errors\n///\n/// If error is NULL, this function does nothing.\nstatic inline void ArrowErrorSetString(struct ArrowError* error, const char* src) {\n  if (error == NULL) {\n    return;\n  }\n\n  int64_t src_len = strlen(src);\n  if (src_len >= ((int64_t)sizeof(error->message))) {\n    memcpy(error->message, src, sizeof(error->message) - 1);\n    error->message[sizeof(error->message) - 1] = '\\0';\n  } else {\n    memcpy(error->message, src, src_len);\n    error->message[src_len] = '\\0';\n  }\n}\n\n/// \\brief Check the result of an expression and return it if not NANOARROW_OK\n/// \\ingroup nanoarrow-errors\n#define NANOARROW_RETURN_NOT_OK(EXPR) \\\n  _NANOARROW_RETURN_NOT_OK_IMPL(_NANOARROW_MAKE_NAME(errno_status_, __COUNTER__), EXPR)\n\n/// \\brief Check the result of an expression and return it if not NANOARROW_OK,\n/// adding an auto-generated message to an ArrowError.\n/// \\ingroup nanoarrow-errors\n///\n/// This macro is used to ensure that functions that accept an ArrowError\n/// as input always set its message when returning an error code (e.g., when calling\n/// a nanoarrow function that does *not* accept ArrowError).\n#define NANOARROW_RETURN_NOT_OK_WITH_ERROR(EXPR, ERROR_EXPR) \\\n  _NANOARROW_RETURN_NOT_OK_WITH_ERROR_IMPL(                  \\\n      _NANOARROW_MAKE_NAME(errno_status_, __COUNTER__), EXPR, ERROR_EXPR, #EXPR)\n\n#if defined(NANOARROW_DEBUG) && !defined(NANOARROW_PRINT_AND_DIE)\n#define NANOARROW_PRINT_AND_DIE(VALUE, EXPR_STR)                                 \\\n  do {                                                                           \\\n    fprintf(stderr, \"%s failed with code %d\\n* %s:%d\\n\", EXPR_STR, (int)(VALUE), \\\n            __FILE__, (int)__LINE__);                                            \\\n    abort();                                                                     \\\n  } while (0)\n#endif\n\n#if defined(NANOARROW_DEBUG)\n#define _NANOARROW_ASSERT_OK_IMPL(NAME, EXPR, EXPR_STR) \\\n  do {                                                  \\\n    const int NAME = (EXPR);                            \\\n    if (NAME) NANOARROW_PRINT_AND_DIE(NAME, EXPR_STR);  \\\n  } while (0)\n\n/// \\brief Assert that an expression's value is NANOARROW_OK\n/// \\ingroup nanoarrow-errors\n///\n/// If nanoarrow was built in debug mode (i.e., defined(NANOARROW_DEBUG) is true),\n/// print a message to stderr and abort. If nanoarrow was built in release mode,\n/// this statement has no effect. You can customize fatal error behaviour\n/// be defining the NANOARROW_PRINT_AND_DIE macro before including nanoarrow.h\n/// This macro is provided as a convenience for users and is not used internally.\n#define NANOARROW_ASSERT_OK(EXPR) \\\n  _NANOARROW_ASSERT_OK_IMPL(_NANOARROW_MAKE_NAME(errno_status_, __COUNTER__), EXPR, #EXPR)\n\n#define _NANOARROW_DCHECK_IMPL(EXPR, EXPR_STR)          \\\n  do {                                                  \\\n    if (!(EXPR)) NANOARROW_PRINT_AND_DIE(-1, EXPR_STR); \\\n  } while (0)\n\n#define NANOARROW_DCHECK(EXPR) _NANOARROW_DCHECK_IMPL(EXPR, #EXPR)\n#else\n#define NANOARROW_ASSERT_OK(EXPR) (void)(EXPR)\n#define NANOARROW_DCHECK(EXPR)\n#endif\n\nstatic inline void ArrowSchemaMove(struct ArrowSchema* src, struct ArrowSchema* dst) {\n  NANOARROW_DCHECK(src != NULL);\n  NANOARROW_DCHECK(dst != NULL);\n\n  memcpy(dst, src, sizeof(struct ArrowSchema));\n  src->release = NULL;\n}\n\nstatic inline void ArrowSchemaRelease(struct ArrowSchema* schema) {\n  NANOARROW_DCHECK(schema != NULL);\n  schema->release(schema);\n  NANOARROW_DCHECK(schema->release == NULL);\n}\n\nstatic inline void ArrowArrayMove(struct ArrowArray* src, struct ArrowArray* dst) {\n  NANOARROW_DCHECK(src != NULL);\n  NANOARROW_DCHECK(dst != NULL);\n\n  memcpy(dst, src, sizeof(struct ArrowArray));\n  src->release = NULL;\n}\n\nstatic inline void ArrowArrayRelease(struct ArrowArray* array) {\n  NANOARROW_DCHECK(array != NULL);\n  array->release(array);\n  NANOARROW_DCHECK(array->release == NULL);\n}\n\nstatic inline void ArrowArrayStreamMove(struct ArrowArrayStream* src,\n                                        struct ArrowArrayStream* dst) {\n  NANOARROW_DCHECK(src != NULL);\n  NANOARROW_DCHECK(dst != NULL);\n\n  memcpy(dst, src, sizeof(struct ArrowArrayStream));\n  src->release = NULL;\n}\n\nstatic inline const char* ArrowArrayStreamGetLastError(\n    struct ArrowArrayStream* array_stream) {\n  NANOARROW_DCHECK(array_stream != NULL);\n\n  const char* value = array_stream->get_last_error(array_stream);\n  if (value == NULL) {\n    return \"\";\n  } else {\n    return value;\n  }\n}\n\nstatic inline ArrowErrorCode ArrowArrayStreamGetSchema(\n    struct ArrowArrayStream* array_stream, struct ArrowSchema* out,\n    struct ArrowError* error) {\n  NANOARROW_DCHECK(array_stream != NULL);\n\n  int result = array_stream->get_schema(array_stream, out);\n  if (result != NANOARROW_OK && error != NULL) {\n    ArrowErrorSetString(error, ArrowArrayStreamGetLastError(array_stream));\n  }\n\n  return result;\n}\n\nstatic inline ArrowErrorCode ArrowArrayStreamGetNext(\n    struct ArrowArrayStream* array_stream, struct ArrowArray* out,\n    struct ArrowError* error) {\n  NANOARROW_DCHECK(array_stream != NULL);\n\n  int result = array_stream->get_next(array_stream, out);\n  if (result != NANOARROW_OK && error != NULL) {\n    ArrowErrorSetString(error, ArrowArrayStreamGetLastError(array_stream));\n  }\n\n  return result;\n}\n\nstatic inline void ArrowArrayStreamRelease(struct ArrowArrayStream* array_stream) {\n  NANOARROW_DCHECK(array_stream != NULL);\n  array_stream->release(array_stream);\n  NANOARROW_DCHECK(array_stream->release == NULL);\n}\n\nstatic char _ArrowIsLittleEndian(void) {\n  uint32_t check = 1;\n  char first_byte;\n  memcpy(&first_byte, &check, sizeof(char));\n  return first_byte;\n}\n\n/// \\brief Arrow type enumerator\n/// \\ingroup nanoarrow-utils\n///\n/// These names are intended to map to the corresponding arrow::Type::type\n/// enumerator; however, the numeric values are specifically not equal\n/// (i.e., do not rely on numeric comparison).\nenum ArrowType {\n  NANOARROW_TYPE_UNINITIALIZED = 0,\n  NANOARROW_TYPE_NA = 1,\n  NANOARROW_TYPE_BOOL,\n  NANOARROW_TYPE_UINT8,\n  NANOARROW_TYPE_INT8,\n  NANOARROW_TYPE_UINT16,\n  NANOARROW_TYPE_INT16,\n  NANOARROW_TYPE_UINT32,\n  NANOARROW_TYPE_INT32,\n  NANOARROW_TYPE_UINT64,\n  NANOARROW_TYPE_INT64,\n  NANOARROW_TYPE_HALF_FLOAT,\n  NANOARROW_TYPE_FLOAT,\n  NANOARROW_TYPE_DOUBLE,\n  NANOARROW_TYPE_STRING,\n  NANOARROW_TYPE_BINARY,\n  NANOARROW_TYPE_FIXED_SIZE_BINARY,\n  NANOARROW_TYPE_DATE32,\n  NANOARROW_TYPE_DATE64,\n  NANOARROW_TYPE_TIMESTAMP,\n  NANOARROW_TYPE_TIME32,\n  NANOARROW_TYPE_TIME64,\n  NANOARROW_TYPE_INTERVAL_MONTHS,\n  NANOARROW_TYPE_INTERVAL_DAY_TIME,\n  NANOARROW_TYPE_DECIMAL128,\n  NANOARROW_TYPE_DECIMAL256,\n  NANOARROW_TYPE_LIST,\n  NANOARROW_TYPE_STRUCT,\n  NANOARROW_TYPE_SPARSE_UNION,\n  NANOARROW_TYPE_DENSE_UNION,\n  NANOARROW_TYPE_DICTIONARY,\n  NANOARROW_TYPE_MAP,\n  NANOARROW_TYPE_EXTENSION,\n  NANOARROW_TYPE_FIXED_SIZE_LIST,\n  NANOARROW_TYPE_DURATION,\n  NANOARROW_TYPE_LARGE_STRING,\n  NANOARROW_TYPE_LARGE_BINARY,\n  NANOARROW_TYPE_LARGE_LIST,\n  NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO,\n  NANOARROW_TYPE_RUN_END_ENCODED,\n  NANOARROW_TYPE_BINARY_VIEW,\n  NANOARROW_TYPE_STRING_VIEW,\n  NANOARROW_TYPE_DECIMAL32,\n  NANOARROW_TYPE_DECIMAL64,\n  NANOARROW_TYPE_LIST_VIEW,\n  NANOARROW_TYPE_LARGE_LIST_VIEW,\n};\n\n/// \\brief Get a string value of an enum ArrowType value\n/// \\ingroup nanoarrow-utils\n///\n/// Returns NULL for invalid values for type\nstatic inline const char* ArrowTypeString(enum ArrowType type);\n\nstatic inline const char* ArrowTypeString(enum ArrowType type) {\n  switch (type) {\n    case NANOARROW_TYPE_NA:\n      return \"na\";\n    case NANOARROW_TYPE_BOOL:\n      return \"bool\";\n    case NANOARROW_TYPE_UINT8:\n      return \"uint8\";\n    case NANOARROW_TYPE_INT8:\n      return \"int8\";\n    case NANOARROW_TYPE_UINT16:\n      return \"uint16\";\n    case NANOARROW_TYPE_INT16:\n      return \"int16\";\n    case NANOARROW_TYPE_UINT32:\n      return \"uint32\";\n    case NANOARROW_TYPE_INT32:\n      return \"int32\";\n    case NANOARROW_TYPE_UINT64:\n      return \"uint64\";\n    case NANOARROW_TYPE_INT64:\n      return \"int64\";\n    case NANOARROW_TYPE_HALF_FLOAT:\n      return \"half_float\";\n    case NANOARROW_TYPE_FLOAT:\n      return \"float\";\n    case NANOARROW_TYPE_DOUBLE:\n      return \"double\";\n    case NANOARROW_TYPE_STRING:\n      return \"string\";\n    case NANOARROW_TYPE_BINARY:\n      return \"binary\";\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n      return \"fixed_size_binary\";\n    case NANOARROW_TYPE_DATE32:\n      return \"date32\";\n    case NANOARROW_TYPE_DATE64:\n      return \"date64\";\n    case NANOARROW_TYPE_TIMESTAMP:\n      return \"timestamp\";\n    case NANOARROW_TYPE_TIME32:\n      return \"time32\";\n    case NANOARROW_TYPE_TIME64:\n      return \"time64\";\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n      return \"interval_months\";\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME:\n      return \"interval_day_time\";\n    case NANOARROW_TYPE_DECIMAL32:\n      return \"decimal32\";\n    case NANOARROW_TYPE_DECIMAL64:\n      return \"decimal64\";\n    case NANOARROW_TYPE_DECIMAL128:\n      return \"decimal128\";\n    case NANOARROW_TYPE_DECIMAL256:\n      return \"decimal256\";\n    case NANOARROW_TYPE_LIST:\n      return \"list\";\n    case NANOARROW_TYPE_STRUCT:\n      return \"struct\";\n    case NANOARROW_TYPE_SPARSE_UNION:\n      return \"sparse_union\";\n    case NANOARROW_TYPE_DENSE_UNION:\n      return \"dense_union\";\n    case NANOARROW_TYPE_DICTIONARY:\n      return \"dictionary\";\n    case NANOARROW_TYPE_MAP:\n      return \"map\";\n    case NANOARROW_TYPE_EXTENSION:\n      return \"extension\";\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      return \"fixed_size_list\";\n    case NANOARROW_TYPE_DURATION:\n      return \"duration\";\n    case NANOARROW_TYPE_LARGE_STRING:\n      return \"large_string\";\n    case NANOARROW_TYPE_LARGE_BINARY:\n      return \"large_binary\";\n    case NANOARROW_TYPE_LARGE_LIST:\n      return \"large_list\";\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO:\n      return \"interval_month_day_nano\";\n    case NANOARROW_TYPE_RUN_END_ENCODED:\n      return \"run_end_encoded\";\n    case NANOARROW_TYPE_BINARY_VIEW:\n      return \"binary_view\";\n    case NANOARROW_TYPE_STRING_VIEW:\n      return \"string_view\";\n    case NANOARROW_TYPE_LIST_VIEW:\n      return \"list_view\";\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      return \"large_list_view\";\n    default:\n      return NULL;\n  }\n}\n\n/// \\brief Arrow time unit enumerator\n/// \\ingroup nanoarrow-utils\n///\n/// These names and values map to the corresponding arrow::TimeUnit::type\n/// enumerator.\nenum ArrowTimeUnit {\n  NANOARROW_TIME_UNIT_SECOND = 0,\n  NANOARROW_TIME_UNIT_MILLI = 1,\n  NANOARROW_TIME_UNIT_MICRO = 2,\n  NANOARROW_TIME_UNIT_NANO = 3\n};\n\n/// \\brief Validation level enumerator\n/// \\ingroup nanoarrow-array\nenum ArrowValidationLevel {\n  /// \\brief Do not validate buffer sizes or content.\n  NANOARROW_VALIDATION_LEVEL_NONE = 0,\n\n  /// \\brief Validate buffer sizes that depend on array length but do not validate buffer\n  /// sizes that depend on buffer data access.\n  NANOARROW_VALIDATION_LEVEL_MINIMAL = 1,\n\n  /// \\brief Validate all buffer sizes, including those that require buffer data access,\n  /// but do not perform any checks that are O(1) along the length of the buffers.\n  NANOARROW_VALIDATION_LEVEL_DEFAULT = 2,\n\n  /// \\brief Validate all buffer sizes and all buffer content. This is useful in the\n  /// context of untrusted input or input that may have been corrupted in transit.\n  NANOARROW_VALIDATION_LEVEL_FULL = 3\n};\n\n/// \\brief Comparison level enumerator\n/// \\ingroup nanoarrow-utils\nenum ArrowCompareLevel {\n  /// \\brief Consider arrays equal if buffers contain identical content\n  /// and have identical offset, null count, and length. Note that this is\n  /// a much stricter check than logical equality, which would take into\n  /// account potentially different content of null slots, arrays with a\n  /// non-zero offset, and other considerations.\n  NANOARROW_COMPARE_IDENTICAL,\n};\n\n/// \\brief Get a string value of an enum ArrowTimeUnit value\n/// \\ingroup nanoarrow-utils\n///\n/// Returns NULL for invalid values for time_unit\nstatic inline const char* ArrowTimeUnitString(enum ArrowTimeUnit time_unit);\n\nstatic inline const char* ArrowTimeUnitString(enum ArrowTimeUnit time_unit) {\n  switch (time_unit) {\n    case NANOARROW_TIME_UNIT_SECOND:\n      return \"s\";\n    case NANOARROW_TIME_UNIT_MILLI:\n      return \"ms\";\n    case NANOARROW_TIME_UNIT_MICRO:\n      return \"us\";\n    case NANOARROW_TIME_UNIT_NANO:\n      return \"ns\";\n    default:\n      return NULL;\n  }\n}\n\n/// \\brief Functional types of buffers as described in the Arrow Columnar Specification\n/// \\ingroup nanoarrow-array-view\nenum ArrowBufferType {\n  NANOARROW_BUFFER_TYPE_NONE,\n  NANOARROW_BUFFER_TYPE_VALIDITY,\n  NANOARROW_BUFFER_TYPE_TYPE_ID,\n  NANOARROW_BUFFER_TYPE_UNION_OFFSET,\n  NANOARROW_BUFFER_TYPE_DATA_OFFSET,\n  NANOARROW_BUFFER_TYPE_DATA,\n  NANOARROW_BUFFER_TYPE_VARIADIC_DATA,\n  NANOARROW_BUFFER_TYPE_VARIADIC_SIZE,\n  NANOARROW_BUFFER_TYPE_VIEW_OFFSET,\n  NANOARROW_BUFFER_TYPE_SIZE,\n};\n\n/// \\brief The maximum number of fixed buffers in an ArrowArrayView or ArrowLayout\n/// \\ingroup nanoarrow-array-view\n#define NANOARROW_MAX_FIXED_BUFFERS 3\n\n/// \\brief An non-owning view of a string\n/// \\ingroup nanoarrow-utils\nstruct ArrowStringView {\n  /// \\brief A pointer to the start of the string\n  ///\n  /// If size_bytes is 0, this value may be NULL.\n  const char* data;\n\n  /// \\brief The size of the string in bytes,\n  ///\n  /// (Not including the null terminator.)\n  int64_t size_bytes;\n};\n\n/// \\brief Return a view of a const C string\n/// \\ingroup nanoarrow-utils\nstatic inline struct ArrowStringView ArrowCharView(const char* value);\n\nstatic inline struct ArrowStringView ArrowCharView(const char* value) {\n  struct ArrowStringView out;\n\n  out.data = value;\n  if (value) {\n    out.size_bytes = (int64_t)strlen(value);\n  } else {\n    out.size_bytes = 0;\n  }\n\n  return out;\n}\n\nunion ArrowBufferViewData {\n  const void* data;\n  const int8_t* as_int8;\n  const uint8_t* as_uint8;\n  const int16_t* as_int16;\n  const uint16_t* as_uint16;\n  const int32_t* as_int32;\n  const uint32_t* as_uint32;\n  const int64_t* as_int64;\n  const uint64_t* as_uint64;\n  const double* as_double;\n  const float* as_float;\n  const char* as_char;\n  const union ArrowBinaryView* as_binary_view;\n};\n\n/// \\brief An non-owning view of a buffer\n/// \\ingroup nanoarrow-utils\nstruct ArrowBufferView {\n  /// \\brief A pointer to the start of the buffer\n  ///\n  /// If size_bytes is 0, this value may be NULL.\n  union ArrowBufferViewData data;\n\n  /// \\brief The size of the buffer in bytes\n  int64_t size_bytes;\n};\n\n/// \\brief Array buffer allocation and deallocation\n/// \\ingroup nanoarrow-buffer\n///\n/// Container for allocate, reallocate, and free methods that can be used\n/// to customize allocation and deallocation of buffers when constructing\n/// an ArrowArray.\nstruct ArrowBufferAllocator {\n  /// \\brief Reallocate a buffer or return NULL if it cannot be reallocated\n  uint8_t* (*reallocate)(struct ArrowBufferAllocator* allocator, uint8_t* ptr,\n                         int64_t old_size, int64_t new_size);\n\n  /// \\brief Deallocate a buffer allocated by this allocator\n  void (*free)(struct ArrowBufferAllocator* allocator, uint8_t* ptr, int64_t size);\n\n  /// \\brief Opaque data specific to the allocator\n  void* private_data;\n};\n\ntypedef void (*ArrowBufferDeallocatorCallback)(struct ArrowBufferAllocator* allocator,\n                                               uint8_t* ptr, int64_t size);\n\n/// \\brief An owning mutable view of a buffer\n/// \\ingroup nanoarrow-buffer\nstruct ArrowBuffer {\n  /// \\brief A pointer to the start of the buffer\n  ///\n  /// If capacity_bytes is 0, this value may be NULL.\n  uint8_t* data;\n\n  /// \\brief The size of the buffer in bytes\n  int64_t size_bytes;\n\n  /// \\brief The capacity of the buffer in bytes\n  int64_t capacity_bytes;\n\n  /// \\brief The allocator that will be used to reallocate and/or free the buffer\n  struct ArrowBufferAllocator allocator;\n};\n\n/// \\brief An owning mutable view of a bitmap\n/// \\ingroup nanoarrow-bitmap\nstruct ArrowBitmap {\n  /// \\brief An ArrowBuffer to hold the allocated memory\n  struct ArrowBuffer buffer;\n\n  /// \\brief The number of bits that have been appended to the bitmap\n  int64_t size_bits;\n};\n\n/// \\brief A description of an arrangement of buffers\n/// \\ingroup nanoarrow-utils\n///\n/// Contains the minimum amount of information required to\n/// calculate the size of each buffer in an ArrowArray knowing only\n/// the length and offset of the array.\nstruct ArrowLayout {\n  /// \\brief The function of each buffer\n  enum ArrowBufferType buffer_type[NANOARROW_MAX_FIXED_BUFFERS];\n\n  /// \\brief The data type of each buffer\n  enum ArrowType buffer_data_type[NANOARROW_MAX_FIXED_BUFFERS];\n\n  /// \\brief The size of an element each buffer or 0 if this size is variable or unknown\n  int64_t element_size_bits[NANOARROW_MAX_FIXED_BUFFERS];\n\n  /// \\brief The number of elements in the child array per element in this array for a\n  /// fixed-size list\n  int64_t child_size_elements;\n};\n\n/// \\brief A non-owning view of an ArrowArray\n/// \\ingroup nanoarrow-array-view\n///\n/// This data structure provides access to the values contained within\n/// an ArrowArray with fields provided in a more readily-extractible\n/// form. You can re-use an ArrowArrayView for multiple ArrowArrays\n/// with the same storage type, use it to represent a hypothetical\n/// ArrowArray that does not exist yet, or use it to validate the buffers\n/// of a future ArrowArray.\nstruct ArrowArrayView {\n  /// \\brief The underlying ArrowArray or NULL if it has not been set or\n  /// if the buffers in this ArrowArrayView are not backed by an ArrowArray.\n  const struct ArrowArray* array;\n\n  /// \\brief The number of elements from the physical start of the buffers.\n  int64_t offset;\n\n  /// \\brief The number of elements in this view.\n  int64_t length;\n\n  /// \\brief A cached null count or -1 to indicate that this value is unknown.\n  int64_t null_count;\n\n  /// \\brief The type used to store values in this array\n  ///\n  /// This type represents only the minimum required information to\n  /// extract values from the array buffers (e.g., for a Date32 array,\n  /// this value will be NANOARROW_TYPE_INT32). For dictionary-encoded\n  /// arrays, this will be the index type.\n  enum ArrowType storage_type;\n\n  /// \\brief The buffer types, strides, and sizes of this Array's buffers\n  struct ArrowLayout layout;\n\n  /// \\brief This Array's buffers as ArrowBufferView objects\n  struct ArrowBufferView buffer_views[NANOARROW_MAX_FIXED_BUFFERS];\n\n  /// \\brief The number of children of this view\n  int64_t n_children;\n\n  /// \\brief Pointers to views of this array's children\n  struct ArrowArrayView** children;\n\n  /// \\brief Pointer to a view of this array's dictionary\n  struct ArrowArrayView* dictionary;\n\n  /// \\brief Union type id to child index mapping\n  ///\n  /// If storage_type is a union type, a 256-byte ArrowMalloc()ed buffer\n  /// such that child_index == union_type_id_map[type_id] and\n  /// type_id == union_type_id_map[128 + child_index]. This value may be\n  /// NULL in the case where child_id == type_id.\n  int8_t* union_type_id_map;\n\n  /// \\brief Number of variadic buffers\n  int32_t n_variadic_buffers;\n\n  /// \\brief Pointers to variadic buffers of binary/string_view arrays\n  const void** variadic_buffers;\n\n  /// \\brief Size of each variadic buffer\n  int64_t* variadic_buffer_sizes;\n};\n\n// Used as the private data member for ArrowArrays allocated here and accessed\n// internally within inline ArrowArray* helpers.\nstruct ArrowArrayPrivateData {\n  // Holder for the validity buffer (or first buffer for union types, which are\n  // the only type whose first buffer is not a valdiity buffer)\n  struct ArrowBitmap bitmap;\n\n  // Holder for additional buffers as required\n  struct ArrowBuffer buffers[NANOARROW_MAX_FIXED_BUFFERS - 1];\n\n  // The array of pointers to buffers. This must be updated after a sequence\n  // of appends to synchronize its values with the actual buffer addresses\n  // (which may have been reallocated during that time)\n  const void** buffer_data;\n\n  // The storage data type, or NANOARROW_TYPE_UNINITIALIZED if unknown\n  enum ArrowType storage_type;\n\n  // The buffer arrangement for the storage type\n  struct ArrowLayout layout;\n\n  // Flag to indicate if there are non-sequence union type ids.\n  // In the future this could be replaced with a type id<->child mapping\n  // to support constructing unions in append mode where type_id != child_index\n  int8_t union_type_id_is_child_index;\n\n  // Number of variadic buffers for binary view types\n  int32_t n_variadic_buffers;\n\n  // Variadic buffers for binary view types\n  struct ArrowBuffer* variadic_buffers;\n\n  // The current offset used to build list views\n  int64_t list_view_offset;\n};\n\n/// \\brief A representation of an interval.\n/// \\ingroup nanoarrow-utils\nstruct ArrowInterval {\n  /// \\brief The type of interval being used\n  enum ArrowType type;\n  /// \\brief The number of months represented by the interval\n  int32_t months;\n  /// \\brief The number of days represented by the interval\n  int32_t days;\n  /// \\brief The number of ms represented by the interval\n  int32_t ms;\n  /// \\brief The number of ns represented by the interval\n  int64_t ns;\n};\n\n/// \\brief Zero initialize an Interval with a given unit\n/// \\ingroup nanoarrow-utils\nstatic inline void ArrowIntervalInit(struct ArrowInterval* interval,\n                                     enum ArrowType type) {\n  memset(interval, 0, sizeof(struct ArrowInterval));\n  interval->type = type;\n}\n\n/// \\brief A representation of a fixed-precision decimal number\n/// \\ingroup nanoarrow-utils\n///\n/// This structure should be initialized with ArrowDecimalInit() once and\n/// values set using ArrowDecimalSetInt(), ArrowDecimalSetBytes128(),\n/// or ArrowDecimalSetBytes256().\nstruct ArrowDecimal {\n  /// \\brief An array of 64-bit integers of n_words length defined in native-endian order.\n  /// For a 32-bit decimal value, index 0 will be a 32-bit integer value.\n  uint64_t words[4];\n\n  /// \\brief The number of significant digits this decimal number can represent\n  int32_t precision;\n\n  /// \\brief The number of digits after the decimal point. This can be negative.\n  int32_t scale;\n\n  /// \\brief The number of 64-bit words in the words array. For the special case of a\n  /// 32-bit decimal value, this will be 0.\n  int n_words;\n\n  /// \\brief Cached value used by the implementation\n  int high_word_index;\n\n  /// \\brief Cached value used by the implementation\n  int low_word_index;\n};\n\n/// \\brief Initialize a decimal with a given set of type parameters\n/// \\ingroup nanoarrow-utils\nstatic inline void ArrowDecimalInit(struct ArrowDecimal* decimal, int32_t bitwidth,\n                                    int32_t precision, int32_t scale) {\n  memset(decimal->words, 0, sizeof(decimal->words));\n  decimal->precision = precision;\n  decimal->scale = scale;\n  // n_words will be 0 for bitwidth == 32\n  decimal->n_words = (int)(bitwidth / 8 / sizeof(uint64_t));\n\n  if (_ArrowIsLittleEndian()) {\n    decimal->low_word_index = 0;\n    decimal->high_word_index = decimal->n_words > 0 ? decimal->n_words - 1 : 0;\n  } else {\n    decimal->low_word_index = decimal->n_words > 0 ? decimal->n_words - 1 : 0;\n    decimal->high_word_index = 0;\n  }\n}\n\n/// \\brief Get a signed integer value of a sufficiently small ArrowDecimal\n///\n/// This does not check if the decimal's precision sufficiently small to fit\n/// within the signed 64-bit integer range (A precision less than or equal\n/// to 18 is sufficiently small).\nstatic inline int64_t ArrowDecimalGetIntUnsafe(const struct ArrowDecimal* decimal) {\n  if (decimal->n_words == 0) {\n    int32_t value;\n    memcpy(&value, decimal->words, sizeof(int32_t));\n    return value;\n  }\n\n  return (int64_t)decimal->words[decimal->low_word_index];\n}\n\n/// \\brief Copy the bytes of this decimal into a sufficiently large buffer\n/// \\ingroup nanoarrow-utils\nstatic inline void ArrowDecimalGetBytes(const struct ArrowDecimal* decimal,\n                                        uint8_t* out) {\n  if (decimal->n_words == 0) {\n    memcpy(out, decimal->words, sizeof(int32_t));\n  } else {\n    memcpy(out, decimal->words, decimal->n_words * sizeof(uint64_t));\n  }\n}\n\n/// \\brief Returns 1 if the value represented by decimal is >= 0 or -1 otherwise\n/// \\ingroup nanoarrow-utils\nstatic inline int64_t ArrowDecimalSign(const struct ArrowDecimal* decimal) {\n  if (decimal->n_words == 0) {\n    return ArrowDecimalGetIntUnsafe(decimal) >= 0 ? 1 : -1;\n  } else {\n    return 1 | ((int64_t)(decimal->words[decimal->high_word_index]) >> 63);\n  }\n}\n\n/// \\brief Sets the integer value of this decimal\n/// \\ingroup nanoarrow-utils\nstatic inline void ArrowDecimalSetInt(struct ArrowDecimal* decimal, int64_t value) {\n  if (decimal->n_words == 0) {\n    int32_t value32 = (int32_t)value;\n    memcpy(decimal->words, &value32, sizeof(int32_t));\n    return;\n  }\n\n  if (value < 0) {\n    memset(decimal->words, 0xff, decimal->n_words * sizeof(uint64_t));\n  } else {\n    memset(decimal->words, 0, decimal->n_words * sizeof(uint64_t));\n  }\n\n  decimal->words[decimal->low_word_index] = value;\n}\n\n/// \\brief Negate the value of this decimal in place\n/// \\ingroup nanoarrow-utils\nstatic inline void ArrowDecimalNegate(struct ArrowDecimal* decimal) {\n  if (decimal->n_words == 0) {\n    int32_t value;\n    memcpy(&value, decimal->words, sizeof(int32_t));\n    value = -value;\n    memcpy(decimal->words, &value, sizeof(int32_t));\n    return;\n  }\n\n  uint64_t carry = 1;\n\n  if (decimal->low_word_index == 0) {\n    for (int i = 0; i < decimal->n_words; i++) {\n      uint64_t elem = decimal->words[i];\n      elem = ~elem + carry;\n      carry &= (elem == 0);\n      decimal->words[i] = elem;\n    }\n  } else {\n    for (int i = decimal->low_word_index; i >= 0; i--) {\n      uint64_t elem = decimal->words[i];\n      elem = ~elem + carry;\n      carry &= (elem == 0);\n      decimal->words[i] = elem;\n    }\n  }\n}\n\n/// \\brief Copy bytes from a buffer into this decimal\n/// \\ingroup nanoarrow-utils\nstatic inline void ArrowDecimalSetBytes(struct ArrowDecimal* decimal,\n                                        const uint8_t* value) {\n  if (decimal->n_words == 0) {\n    memcpy(decimal->words, value, sizeof(int32_t));\n  } else {\n    memcpy(decimal->words, value, decimal->n_words * sizeof(uint64_t));\n  }\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#ifndef NANOARROW_H_INCLUDED\n#define NANOARROW_H_INCLUDED\n\n#include <stddef.h>\n#include <stdint.h>\n#include <stdlib.h>\n\n\n\n// If using CMake, optionally pass -DNANOARROW_NAMESPACE=MyNamespace which will set this\n// define in nanoarrow_config.h. If not, you can optionally #define NANOARROW_NAMESPACE\n// MyNamespace here.\n\n// This section remaps the non-prefixed symbols to the prefixed symbols so that\n// code written against this build can be used independent of the value of\n// NANOARROW_NAMESPACE.\n#ifdef NANOARROW_NAMESPACE\n#define NANOARROW_CAT(A, B) A##B\n#define NANOARROW_SYMBOL(A, B) NANOARROW_CAT(A, B)\n\n#define ArrowNanoarrowVersion NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowNanoarrowVersion)\n#define ArrowNanoarrowVersionInt \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowNanoarrowVersionInt)\n#define ArrowMalloc NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMalloc)\n#define ArrowRealloc NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowRealloc)\n#define ArrowFree NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowFree)\n#define ArrowBufferAllocatorDefault \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowBufferAllocatorDefault)\n#define ArrowBufferDeallocator \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowBufferDeallocator)\n#define ArrowErrorSet NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowErrorSet)\n#define ArrowLayoutInit NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowLayoutInit)\n#define ArrowDecimalSetDigits NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowDecimalSetDigits)\n#define ArrowDecimalAppendDigitsToBuffer \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowDecimalAppendDigitsToBuffer)\n#define ArrowDecimalAppendStringToBuffer \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowDecimalAppendStringToBuffer)\n#define ArrowSchemaInit NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaInit)\n#define ArrowSchemaInitFromType \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaInitFromType)\n#define ArrowSchemaSetType NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetType)\n#define ArrowSchemaSetTypeStruct \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetTypeStruct)\n#define ArrowSchemaSetTypeFixedSize \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetTypeFixedSize)\n#define ArrowSchemaSetTypeDecimal \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetTypeDecimal)\n#define ArrowSchemaSetTypeRunEndEncoded \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetTypeRunEndEncoded)\n#define ArrowSchemaSetTypeDateTime \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetTypeDateTime)\n#define ArrowSchemaSetTypeUnion \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetTypeUnion)\n#define ArrowSchemaDeepCopy NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaDeepCopy)\n#define ArrowSchemaSetFormat NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetFormat)\n#define ArrowSchemaSetName NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetName)\n#define ArrowSchemaSetMetadata \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaSetMetadata)\n#define ArrowSchemaAllocateChildren \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaAllocateChildren)\n#define ArrowSchemaAllocateDictionary \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaAllocateDictionary)\n#define ArrowMetadataReaderInit \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataReaderInit)\n#define ArrowMetadataReaderRead \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataReaderRead)\n#define ArrowMetadataSizeOf NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataSizeOf)\n#define ArrowMetadataHasKey NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataHasKey)\n#define ArrowMetadataGetValue NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataGetValue)\n#define ArrowMetadataBuilderInit \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataBuilderInit)\n#define ArrowMetadataBuilderAppend \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataBuilderAppend)\n#define ArrowMetadataBuilderSet \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataBuilderSet)\n#define ArrowMetadataBuilderRemove \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowMetadataBuilderRemove)\n#define ArrowSchemaViewInit NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaViewInit)\n#define ArrowSchemaToString NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowSchemaToString)\n#define ArrowArrayInitFromType \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayInitFromType)\n#define ArrowArrayInitFromSchema \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayInitFromSchema)\n#define ArrowArrayInitFromArrayView \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayInitFromArrayView)\n#define ArrowArrayInitFromArrayView \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayInitFromArrayView)\n#define ArrowArrayAllocateChildren \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayAllocateChildren)\n#define ArrowArrayAllocateDictionary \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayAllocateDictionary)\n#define ArrowArraySetValidityBitmap \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArraySetValidityBitmap)\n#define ArrowArraySetBuffer NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArraySetBuffer)\n#define ArrowArrayReserve NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayReserve)\n#define ArrowArrayFinishBuilding \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayFinishBuilding)\n#define ArrowArrayFinishBuildingDefault \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayFinishBuildingDefault)\n#define ArrowArrayViewInitFromType \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewInitFromType)\n#define ArrowArrayViewInitFromSchema \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewInitFromSchema)\n#define ArrowArrayViewAllocateChildren \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewAllocateChildren)\n#define ArrowArrayViewAllocateDictionary \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewAllocateDictionary)\n#define ArrowArrayViewSetLength \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewSetLength)\n#define ArrowArrayViewSetArray \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewSetArray)\n#define ArrowArrayViewSetArrayMinimal \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewSetArrayMinimal)\n#define ArrowArrayViewValidate \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewValidate)\n#define ArrowArrayViewCompare NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewCompare)\n#define ArrowArrayViewReset NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowArrayViewReset)\n#define ArrowBasicArrayStreamInit \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowBasicArrayStreamInit)\n#define ArrowBasicArrayStreamSetArray \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowBasicArrayStreamSetArray)\n#define ArrowBasicArrayStreamValidate \\\n  NANOARROW_SYMBOL(NANOARROW_NAMESPACE, ArrowBasicArrayStreamValidate)\n\n#endif\n\n#if (defined _WIN32 || defined __CYGWIN__) && defined(NANOARROW_BUILD_DLL)\n#if defined(NANOARROW_EXPORT_DLL)\n#define NANOARROW_DLL __declspec(dllexport)\n#else\n#define NANOARROW_DLL __declspec(dllimport)\n#endif  // defined(NANOARROW_EXPORT_DLL)\n#elif !defined(NANOARROW_DLL)\n#if defined(__GNUC__) && __GNUC__ >= 4\n#define NANOARROW_DLL __attribute__((visibility(\"default\")))\n#else\n#define NANOARROW_DLL\n#endif  // __GNUC__ >= 4\n#endif\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/// \\defgroup nanoarrow Nanoarrow C library\n///\n/// Except where noted, objects are not thread-safe and clients should\n/// take care to serialize accesses to methods.\n///\n/// Because this library is intended to be vendored, it provides full type\n/// definitions and encourages clients to stack or statically allocate\n/// where convenient.\n\n/// \\defgroup nanoarrow-malloc Memory management\n///\n/// Non-buffer members of a struct ArrowSchema and struct ArrowArray\n/// must be allocated using ArrowMalloc() or ArrowRealloc() and freed\n/// using ArrowFree() for schemas and arrays allocated here. Buffer members\n/// are allocated using an ArrowBufferAllocator.\n///\n/// @{\n\n/// \\brief Allocate like malloc()\nNANOARROW_DLL void* ArrowMalloc(int64_t size);\n\n/// \\brief Reallocate like realloc()\nNANOARROW_DLL void* ArrowRealloc(void* ptr, int64_t size);\n\n/// \\brief Free a pointer allocated using ArrowMalloc() or ArrowRealloc().\nNANOARROW_DLL void ArrowFree(void* ptr);\n\n/// \\brief Return the default allocator\n///\n/// The default allocator uses ArrowMalloc(), ArrowRealloc(), and\n/// ArrowFree().\nNANOARROW_DLL struct ArrowBufferAllocator ArrowBufferAllocatorDefault(void);\n\n/// \\brief Create a custom deallocator\n///\n/// Creates a buffer allocator with only a free method that can be used to\n/// attach a custom deallocator to an ArrowBuffer. This may be used to\n/// avoid copying an existing buffer that was not allocated using the\n/// infrastructure provided here (e.g., by an R or Python object).\nNANOARROW_DLL struct ArrowBufferAllocator ArrowBufferDeallocator(\n    ArrowBufferDeallocatorCallback, void* private_data);\n\n/// @}\n\n/// \\brief Move the contents of an src ArrowSchema into dst and set src->release to NULL\n/// \\ingroup nanoarrow-arrow-cdata\nstatic inline void ArrowSchemaMove(struct ArrowSchema* src, struct ArrowSchema* dst);\n\n/// \\brief Call the release callback of an ArrowSchema\n/// \\ingroup nanoarrow-arrow-cdata\nstatic inline void ArrowSchemaRelease(struct ArrowSchema* schema);\n\n/// \\brief Move the contents of an src ArrowArray into dst and set src->release to NULL\n/// \\ingroup nanoarrow-arrow-cdata\nstatic inline void ArrowArrayMove(struct ArrowArray* src, struct ArrowArray* dst);\n\n/// \\brief Call the release callback of an ArrowArray\nstatic inline void ArrowArrayRelease(struct ArrowArray* array);\n\n/// \\brief Move the contents of an src ArrowArrayStream into dst and set src->release to\n/// NULL \\ingroup nanoarrow-arrow-cdata\nstatic inline void ArrowArrayStreamMove(struct ArrowArrayStream* src,\n                                        struct ArrowArrayStream* dst);\n\n/// \\brief Call the get_schema callback of an ArrowArrayStream\n/// \\ingroup nanoarrow-arrow-cdata\n///\n/// Unlike the get_schema callback, this wrapper checks the return code\n/// and propagates the error reported by get_last_error into error. This\n/// makes it significantly less verbose to iterate over array streams\n/// using NANOARROW_RETURN_NOT_OK()-style error handling.\nstatic inline ArrowErrorCode ArrowArrayStreamGetSchema(\n    struct ArrowArrayStream* array_stream, struct ArrowSchema* out,\n    struct ArrowError* error);\n\n/// \\brief Call the get_next callback of an ArrowArrayStream\n/// \\ingroup nanoarrow-arrow-cdata\n///\n/// Unlike the get_next callback, this wrapper checks the return code\n/// and propagates the error reported by get_last_error into error. This\n/// makes it significantly less verbose to iterate over array streams\n/// using NANOARROW_RETURN_NOT_OK()-style error handling.\nstatic inline ArrowErrorCode ArrowArrayStreamGetNext(\n    struct ArrowArrayStream* array_stream, struct ArrowArray* out,\n    struct ArrowError* error);\n\n/// \\brief Call the get_last_error callback of an ArrowArrayStream\n/// \\ingroup nanoarrow-arrow-cdata\n///\n/// Unlike the get_last_error callback, this function never returns NULL (i.e.,\n/// its result is safe to use in printf-style error formatters). Null values\n/// from the original callback are reported as\n/// \"<get_last_error() returned NULL>\".\nstatic inline const char* ArrowArrayStreamGetLastError(\n    struct ArrowArrayStream* array_stream);\n\n/// \\brief Call the release callback of an ArrowArrayStream\nstatic inline void ArrowArrayStreamRelease(struct ArrowArrayStream* array_stream);\n\n/// \\defgroup nanoarrow-errors Error handling\n///\n/// Functions generally return an errno-compatible error code; functions that\n/// need to communicate more verbose error information accept a pointer\n/// to an ArrowError. This can be stack or statically allocated. The\n/// content of the message is undefined unless an error code has been\n/// returned. If a nanoarrow function is passed a non-null ArrowError pointer, the\n/// ArrowError pointed to by the argument will be propagated with a\n/// null-terminated error message. It is safe to pass a NULL ArrowError anywhere\n/// in the nanoarrow API.\n///\n/// Except where documented, it is generally not safe to continue after a\n/// function has returned a non-zero ArrowErrorCode. The NANOARROW_RETURN_NOT_OK and\n/// NANOARROW_ASSERT_OK macros are provided to help propagate errors. C++ clients can use\n/// the helpers provided in the nanoarrow.hpp header to facilitate using C++ idioms\n/// for memory management and error propgagtion.\n///\n/// @{\n\n/// \\brief Set the contents of an error using printf syntax.\n///\n/// If error is NULL, this function does nothing and returns NANOARROW_OK.\nNANOARROW_DLL NANOARROW_CHECK_PRINTF_ATTRIBUTE int ArrowErrorSet(struct ArrowError* error,\n                                                                 const char* fmt, ...);\n\n/// @}\n\n/// \\defgroup nanoarrow-utils Utility data structures\n///\n/// @{\n\n/// \\brief Return a version string in the form \"major.minor.patch\"\nNANOARROW_DLL const char* ArrowNanoarrowVersion(void);\n\n/// \\brief Return an integer that can be used to compare versions sequentially\nNANOARROW_DLL int ArrowNanoarrowVersionInt(void);\n\n/// \\brief Initialize a description of buffer arrangements from a storage type\nNANOARROW_DLL void ArrowLayoutInit(struct ArrowLayout* layout,\n                                   enum ArrowType storage_type);\n\n/// \\brief Create a string view from a null-terminated string\nstatic inline struct ArrowStringView ArrowCharView(const char* value);\n\n/// \\brief Sets the integer value of an ArrowDecimal from a string\nNANOARROW_DLL ArrowErrorCode ArrowDecimalSetDigits(struct ArrowDecimal* decimal,\n                                                   struct ArrowStringView value);\n\n/// \\brief Get the integer value of an ArrowDecimal as string\nNANOARROW_DLL ArrowErrorCode ArrowDecimalAppendDigitsToBuffer(\n    const struct ArrowDecimal* decimal, struct ArrowBuffer* buffer);\n\n/// \\brief Get the decimal value of an ArrowDecimal as a string\nNANOARROW_DLL ArrowErrorCode ArrowDecimalAppendStringToBuffer(\n    const struct ArrowDecimal* decimal, struct ArrowBuffer* buffer);\n\n/// \\brief Get the half float value of a float\nstatic inline uint16_t ArrowFloatToHalfFloat(float value);\n\n/// \\brief Get the float value of a half float\nstatic inline float ArrowHalfFloatToFloat(uint16_t value);\n\n/// \\brief Resolve a chunk index from increasing int64_t offsets\n///\n/// Given a buffer of increasing int64_t offsets that begin with 0 (e.g., offset buffer\n/// of a large type, run ends of a chunked array implementation), resolve a value v\n/// where lo <= v < hi such that offsets[v] <= index < offsets[v + 1].\nstatic inline int64_t ArrowResolveChunk64(int64_t index, const int64_t* offsets,\n                                          int64_t lo, int64_t hi);\n\n/// @}\n\n/// \\defgroup nanoarrow-schema Creating schemas\n///\n/// These functions allocate, copy, and destroy ArrowSchema structures\n///\n/// @{\n\n/// \\brief Initialize an ArrowSchema\n///\n/// Initializes the fields and release callback of schema_out. Caller\n/// is responsible for calling the schema->release callback if\n/// NANOARROW_OK is returned.\nNANOARROW_DLL void ArrowSchemaInit(struct ArrowSchema* schema);\n\n/// \\brief Initialize an ArrowSchema from an ArrowType\n///\n/// A convenience constructor for that calls ArrowSchemaInit() and\n/// ArrowSchemaSetType() for the common case of constructing an\n/// unparameterized type. The caller is responsible for calling the schema->release\n/// callback if NANOARROW_OK is returned.\nNANOARROW_DLL ArrowErrorCode ArrowSchemaInitFromType(struct ArrowSchema* schema,\n                                                     enum ArrowType type);\n\n/// \\brief Get a human-readable summary of a Schema\n///\n/// Writes a summary of an ArrowSchema to out (up to n - 1 characters)\n/// and returns the number of characters required for the output if\n/// n were sufficiently large. If recursive is non-zero, the result will\n/// also include children.\nNANOARROW_DLL int64_t ArrowSchemaToString(const struct ArrowSchema* schema, char* out,\n                                          int64_t n, char recursive);\n\n/// \\brief Set the format field of a schema from an ArrowType\n///\n/// Initializes the fields and release callback of schema_out. For\n/// NANOARROW_TYPE_LIST, NANOARROW_TYPE_LARGE_LIST, and\n/// NANOARROW_TYPE_MAP, the appropriate number of children are\n/// allocated, initialized, and named; however, the caller must\n/// ArrowSchemaSetType() on the preinitialized children. Schema must have been initialized\n/// using ArrowSchemaInit() or ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetType(struct ArrowSchema* schema,\n                                                enum ArrowType type);\n\n/// \\brief Set the format field and initialize children of a struct schema\n///\n/// The specified number of children are initialized; however, the caller is responsible\n/// for calling ArrowSchemaSetType() and ArrowSchemaSetName() on each child.\n/// Schema must have been initialized using ArrowSchemaInit() or ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetTypeStruct(struct ArrowSchema* schema,\n                                                      int64_t n_children);\n\n/// \\brief Set the format field of a fixed-size schema\n///\n/// Returns EINVAL for fixed_size <= 0 or for type that is not\n/// NANOARROW_TYPE_FIXED_SIZE_BINARY or NANOARROW_TYPE_FIXED_SIZE_LIST.\n/// For NANOARROW_TYPE_FIXED_SIZE_LIST, the appropriate number of children are\n/// allocated, initialized, and named; however, the caller must\n/// ArrowSchemaSetType() the first child. Schema must have been initialized using\n/// ArrowSchemaInit() or ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetTypeFixedSize(struct ArrowSchema* schema,\n                                                         enum ArrowType type,\n                                                         int32_t fixed_size);\n\n/// \\brief Set the format field of a decimal schema\n///\n/// Returns EINVAL for scale <= 0 or for type that is not\n/// NANOARROW_TYPE_DECIMAL32, NANOARROW_TYPE_DECIMAL64, NANOARROW_TYPE_DECIMAL128 or\n/// NANOARROW_TYPE_DECIMAL256. Schema must have been initialized using\n/// ArrowSchemaInit() or ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetTypeDecimal(struct ArrowSchema* schema,\n                                                       enum ArrowType type,\n                                                       int32_t decimal_precision,\n                                                       int32_t decimal_scale);\n\n/// \\brief Set the format field of a run-end encoded schema\n///\n/// Returns EINVAL for run_end_type that is not\n/// NANOARROW_TYPE_INT16, NANOARROW_TYPE_INT32 or NANOARROW_TYPE_INT64.\n/// Schema must have been initialized using ArrowSchemaInit() or ArrowSchemaDeepCopy().\n/// The caller must call `ArrowSchemaSetTypeXXX(schema->children[1])` to\n/// set the value type. Note that when building arrays using the `ArrowArrayAppendXXX()`\n/// functions, the run-end encoded array's logical length must be updated manually.\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetTypeRunEndEncoded(struct ArrowSchema* schema,\n                                                             enum ArrowType run_end_type);\n\n/// \\brief Set the format field of a time, timestamp, or duration schema\n///\n/// Returns EINVAL for type that is not\n/// NANOARROW_TYPE_TIME32, NANOARROW_TYPE_TIME64,\n/// NANOARROW_TYPE_TIMESTAMP, or NANOARROW_TYPE_DURATION. The\n/// timezone parameter must be NULL for a non-timestamp type. Schema must have been\n/// initialized using ArrowSchemaInit() or ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetTypeDateTime(struct ArrowSchema* schema,\n                                                        enum ArrowType type,\n                                                        enum ArrowTimeUnit time_unit,\n                                                        const char* timezone);\n\n/// \\brief Set the format field of a union schema\n///\n/// Returns EINVAL for a type that is not NANOARROW_TYPE_DENSE_UNION\n/// or NANOARROW_TYPE_SPARSE_UNION. The specified number of children are\n/// allocated, and initialized.\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetTypeUnion(struct ArrowSchema* schema,\n                                                     enum ArrowType type,\n                                                     int64_t n_children);\n\n/// \\brief Make a (recursive) copy of a schema\n///\n/// Allocates and copies fields of schema into schema_out.\nNANOARROW_DLL ArrowErrorCode ArrowSchemaDeepCopy(const struct ArrowSchema* schema,\n                                                 struct ArrowSchema* schema_out);\n\n/// \\brief Copy format into schema->format\n///\n/// schema must have been allocated using ArrowSchemaInitFromType() or\n/// ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetFormat(struct ArrowSchema* schema,\n                                                  const char* format);\n\n/// \\brief Copy name into schema->name\n///\n/// schema must have been allocated using ArrowSchemaInitFromType() or\n/// ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetName(struct ArrowSchema* schema,\n                                                const char* name);\n\n/// \\brief Copy metadata into schema->metadata\n///\n/// schema must have been allocated using ArrowSchemaInitFromType() or\n/// ArrowSchemaDeepCopy.\nNANOARROW_DLL ArrowErrorCode ArrowSchemaSetMetadata(struct ArrowSchema* schema,\n                                                    const char* metadata);\n\n/// \\brief Allocate the schema->children array\n///\n/// Includes the memory for each child struct ArrowSchema.\n/// schema must have been allocated using ArrowSchemaInitFromType() or\n/// ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaAllocateChildren(struct ArrowSchema* schema,\n                                                         int64_t n_children);\n\n/// \\brief Allocate the schema->dictionary member\n///\n/// schema must have been allocated using ArrowSchemaInitFromType() or\n/// ArrowSchemaDeepCopy().\nNANOARROW_DLL ArrowErrorCode ArrowSchemaAllocateDictionary(struct ArrowSchema* schema);\n\n/// @}\n\n/// \\defgroup nanoarrow-metadata Create, read, and modify schema metadata\n///\n/// @{\n\n/// \\brief Reader for key/value pairs in schema metadata\n///\n/// The ArrowMetadataReader does not own any data and is only valid\n/// for the lifetime of the underlying metadata pointer.\nstruct ArrowMetadataReader {\n  /// \\brief A metadata string from a schema->metadata field.\n  const char* metadata;\n\n  /// \\brief The current offset into the metadata string\n  int64_t offset;\n\n  /// \\brief The number of remaining keys\n  int32_t remaining_keys;\n};\n\n/// \\brief Initialize an ArrowMetadataReader\nNANOARROW_DLL ArrowErrorCode ArrowMetadataReaderInit(struct ArrowMetadataReader* reader,\n                                                     const char* metadata);\n\n/// \\brief Read the next key/value pair from an ArrowMetadataReader\nNANOARROW_DLL ArrowErrorCode ArrowMetadataReaderRead(struct ArrowMetadataReader* reader,\n                                                     struct ArrowStringView* key_out,\n                                                     struct ArrowStringView* value_out);\n\n/// \\brief The number of bytes in in a key/value metadata string\nNANOARROW_DLL int64_t ArrowMetadataSizeOf(const char* metadata);\n\n/// \\brief Check for a key in schema metadata\nNANOARROW_DLL char ArrowMetadataHasKey(const char* metadata, struct ArrowStringView key);\n\n/// \\brief Extract a value from schema metadata\n///\n/// If key does not exist in metadata, value_out is unmodified\nNANOARROW_DLL ArrowErrorCode ArrowMetadataGetValue(const char* metadata,\n                                                   struct ArrowStringView key,\n                                                   struct ArrowStringView* value_out);\n\n/// \\brief Initialize a builder for schema metadata from key/value pairs\n///\n/// metadata can be an existing metadata string or NULL to initialize\n/// an empty metadata string.\nNANOARROW_DLL ArrowErrorCode ArrowMetadataBuilderInit(struct ArrowBuffer* buffer,\n                                                      const char* metadata);\n\n/// \\brief Append a key/value pair to a buffer containing serialized metadata\nNANOARROW_DLL ArrowErrorCode ArrowMetadataBuilderAppend(struct ArrowBuffer* buffer,\n                                                        struct ArrowStringView key,\n                                                        struct ArrowStringView value);\n\n/// \\brief Set a key/value pair to a buffer containing serialized metadata\n///\n/// Ensures that the only entry for key in the metadata is set to value.\n/// This function maintains the existing position of (the first instance of)\n/// key if present in the data.\nNANOARROW_DLL ArrowErrorCode ArrowMetadataBuilderSet(struct ArrowBuffer* buffer,\n                                                     struct ArrowStringView key,\n                                                     struct ArrowStringView value);\n\n/// \\brief Remove a key from a buffer containing serialized metadata\nNANOARROW_DLL ArrowErrorCode ArrowMetadataBuilderRemove(struct ArrowBuffer* buffer,\n                                                        struct ArrowStringView key);\n\n/// @}\n\n/// \\defgroup nanoarrow-schema-view Reading schemas\n///\n/// @{\n\n/// \\brief A non-owning view of a parsed ArrowSchema\n///\n/// Contains more readily extractable values than a raw ArrowSchema.\n/// Clients can stack or statically allocate this structure but are\n/// encouraged to use the provided getters to ensure forward\n/// compatibility.\nstruct ArrowSchemaView {\n  /// \\brief A pointer to the schema represented by this view\n  const struct ArrowSchema* schema;\n\n  /// \\brief The data type represented by the schema\n  ///\n  /// This value may be NANOARROW_TYPE_DICTIONARY if the schema has a\n  /// non-null dictionary member; datetime types are valid values.\n  /// This value will never be NANOARROW_TYPE_EXTENSION (see\n  /// extension_name and/or extension_metadata to check for\n  /// an extension type).\n  enum ArrowType type;\n\n  /// \\brief The storage data type represented by the schema\n  ///\n  /// This value will never be NANOARROW_TYPE_DICTIONARY, NANOARROW_TYPE_EXTENSION\n  /// or any datetime type. This value represents only the type required to\n  /// interpret the buffers in the array.\n  enum ArrowType storage_type;\n\n  /// \\brief The storage layout represented by the schema\n  struct ArrowLayout layout;\n\n  /// \\brief The extension type name if it exists\n  ///\n  /// If the ARROW:extension:name key is present in schema.metadata,\n  /// extension_name.data will be non-NULL.\n  struct ArrowStringView extension_name;\n\n  /// \\brief The extension type metadata if it exists\n  ///\n  /// If the ARROW:extension:metadata key is present in schema.metadata,\n  /// extension_metadata.data will be non-NULL.\n  struct ArrowStringView extension_metadata;\n\n  /// \\brief Format fixed size parameter\n  ///\n  /// This value is set when parsing a fixed-size binary or fixed-size\n  /// list schema; this value is undefined for other types. For a\n  /// fixed-size binary schema this value is in bytes; for a fixed-size\n  /// list schema this value refers to the number of child elements for\n  /// each element of the parent.\n  int32_t fixed_size;\n\n  /// \\brief Decimal bitwidth\n  ///\n  /// This value is set when parsing a decimal type schema;\n  /// this value is undefined for other types.\n  int32_t decimal_bitwidth;\n\n  /// \\brief Decimal precision\n  ///\n  /// This value is set when parsing a decimal type schema;\n  /// this value is undefined for other types.\n  int32_t decimal_precision;\n\n  /// \\brief Decimal scale\n  ///\n  /// This value is set when parsing a decimal type schema;\n  /// this value is undefined for other types.\n  int32_t decimal_scale;\n\n  /// \\brief Format time unit parameter\n  ///\n  /// This value is set when parsing a date/time type. The value is\n  /// undefined for other types.\n  enum ArrowTimeUnit time_unit;\n\n  /// \\brief Format timezone parameter\n  ///\n  /// This value is set when parsing a timestamp type and represents\n  /// the timezone format parameter. This value points to\n  /// data within the schema and is undefined for other types.\n  const char* timezone;\n\n  /// \\brief Union type ids parameter\n  ///\n  /// This value is set when parsing a union type and represents\n  /// type ids parameter. This value points to\n  /// data within the schema and is undefined for other types.\n  const char* union_type_ids;\n};\n\n/// \\brief Initialize an ArrowSchemaView\nNANOARROW_DLL ArrowErrorCode ArrowSchemaViewInit(struct ArrowSchemaView* schema_view,\n                                                 const struct ArrowSchema* schema,\n                                                 struct ArrowError* error);\n\n/// @}\n\n/// \\defgroup nanoarrow-buffer Owning, growable buffers\n///\n/// @{\n\n/// \\brief Initialize an ArrowBuffer\n///\n/// Initialize a buffer with a NULL, zero-size buffer using the default\n/// buffer allocator.\nstatic inline void ArrowBufferInit(struct ArrowBuffer* buffer);\n\n/// \\brief Set a newly-initialized buffer's allocator\n///\n/// Returns EINVAL if the buffer has already been allocated.\nstatic inline ArrowErrorCode ArrowBufferSetAllocator(\n    struct ArrowBuffer* buffer, struct ArrowBufferAllocator allocator);\n\n/// \\brief Reset an ArrowBuffer\n///\n/// Releases the buffer using the allocator's free method if\n/// the buffer's data member is non-null, sets the data member\n/// to NULL, and sets the buffer's size and capacity to 0.\nstatic inline void ArrowBufferReset(struct ArrowBuffer* buffer);\n\n/// \\brief Move an ArrowBuffer\n///\n/// Transfers the buffer data and lifecycle management to another\n/// address and resets buffer.\nstatic inline void ArrowBufferMove(struct ArrowBuffer* src, struct ArrowBuffer* dst);\n\n/// \\brief Grow or shrink a buffer to a given size\n///\n/// When shrinking the size of the buffer, the buffer is only reallocated\n/// if shrink_to_fit is non-zero.\nstatic inline ArrowErrorCode ArrowBufferResize(struct ArrowBuffer* buffer,\n                                               int64_t new_size_bytes,\n                                               char shrink_to_fit);\n\n/// \\brief Ensure a buffer has at least a given additional capacity\n///\n/// Ensures that the buffer has space to append at least\n/// additional_size_bytes, overallocating when required.\nstatic inline ArrowErrorCode ArrowBufferReserve(struct ArrowBuffer* buffer,\n                                                int64_t additional_size_bytes);\n\n/// \\brief Write data to buffer and increment the buffer size\n///\n/// This function does not check that buffer has the required capacity\nstatic inline void ArrowBufferAppendUnsafe(struct ArrowBuffer* buffer, const void* data,\n                                           int64_t size_bytes);\n\n/// \\brief Write data to buffer and increment the buffer size\n///\n/// This function writes and ensures that the buffer has the required capacity,\n/// possibly by reallocating the buffer. Like ArrowBufferReserve, this will\n/// overallocate when reallocation is required.\nstatic inline ArrowErrorCode ArrowBufferAppend(struct ArrowBuffer* buffer,\n                                               const void* data, int64_t size_bytes);\n\n/// \\brief Write fill to buffer and increment the buffer size\n///\n/// This function writes the specified number of fill bytes and\n/// ensures that the buffer has the required capacity,\nstatic inline ArrowErrorCode ArrowBufferAppendFill(struct ArrowBuffer* buffer,\n                                                   uint8_t value, int64_t size_bytes);\n\n/// \\brief Write an 8-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendInt8(struct ArrowBuffer* buffer,\n                                                   int8_t value);\n\n/// \\brief Write an unsigned 8-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendUInt8(struct ArrowBuffer* buffer,\n                                                    uint8_t value);\n\n/// \\brief Write a 16-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendInt16(struct ArrowBuffer* buffer,\n                                                    int16_t value);\n\n/// \\brief Write an unsigned 16-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendUInt16(struct ArrowBuffer* buffer,\n                                                     uint16_t value);\n\n/// \\brief Write a 32-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendInt32(struct ArrowBuffer* buffer,\n                                                    int32_t value);\n\n/// \\brief Write an unsigned 32-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendUInt32(struct ArrowBuffer* buffer,\n                                                     uint32_t value);\n\n/// \\brief Write a 64-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendInt64(struct ArrowBuffer* buffer,\n                                                    int64_t value);\n\n/// \\brief Write an unsigned 64-bit integer to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendUInt64(struct ArrowBuffer* buffer,\n                                                     uint64_t value);\n\n/// \\brief Write a double to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendDouble(struct ArrowBuffer* buffer,\n                                                     double value);\n\n/// \\brief Write a float to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendFloat(struct ArrowBuffer* buffer,\n                                                    float value);\n\n/// \\brief Write an ArrowStringView to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendStringView(struct ArrowBuffer* buffer,\n                                                         struct ArrowStringView value);\n\n/// \\brief Write an ArrowBufferView to a buffer\nstatic inline ArrowErrorCode ArrowBufferAppendBufferView(struct ArrowBuffer* buffer,\n                                                         struct ArrowBufferView value);\n\n/// @}\n\n/// \\defgroup nanoarrow-bitmap Bitmap utilities\n///\n/// @{\n\n/// \\brief Extract a boolean value from a bitmap\nstatic inline int8_t ArrowBitGet(const uint8_t* bits, int64_t i);\n\n/// \\brief Set a boolean value to a bitmap to true\nstatic inline void ArrowBitSet(uint8_t* bits, int64_t i);\n\n/// \\brief Set a boolean value to a bitmap to false\nstatic inline void ArrowBitClear(uint8_t* bits, int64_t i);\n\n/// \\brief Set a boolean value to a bitmap\nstatic inline void ArrowBitSetTo(uint8_t* bits, int64_t i, uint8_t value);\n\n/// \\brief Set a boolean value to a range in a bitmap\nstatic inline void ArrowBitsSetTo(uint8_t* bits, int64_t start_offset, int64_t length,\n                                  uint8_t bits_are_set);\n\n/// \\brief Count true values in a bitmap\nstatic inline int64_t ArrowBitCountSet(const uint8_t* bits, int64_t i_from, int64_t i_to);\n\n/// \\brief Extract int8 boolean values from a range in a bitmap\nstatic inline void ArrowBitsUnpackInt8(const uint8_t* bits, int64_t start_offset,\n                                       int64_t length, int8_t* out);\n\n/// \\brief Extract int32 boolean values from a range in a bitmap\nstatic inline void ArrowBitsUnpackInt32(const uint8_t* bits, int64_t start_offset,\n                                        int64_t length, int32_t* out);\n\n/// \\brief Initialize an ArrowBitmap\n///\n/// Initialize the builder's buffer, empty its cache, and reset the size to zero\nstatic inline void ArrowBitmapInit(struct ArrowBitmap* bitmap);\n\n/// \\brief Move an ArrowBitmap\n///\n/// Transfers the underlying buffer data and lifecycle management to another\n/// address and resets the bitmap.\nstatic inline void ArrowBitmapMove(struct ArrowBitmap* src, struct ArrowBitmap* dst);\n\n/// \\brief Ensure a bitmap builder has at least a given additional capacity\n///\n/// Ensures that the buffer has space to append at least\n/// additional_size_bits, overallocating when required.\nstatic inline ArrowErrorCode ArrowBitmapReserve(struct ArrowBitmap* bitmap,\n                                                int64_t additional_size_bits);\n\n/// \\brief Grow or shrink a bitmap to a given size\n///\n/// When shrinking the size of the bitmap, the bitmap is only reallocated\n/// if shrink_to_fit is non-zero.\nstatic inline ArrowErrorCode ArrowBitmapResize(struct ArrowBitmap* bitmap,\n                                               int64_t new_size_bits, char shrink_to_fit);\n\n/// \\brief Reserve space for and append zero or more of the same boolean value to a bitmap\nstatic inline ArrowErrorCode ArrowBitmapAppend(struct ArrowBitmap* bitmap,\n                                               uint8_t bits_are_set, int64_t length);\n\n/// \\brief Append zero or more of the same boolean value to a bitmap\nstatic inline void ArrowBitmapAppendUnsafe(struct ArrowBitmap* bitmap,\n                                           uint8_t bits_are_set, int64_t length);\n\n/// \\brief Append boolean values encoded as int8_t to a bitmap\n///\n/// The values must all be 0 or 1.\nstatic inline void ArrowBitmapAppendInt8Unsafe(struct ArrowBitmap* bitmap,\n                                               const int8_t* values, int64_t n_values);\n\n/// \\brief Append boolean values encoded as int32_t to a bitmap\n///\n/// The values must all be 0 or 1.\nstatic inline void ArrowBitmapAppendInt32Unsafe(struct ArrowBitmap* bitmap,\n                                                const int32_t* values, int64_t n_values);\n\n/// \\brief Reset a bitmap builder\n///\n/// Releases any memory held by buffer, empties the cache, and resets the size to zero\nstatic inline void ArrowBitmapReset(struct ArrowBitmap* bitmap);\n\n/// @}\n\n/// \\defgroup nanoarrow-array Creating arrays\n///\n/// These functions allocate, copy, and destroy ArrowArray structures.\n/// Once an ArrowArray has been initialized via ArrowArrayInitFromType()\n/// or ArrowArrayInitFromSchema(), the caller is responsible for releasing\n/// it using the embedded release callback.\n///\n/// @{\n\n/// \\brief Initialize the fields of an array\n///\n/// Initializes the fields and release callback of array. Caller\n/// is responsible for calling the array->release callback if\n/// NANOARROW_OK is returned.\nNANOARROW_DLL ArrowErrorCode ArrowArrayInitFromType(struct ArrowArray* array,\n                                                    enum ArrowType storage_type);\n\n/// \\brief Initialize the contents of an ArrowArray from an ArrowSchema\n///\n/// Caller is responsible for calling the array->release callback if\n/// NANOARROW_OK is returned.\nNANOARROW_DLL ArrowErrorCode ArrowArrayInitFromSchema(struct ArrowArray* array,\n                                                      const struct ArrowSchema* schema,\n                                                      struct ArrowError* error);\n\n/// \\brief Initialize the contents of an ArrowArray from an ArrowArrayView\n///\n/// Caller is responsible for calling the array->release callback if\n/// NANOARROW_OK is returned.\nNANOARROW_DLL ArrowErrorCode ArrowArrayInitFromArrayView(\n    struct ArrowArray* array, const struct ArrowArrayView* array_view,\n    struct ArrowError* error);\n\n/// \\brief Allocate the array->children array\n///\n/// Includes the memory for each child struct ArrowArray,\n/// whose members are marked as released and may be subsequently initialized\n/// with ArrowArrayInitFromType() or moved from an existing ArrowArray.\n/// schema must have been allocated using ArrowArrayInitFromType().\nNANOARROW_DLL ArrowErrorCode ArrowArrayAllocateChildren(struct ArrowArray* array,\n                                                        int64_t n_children);\n\n/// \\brief Allocate the array->dictionary member\n///\n/// Includes the memory for the struct ArrowArray, whose contents\n/// is marked as released and may be subsequently initialized\n/// with ArrowArrayInitFromType() or moved from an existing ArrowArray.\n/// array must have been allocated using ArrowArrayInitFromType()\nNANOARROW_DLL ArrowErrorCode ArrowArrayAllocateDictionary(struct ArrowArray* array);\n\n/// \\brief Set the validity bitmap of an ArrowArray\n///\n/// array must have been allocated using ArrowArrayInitFromType()\nNANOARROW_DLL void ArrowArraySetValidityBitmap(struct ArrowArray* array,\n                                               struct ArrowBitmap* bitmap);\n\n/// \\brief Set a buffer of an ArrowArray\n///\n/// array must have been allocated using ArrowArrayInitFromType()\nNANOARROW_DLL ArrowErrorCode ArrowArraySetBuffer(struct ArrowArray* array, int64_t i,\n                                                 struct ArrowBuffer* buffer);\n\n/// \\brief Add variadic buffers to a string or binary view array\n///\n/// array must have been allocated using ArrowArrayInitFromType()\nstatic inline ArrowErrorCode ArrowArrayAddVariadicBuffers(struct ArrowArray* array,\n                                                          int32_t n_buffers);\n\n/// \\brief Get the validity bitmap of an ArrowArray\n///\n/// array must have been allocated using ArrowArrayInitFromType()\nstatic inline struct ArrowBitmap* ArrowArrayValidityBitmap(struct ArrowArray* array);\n\n/// \\brief Get a buffer of an ArrowArray\n///\n/// array must have been allocated using ArrowArrayInitFromType()\nstatic inline struct ArrowBuffer* ArrowArrayBuffer(struct ArrowArray* array, int64_t i);\n\n/// \\brief Start element-wise appending to an ArrowArray\n///\n/// Initializes any values needed to use ArrowArrayAppend*() functions.\n/// All element-wise appenders append by value and return EINVAL if the exact value\n/// cannot be represented by the underlying storage type.\n/// array must have been allocated using ArrowArrayInitFromType()\nstatic inline ArrowErrorCode ArrowArrayStartAppending(struct ArrowArray* array);\n\n/// \\brief Reserve space for future appends\n///\n/// For buffer sizes that can be calculated (i.e., not string data buffers or\n/// child array sizes for non-fixed-size arrays), recursively reserve space for\n/// additional elements. This is useful for reducing the number of reallocations\n/// that occur using the item-wise appenders.\nNANOARROW_DLL ArrowErrorCode ArrowArrayReserve(struct ArrowArray* array,\n                                               int64_t additional_size_elements);\n\n/// \\brief Append a null value to an array\nstatic inline ArrowErrorCode ArrowArrayAppendNull(struct ArrowArray* array, int64_t n);\n\n/// \\brief Append an empty, non-null value to an array\nstatic inline ArrowErrorCode ArrowArrayAppendEmpty(struct ArrowArray* array, int64_t n);\n\n/// \\brief Append a signed integer value to an array\n///\n/// Returns NANOARROW_OK if value can be exactly represented by\n/// the underlying storage type or EINVAL otherwise (e.g., value\n/// is outside the valid array range).\nstatic inline ArrowErrorCode ArrowArrayAppendInt(struct ArrowArray* array, int64_t value);\n\n/// \\brief Append an unsigned integer value to an array\n///\n/// Returns NANOARROW_OK if value can be exactly represented by\n/// the underlying storage type or EINVAL otherwise (e.g., value\n/// is outside the valid array range).\nstatic inline ArrowErrorCode ArrowArrayAppendUInt(struct ArrowArray* array,\n                                                  uint64_t value);\n\n/// \\brief Append a double value to an array\n///\n/// Returns NANOARROW_OK if value can be exactly represented by\n/// the underlying storage type or EINVAL otherwise (e.g., value\n/// is outside the valid array range or there is an attempt to append\n/// a non-integer to an array with an integer storage type).\nstatic inline ArrowErrorCode ArrowArrayAppendDouble(struct ArrowArray* array,\n                                                    double value);\n\n/// \\brief Append a string of bytes to an array\n///\n/// Returns NANOARROW_OK if value can be exactly represented by\n/// the underlying storage type, EOVERFLOW if appending value would overflow\n/// the offset type (e.g., if the data buffer would be larger than 2 GB for a\n/// non-large string type), or EINVAL otherwise (e.g., the underlying array is not a\n/// binary, string, large binary, large string, or fixed-size binary array, or value is\n/// the wrong size for a fixed-size binary array).\nstatic inline ArrowErrorCode ArrowArrayAppendBytes(struct ArrowArray* array,\n                                                   struct ArrowBufferView value);\n\n/// \\brief Append a string value to an array\n///\n/// Returns NANOARROW_OK if value can be exactly represented by\n/// the underlying storage type, EOVERFLOW if appending value would overflow\n/// the offset type (e.g., if the data buffer would be larger than 2 GB for a\n/// non-large string type), or EINVAL otherwise (e.g., the underlying array is not a\n/// string or large string array).\nstatic inline ArrowErrorCode ArrowArrayAppendString(struct ArrowArray* array,\n                                                    struct ArrowStringView value);\n\n/// \\brief Append a Interval to an array\n///\n/// Returns NANOARROW_OK if value can be exactly represented by\n/// the underlying storage type or EINVAL otherwise.\nstatic inline ArrowErrorCode ArrowArrayAppendInterval(struct ArrowArray* array,\n                                                      const struct ArrowInterval* value);\n\n/// \\brief Append a decimal value to an array\n///\n/// Returns NANOARROW_OK if array is a decimal array with the appropriate\n/// bitwidth or EINVAL otherwise.\nstatic inline ArrowErrorCode ArrowArrayAppendDecimal(struct ArrowArray* array,\n                                                     const struct ArrowDecimal* value);\n\n/// \\brief Finish a nested array element\n///\n/// Appends a non-null element to the array based on the first child's current\n/// length. Returns NANOARROW_OK if the item was successfully added, EOVERFLOW\n/// if the child of a list or map array would exceed INT_MAX elements, or EINVAL\n/// if the underlying storage type is not a struct, list, large list, or fixed-size\n/// list, or if there was an attempt to add a struct or fixed-size list element where the\n/// length of the child array(s) did not match the expected length.\nstatic inline ArrowErrorCode ArrowArrayFinishElement(struct ArrowArray* array);\n\n/// \\brief Finish a union array element\n///\n/// Appends an element to the union type ids buffer and increments array->length.\n/// For sparse unions, up to one element is added to non type-id children. Returns\n/// EINVAL if the underlying storage type is not a union, if type_id is not valid,\n/// or if child sizes after appending are inconsistent.\nstatic inline ArrowErrorCode ArrowArrayFinishUnionElement(struct ArrowArray* array,\n                                                          int8_t type_id);\n\n/// \\brief Shrink buffer capacity to the size required\n///\n/// Also applies shrinking to any child arrays. array must have been allocated using\n/// ArrowArrayInitFromType\nstatic inline ArrowErrorCode ArrowArrayShrinkToFit(struct ArrowArray* array);\n\n/// \\brief Finish building an ArrowArray\n///\n/// Flushes any pointers from internal buffers that may have been reallocated\n/// into array->buffers and checks the actual size of the buffers\n/// against the expected size based on the final length.\n/// array must have been allocated using ArrowArrayInitFromType()\nNANOARROW_DLL ArrowErrorCode ArrowArrayFinishBuildingDefault(struct ArrowArray* array,\n                                                             struct ArrowError* error);\n\n/// \\brief Finish building an ArrowArray with explicit validation\n///\n/// Finish building with an explicit validation level. This could perform less validation\n/// (i.e. NANOARROW_VALIDATION_LEVEL_NONE or NANOARROW_VALIDATION_LEVEL_MINIMAL) if CPU\n/// buffer data access is not possible or more validation (i.e.,\n/// NANOARROW_VALIDATION_LEVEL_FULL) if buffer content was obtained from an untrusted or\n/// corruptible source.\nNANOARROW_DLL ArrowErrorCode ArrowArrayFinishBuilding(\n    struct ArrowArray* array, enum ArrowValidationLevel validation_level,\n    struct ArrowError* error);\n\n/// @}\n\n/// \\defgroup nanoarrow-array-view Reading arrays\n///\n/// These functions read and validate the contents ArrowArray structures.\n///\n/// @{\n\n/// \\brief Initialize the contents of an ArrowArrayView\nNANOARROW_DLL void ArrowArrayViewInitFromType(struct ArrowArrayView* array_view,\n                                              enum ArrowType storage_type);\n\n/// \\brief Move an ArrowArrayView\n///\n/// Transfers the ArrowArrayView data and lifecycle management to another\n/// address and resets the contents of src.\nstatic inline void ArrowArrayViewMove(struct ArrowArrayView* src,\n                                      struct ArrowArrayView* dst);\n\n/// \\brief Initialize the contents of an ArrowArrayView from an ArrowSchema\nNANOARROW_DLL ArrowErrorCode\nArrowArrayViewInitFromSchema(struct ArrowArrayView* array_view,\n                             const struct ArrowSchema* schema, struct ArrowError* error);\n\n/// \\brief Allocate the array_view->children array\n///\n/// Includes the memory for each child struct ArrowArrayView\nNANOARROW_DLL ArrowErrorCode\nArrowArrayViewAllocateChildren(struct ArrowArrayView* array_view, int64_t n_children);\n\n/// \\brief Allocate array_view->dictionary\nNANOARROW_DLL ArrowErrorCode\nArrowArrayViewAllocateDictionary(struct ArrowArrayView* array_view);\n\n/// \\brief Set data-independent buffer sizes from length\nNANOARROW_DLL void ArrowArrayViewSetLength(struct ArrowArrayView* array_view,\n                                           int64_t length);\n\n/// \\brief Set buffer sizes and data pointers from an ArrowArray\nNANOARROW_DLL ArrowErrorCode ArrowArrayViewSetArray(struct ArrowArrayView* array_view,\n                                                    const struct ArrowArray* array,\n                                                    struct ArrowError* error);\n\n/// \\brief Set buffer sizes and data pointers from an ArrowArray except for those\n/// that require dereferencing buffer content.\nNANOARROW_DLL ArrowErrorCode\nArrowArrayViewSetArrayMinimal(struct ArrowArrayView* array_view,\n                              const struct ArrowArray* array, struct ArrowError* error);\n\n/// \\brief Get the number of buffers\n///\n/// The number of buffers referred to by this ArrowArrayView.  In may cases this can also\n/// be calculated from the ArrowLayout member of the ArrowArrayView or ArrowSchemaView;\n/// however, for binary view and string view types, the number of total buffers depends on\n/// the number of variadic buffers.\nstatic inline int64_t ArrowArrayViewGetNumBuffers(struct ArrowArrayView* array_view);\n\n/// \\brief Get a view of a specific buffer from an ArrowArrayView\n///\n/// This is the ArrowArrayView equivalent of ArrowArray::buffers[i] that includes\n/// size information (if known).\nstatic inline struct ArrowBufferView ArrowArrayViewGetBufferView(\n    struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get the function of a specific buffer in an ArrowArrayView\n///\n/// In may cases this can also be obtained from the ArrowLayout member of the\n/// ArrowArrayView or ArrowSchemaView; however, for binary view and string view types,\n/// the function of each buffer may be different between two arrays of the same type\n/// depending on the number of variadic buffers.\nstatic inline enum ArrowBufferType ArrowArrayViewGetBufferType(\n    struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get the data type of a specific buffer in an ArrowArrayView\n///\n/// In may cases this can also be obtained from the ArrowLayout member of the\n/// ArrowArrayView or ArrowSchemaView; however, for binary view and string view types,\n/// the data type of each buffer may be different between two arrays of the same type\n/// depending on the number of variadic buffers.\nstatic inline enum ArrowType ArrowArrayViewGetBufferDataType(\n    struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get the element size (in bits) of a specific buffer in an ArrowArrayView\n///\n/// In may cases this can also be obtained from the ArrowLayout member of the\n/// ArrowArrayView or ArrowSchemaView; however, for binary view and string view types,\n/// the element width of each buffer may be different between two arrays of the same type\n/// depending on the number of variadic buffers.\nstatic inline int64_t ArrowArrayViewGetBufferElementSizeBits(\n    struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Performs checks on the content of an ArrowArrayView\n///\n/// If using ArrowArrayViewSetArray() to back array_view with an ArrowArray,\n/// the buffer sizes and some content (fist and last offset) have already\n/// been validated at the \"default\" level. If setting the buffer pointers\n/// and sizes otherwise, you may wish to perform checks at a different level. See\n/// documentation for ArrowValidationLevel for the details of checks performed\n/// at each level.\nNANOARROW_DLL ArrowErrorCode ArrowArrayViewValidate(\n    struct ArrowArrayView* array_view, enum ArrowValidationLevel validation_level,\n    struct ArrowError* error);\n\n/// \\brief Compare two ArrowArrayView objects for equality\n///\n/// Given two ArrowArrayView instances, place either 0 (not equal) and\n/// 1 (equal) at the address pointed to by out. If the comparison determines\n/// that actual and expected are not equal, a reason will be communicated via\n/// error if error is non-NULL.\n///\n/// Returns NANOARROW_OK if the comparison completed successfully.\nNANOARROW_DLL ArrowErrorCode ArrowArrayViewCompare(const struct ArrowArrayView* actual,\n                                                   const struct ArrowArrayView* expected,\n                                                   enum ArrowCompareLevel level, int* out,\n                                                   struct ArrowError* reason);\n\n/// \\brief Reset the contents of an ArrowArrayView and frees resources\nNANOARROW_DLL void ArrowArrayViewReset(struct ArrowArrayView* array_view);\n\n/// \\brief Check for a null element in an ArrowArrayView\nstatic inline int8_t ArrowArrayViewIsNull(const struct ArrowArrayView* array_view,\n                                          int64_t i);\n\n/// \\brief Compute null count for an ArrowArrayView\nstatic inline int64_t ArrowArrayViewComputeNullCount(\n    const struct ArrowArrayView* array_view);\n\n/// \\brief Get the type id of a union array element\nstatic inline int8_t ArrowArrayViewUnionTypeId(const struct ArrowArrayView* array_view,\n                                               int64_t i);\n\n/// \\brief Get the child index of a union array element\nstatic inline int8_t ArrowArrayViewUnionChildIndex(\n    const struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get the index to use into the relevant union child array\nstatic inline int64_t ArrowArrayViewUnionChildOffset(\n    const struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get an element in an ArrowArrayView as an integer\n///\n/// This function does not check for null values, that values are actually integers, or\n/// that values are within a valid range for an int64.\nstatic inline int64_t ArrowArrayViewGetIntUnsafe(const struct ArrowArrayView* array_view,\n                                                 int64_t i);\n\n/// \\brief Get an element in an ArrowArrayView as an unsigned integer\n///\n/// This function does not check for null values, that values are actually integers, or\n/// that values are within a valid range for a uint64.\nstatic inline uint64_t ArrowArrayViewGetUIntUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get an element in an ArrowArrayView as a double\n///\n/// This function does not check for null values, or\n/// that values are within a valid range for a double.\nstatic inline double ArrowArrayViewGetDoubleUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get an element in an ArrowArrayView as an ArrowStringView\n///\n/// This function does not check for null values.\nstatic inline struct ArrowStringView ArrowArrayViewGetStringUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get an element in an ArrowArrayView as an ArrowBufferView\n///\n/// This function does not check for null values.\nstatic inline struct ArrowBufferView ArrowArrayViewGetBytesUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i);\n\n/// \\brief Get an element in an ArrowArrayView as an ArrowDecimal\n///\n/// This function does not check for null values. The out parameter must\n/// be initialized with ArrowDecimalInit() with the proper parameters for this\n/// type before calling this for the first time.\nstatic inline void ArrowArrayViewGetDecimalUnsafe(const struct ArrowArrayView* array_view,\n                                                  int64_t i, struct ArrowDecimal* out);\n\n/// @}\n\n/// \\defgroup nanoarrow-basic-array-stream Basic ArrowArrayStream implementation\n///\n/// An implementation of an ArrowArrayStream based on a collection of\n/// zero or more previously-existing ArrowArray objects. Users should\n/// initialize and/or validate the contents before transferring the\n/// responsibility of the ArrowArrayStream elsewhere.\n///\n/// @{\n\n/// \\brief Initialize an ArrowArrayStream backed by this implementation\n///\n/// This function moves the ownership of schema to the array_stream. If\n/// this function returns NANOARROW_OK, the caller is responsible for\n/// releasing the ArrowArrayStream.\nNANOARROW_DLL ArrowErrorCode ArrowBasicArrayStreamInit(\n    struct ArrowArrayStream* array_stream, struct ArrowSchema* schema, int64_t n_arrays);\n\n/// \\brief Set the ith ArrowArray in this ArrowArrayStream.\n///\n/// array_stream must have been initialized with ArrowBasicArrayStreamInit().\n/// This function moves the ownership of array to the array_stream. i must\n/// be greater than or equal to zero and less than the value of n_arrays passed in\n/// ArrowBasicArrayStreamInit(). Callers are not required to fill all\n/// n_arrays members (i.e., n_arrays is a maximum bound).\nNANOARROW_DLL void ArrowBasicArrayStreamSetArray(struct ArrowArrayStream* array_stream,\n                                                 int64_t i, struct ArrowArray* array);\n\n/// \\brief Validate the contents of this ArrowArrayStream\n///\n/// array_stream must have been initialized with ArrowBasicArrayStreamInit().\n/// This function uses ArrowArrayStreamInitFromSchema() and ArrowArrayStreamSetArray()\n/// to validate the contents of the arrays.\nNANOARROW_DLL ArrowErrorCode ArrowBasicArrayStreamValidate(\n    const struct ArrowArrayStream* array_stream, struct ArrowError* error);\n\n/// @}\n\n// Undefine ArrowErrorCode, which may have been defined to annotate functions that return\n// it to warn for an unused result.\n#if defined(ArrowErrorCode)\n#undef ArrowErrorCode\n#endif\n\n// Inline function definitions\n\n\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#ifndef NANOARROW_BUFFER_INLINE_H_INCLUDED\n#define NANOARROW_BUFFER_INLINE_H_INCLUDED\n\n#include <errno.h>\n#include <stdint.h>\n#include <string.h>\n\n\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n// Modified from Arrow C++ (1eb46f76) cpp/src/arrow/chunk_resolver.h#L133-L162\nstatic inline int64_t ArrowResolveChunk64(int64_t index, const int64_t* offsets,\n                                          int64_t lo, int64_t hi) {\n  // Similar to std::upper_bound(), but slightly different as our offsets\n  // array always starts with 0.\n  int64_t n = hi - lo;\n  // First iteration does not need to check for n > 1\n  // (lo < hi is guaranteed by the precondition).\n  NANOARROW_DCHECK(n > 1);\n  do {\n    const int64_t m = n >> 1;\n    const int64_t mid = lo + m;\n    if (index >= offsets[mid]) {\n      lo = mid;\n      n -= m;\n    } else {\n      n = m;\n    }\n  } while (n > 1);\n  return lo;\n}\n\nstatic inline int64_t ArrowResolveChunk32(int32_t index, const int32_t* offsets,\n                                          int32_t lo, int32_t hi) {\n  // Similar to std::upper_bound(), but slightly different as our offsets\n  // array always starts with 0.\n  int32_t n = hi - lo;\n  // First iteration does not need to check for n > 1\n  // (lo < hi is guaranteed by the precondition).\n  NANOARROW_DCHECK(n > 1);\n  do {\n    const int32_t m = n >> 1;\n    const int32_t mid = lo + m;\n    if (index >= offsets[mid]) {\n      lo = mid;\n      n -= m;\n    } else {\n      n = m;\n    }\n  } while (n > 1);\n  return lo;\n}\n\nstatic inline int64_t _ArrowGrowByFactor(int64_t current_capacity, int64_t new_capacity) {\n  int64_t doubled_capacity = current_capacity * 2;\n  if (doubled_capacity > new_capacity) {\n    return doubled_capacity;\n  } else {\n    return new_capacity;\n  }\n}\n\n// float to half float conversion, adapted from Arrow Go\n// https://github.com/apache/arrow/blob/main/go/arrow/float16/float16.go\nstatic inline uint16_t ArrowFloatToHalfFloat(float value) {\n  union {\n    float f;\n    uint32_t b;\n  } u;\n  u.f = value;\n\n  uint16_t sn = (uint16_t)((u.b >> 31) & 0x1);\n  uint16_t exp = (u.b >> 23) & 0xff;\n  int16_t res = (int16_t)(exp - 127 + 15);\n  uint16_t fc = (uint16_t)(u.b >> 13) & 0x3ff;\n\n  if (exp == 0) {\n    res = 0;\n  } else if (exp == 0xff) {\n    res = 0x1f;\n  } else if (res > 0x1e) {\n    res = 0x1f;\n    fc = 0;\n  } else if (res < 0x01) {\n    res = 0;\n    fc = 0;\n  }\n\n  return (uint16_t)((sn << 15) | (uint16_t)(res << 10) | fc);\n}\n\n// half float to float conversion, adapted from Arrow Go\n// https://github.com/apache/arrow/blob/main/go/arrow/float16/float16.go\nstatic inline float ArrowHalfFloatToFloat(uint16_t value) {\n  uint32_t sn = (uint32_t)((value >> 15) & 0x1);\n  uint32_t exp = (value >> 10) & 0x1f;\n  uint32_t res = exp + 127 - 15;\n  uint32_t fc = value & 0x3ff;\n\n  if (exp == 0) {\n    res = 0;\n  } else if (exp == 0x1f) {\n    res = 0xff;\n  }\n\n  union {\n    float f;\n    uint32_t b;\n  } u;\n  u.b = (uint32_t)(sn << 31) | (uint32_t)(res << 23) | (uint32_t)(fc << 13);\n  return u.f;\n}\n\nstatic inline void ArrowBufferInit(struct ArrowBuffer* buffer) {\n  buffer->data = NULL;\n  buffer->size_bytes = 0;\n  buffer->capacity_bytes = 0;\n  buffer->allocator = ArrowBufferAllocatorDefault();\n}\n\nstatic inline ArrowErrorCode ArrowBufferSetAllocator(\n    struct ArrowBuffer* buffer, struct ArrowBufferAllocator allocator) {\n  // This is not a perfect test for \"has a buffer already been allocated\"\n  // but is likely to catch most cases.\n  if (buffer->data == NULL) {\n    buffer->allocator = allocator;\n    return NANOARROW_OK;\n  } else {\n    return EINVAL;\n  }\n}\n\nstatic inline void ArrowBufferReset(struct ArrowBuffer* buffer) {\n  buffer->allocator.free(&buffer->allocator, (uint8_t*)buffer->data,\n                         buffer->capacity_bytes);\n  ArrowBufferInit(buffer);\n}\n\nstatic inline void ArrowBufferMove(struct ArrowBuffer* src, struct ArrowBuffer* dst) {\n  memcpy(dst, src, sizeof(struct ArrowBuffer));\n  src->data = NULL;\n  ArrowBufferInit(src);\n}\n\nstatic inline ArrowErrorCode ArrowBufferResize(struct ArrowBuffer* buffer,\n                                               int64_t new_size_bytes,\n                                               char shrink_to_fit) {\n  if (new_size_bytes < 0) {\n    return EINVAL;\n  }\n\n  int needs_reallocation = new_size_bytes > buffer->capacity_bytes ||\n                           (shrink_to_fit && new_size_bytes < buffer->capacity_bytes);\n\n  if (needs_reallocation) {\n    buffer->data = buffer->allocator.reallocate(&buffer->allocator, buffer->data,\n                                                buffer->capacity_bytes, new_size_bytes);\n\n    if (buffer->data == NULL && new_size_bytes > 0) {\n      buffer->capacity_bytes = 0;\n      buffer->size_bytes = 0;\n      return ENOMEM;\n    }\n\n    buffer->capacity_bytes = new_size_bytes;\n  }\n\n  buffer->size_bytes = new_size_bytes;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowBufferReserve(struct ArrowBuffer* buffer,\n                                                int64_t additional_size_bytes) {\n  int64_t min_capacity_bytes = buffer->size_bytes + additional_size_bytes;\n  if (min_capacity_bytes <= buffer->capacity_bytes) {\n    return NANOARROW_OK;\n  }\n\n  int64_t new_capacity_bytes =\n      _ArrowGrowByFactor(buffer->capacity_bytes, min_capacity_bytes);\n  buffer->data = buffer->allocator.reallocate(&buffer->allocator, buffer->data,\n                                              buffer->capacity_bytes, new_capacity_bytes);\n\n  if (buffer->data == NULL && new_capacity_bytes > 0) {\n    buffer->capacity_bytes = 0;\n    buffer->size_bytes = 0;\n    return ENOMEM;\n  }\n\n  buffer->capacity_bytes = new_capacity_bytes;\n  return NANOARROW_OK;\n}\n\nstatic inline void ArrowBufferAppendUnsafe(struct ArrowBuffer* buffer, const void* data,\n                                           int64_t size_bytes) {\n  if (size_bytes > 0) {\n    NANOARROW_DCHECK(buffer->data != NULL);\n    memcpy(buffer->data + buffer->size_bytes, data, size_bytes);\n    buffer->size_bytes += size_bytes;\n  }\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppend(struct ArrowBuffer* buffer,\n                                               const void* data, int64_t size_bytes) {\n  NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(buffer, size_bytes));\n\n  ArrowBufferAppendUnsafe(buffer, data, size_bytes);\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendInt8(struct ArrowBuffer* buffer,\n                                                   int8_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(int8_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendUInt8(struct ArrowBuffer* buffer,\n                                                    uint8_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(uint8_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendInt16(struct ArrowBuffer* buffer,\n                                                    int16_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(int16_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendUInt16(struct ArrowBuffer* buffer,\n                                                     uint16_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(uint16_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendInt32(struct ArrowBuffer* buffer,\n                                                    int32_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(int32_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendUInt32(struct ArrowBuffer* buffer,\n                                                     uint32_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(uint32_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendInt64(struct ArrowBuffer* buffer,\n                                                    int64_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(int64_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendUInt64(struct ArrowBuffer* buffer,\n                                                     uint64_t value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(uint64_t));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendDouble(struct ArrowBuffer* buffer,\n                                                     double value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(double));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendFloat(struct ArrowBuffer* buffer,\n                                                    float value) {\n  return ArrowBufferAppend(buffer, &value, sizeof(float));\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendStringView(struct ArrowBuffer* buffer,\n                                                         struct ArrowStringView value) {\n  return ArrowBufferAppend(buffer, value.data, value.size_bytes);\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendBufferView(struct ArrowBuffer* buffer,\n                                                         struct ArrowBufferView value) {\n  return ArrowBufferAppend(buffer, value.data.data, value.size_bytes);\n}\n\nstatic inline ArrowErrorCode ArrowBufferAppendFill(struct ArrowBuffer* buffer,\n                                                   uint8_t value, int64_t size_bytes) {\n  if (size_bytes == 0) {\n    return NANOARROW_OK;\n  }\n\n  NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(buffer, size_bytes));\n\n  NANOARROW_DCHECK(buffer->data != NULL);  // To help clang-tidy\n  memset(buffer->data + buffer->size_bytes, value, size_bytes);\n  buffer->size_bytes += size_bytes;\n\n  return NANOARROW_OK;\n}\n\nstatic const uint8_t _ArrowkBitmask[] = {1, 2, 4, 8, 16, 32, 64, 128};\nstatic const uint8_t _ArrowkFlippedBitmask[] = {254, 253, 251, 247, 239, 223, 191, 127};\nstatic const uint8_t _ArrowkPrecedingBitmask[] = {0, 1, 3, 7, 15, 31, 63, 127};\nstatic const uint8_t _ArrowkTrailingBitmask[] = {255, 254, 252, 248, 240, 224, 192, 128};\n\nstatic const uint8_t _ArrowkBytePopcount[] = {\n    0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3,\n    4, 4, 5, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4,\n    4, 5, 4, 5, 5, 6, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4,\n    5, 3, 4, 4, 5, 4, 5, 5, 6, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4, 5,\n    4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2,\n    3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5,\n    5, 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4,\n    5, 4, 5, 5, 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 3, 4, 4, 5, 4, 5, 5, 6,\n    4, 5, 5, 6, 5, 6, 6, 7, 4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6, 7, 7, 8};\n\nstatic inline int64_t _ArrowRoundUpToMultipleOf8(int64_t value) {\n  return (value + 7) & ~((int64_t)7);\n}\n\nstatic inline int64_t _ArrowRoundDownToMultipleOf8(int64_t value) {\n  return (value / 8) * 8;\n}\n\nstatic inline int64_t _ArrowBytesForBits(int64_t bits) {\n  return (bits >> 3) + ((bits & 7) != 0);\n}\n\nstatic inline void _ArrowBitsUnpackInt8(const uint8_t word, int8_t* out) {\n  out[0] = (word & 0x1) != 0;\n  out[1] = (word & 0x2) != 0;\n  out[2] = (word & 0x4) != 0;\n  out[3] = (word & 0x8) != 0;\n  out[4] = (word & 0x10) != 0;\n  out[5] = (word & 0x20) != 0;\n  out[6] = (word & 0x40) != 0;\n  out[7] = (word & 0x80) != 0;\n}\n\nstatic inline void _ArrowBitsUnpackInt32(const uint8_t word, int32_t* out) {\n  out[0] = (word & 0x1) != 0;\n  out[1] = (word & 0x2) != 0;\n  out[2] = (word & 0x4) != 0;\n  out[3] = (word & 0x8) != 0;\n  out[4] = (word & 0x10) != 0;\n  out[5] = (word & 0x20) != 0;\n  out[6] = (word & 0x40) != 0;\n  out[7] = (word & 0x80) != 0;\n}\n\nstatic inline void _ArrowBitmapPackInt8(const int8_t* values, uint8_t* out) {\n  *out = (uint8_t)(values[0] | ((values[1] + 0x1) & 0x2) | ((values[2] + 0x3) & 0x4) |\n                   ((values[3] + 0x7) & 0x8) | ((values[4] + 0xf) & 0x10) |\n                   ((values[5] + 0x1f) & 0x20) | ((values[6] + 0x3f) & 0x40) |\n                   ((values[7] + 0x7f) & 0x80));\n}\n\nstatic inline void _ArrowBitmapPackInt32(const int32_t* values, uint8_t* out) {\n  *out = (uint8_t)(values[0] | ((values[1] + 0x1) & 0x2) | ((values[2] + 0x3) & 0x4) |\n                   ((values[3] + 0x7) & 0x8) | ((values[4] + 0xf) & 0x10) |\n                   ((values[5] + 0x1f) & 0x20) | ((values[6] + 0x3f) & 0x40) |\n                   ((values[7] + 0x7f) & 0x80));\n}\n\nstatic inline int8_t ArrowBitGet(const uint8_t* bits, int64_t i) {\n  return (bits[i >> 3] >> (i & 0x07)) & 1;\n}\n\nstatic inline void ArrowBitsUnpackInt8(const uint8_t* bits, int64_t start_offset,\n                                       int64_t length, int8_t* out) {\n  if (length == 0) {\n    return;\n  }\n\n  const int64_t i_begin = start_offset;\n  const int64_t i_end = start_offset + length;\n  const int64_t i_last_valid = i_end - 1;\n\n  const int64_t bytes_begin = i_begin / 8;\n  const int64_t bytes_last_valid = i_last_valid / 8;\n\n  if (bytes_begin == bytes_last_valid) {\n    for (int i = 0; i < length; i++) {\n      out[i] = ArrowBitGet(&bits[bytes_begin], i + i_begin % 8);\n    }\n\n    return;\n  }\n\n  // first byte\n  for (int i = 0; i < 8 - (i_begin % 8); i++) {\n    *out++ = ArrowBitGet(&bits[bytes_begin], i + i_begin % 8);\n  }\n\n  // middle bytes\n  for (int64_t i = bytes_begin + 1; i < bytes_last_valid; i++) {\n    _ArrowBitsUnpackInt8(bits[i], out);\n    out += 8;\n  }\n\n  // last byte\n  const int bits_remaining = (int)(i_end % 8 == 0 ? 8 : i_end % 8);\n  for (int i = 0; i < bits_remaining; i++) {\n    *out++ = ArrowBitGet(&bits[bytes_last_valid], i);\n  }\n}\n\nstatic inline void ArrowBitsUnpackInt32(const uint8_t* bits, int64_t start_offset,\n                                        int64_t length, int32_t* out) {\n  if (length == 0) {\n    return;\n  }\n\n  NANOARROW_DCHECK(bits != NULL && out != NULL);\n\n  const int64_t i_begin = start_offset;\n  const int64_t i_end = start_offset + length;\n  const int64_t i_last_valid = i_end - 1;\n\n  const int64_t bytes_begin = i_begin / 8;\n  const int64_t bytes_last_valid = i_last_valid / 8;\n\n  if (bytes_begin == bytes_last_valid) {\n    for (int i = 0; i < length; i++) {\n      out[i] = ArrowBitGet(&bits[bytes_begin], i + i_begin % 8);\n    }\n\n    return;\n  }\n\n  // first byte\n  for (int i = 0; i < 8 - (i_begin % 8); i++) {\n    *out++ = ArrowBitGet(&bits[bytes_begin], i + i_begin % 8);\n  }\n\n  // middle bytes\n  for (int64_t i = bytes_begin + 1; i < bytes_last_valid; i++) {\n    _ArrowBitsUnpackInt32(bits[i], out);\n    out += 8;\n  }\n\n  // last byte\n  const int bits_remaining = (int)(i_end % 8 == 0 ? 8 : i_end % 8);\n  for (int i = 0; i < bits_remaining; i++) {\n    *out++ = ArrowBitGet(&bits[bytes_last_valid], i);\n  }\n}\n\nstatic inline void ArrowBitSet(uint8_t* bits, int64_t i) {\n  bits[i / 8] |= _ArrowkBitmask[i % 8];\n}\n\nstatic inline void ArrowBitClear(uint8_t* bits, int64_t i) {\n  bits[i / 8] &= _ArrowkFlippedBitmask[i % 8];\n}\n\nstatic inline void ArrowBitSetTo(uint8_t* bits, int64_t i, uint8_t bit_is_set) {\n  bits[i / 8] ^= (uint8_t)(((uint8_t)(-((uint8_t)(bit_is_set != 0)) ^ bits[i / 8])) &\n                           _ArrowkBitmask[i % 8]);\n}\n\nstatic inline void ArrowBitsSetTo(uint8_t* bits, int64_t start_offset, int64_t length,\n                                  uint8_t bits_are_set) {\n  if (length == 0) {\n    return;\n  }\n\n  NANOARROW_DCHECK(bits != NULL);\n\n  const int64_t i_begin = start_offset;\n  const int64_t i_end = start_offset + length;\n  const uint8_t fill_byte = (uint8_t)(-bits_are_set);\n\n  const int64_t bytes_begin = i_begin / 8;\n  const int64_t bytes_end = i_end / 8 + 1;\n\n  const uint8_t first_byte_mask = _ArrowkPrecedingBitmask[i_begin % 8];\n  const uint8_t last_byte_mask = _ArrowkTrailingBitmask[i_end % 8];\n\n  if (bytes_end == bytes_begin + 1) {\n    // set bits within a single byte\n    const uint8_t only_byte_mask =\n        i_end % 8 == 0 ? first_byte_mask : (uint8_t)(first_byte_mask | last_byte_mask);\n    bits[bytes_begin] &= only_byte_mask;\n    bits[bytes_begin] |= (uint8_t)(fill_byte & ~only_byte_mask);\n    return;\n  }\n\n  // set/clear trailing bits of first byte\n  bits[bytes_begin] &= first_byte_mask;\n  bits[bytes_begin] |= (uint8_t)(fill_byte & ~first_byte_mask);\n\n  if (bytes_end - bytes_begin > 2) {\n    // set/clear whole bytes\n    memset(bits + bytes_begin + 1, fill_byte, (size_t)(bytes_end - bytes_begin - 2));\n  }\n\n  if (i_end % 8 == 0) {\n    return;\n  }\n\n  // set/clear leading bits of last byte\n  bits[bytes_end - 1] &= last_byte_mask;\n  bits[bytes_end - 1] |= (uint8_t)(fill_byte & ~last_byte_mask);\n}\n\nstatic inline int64_t ArrowBitCountSet(const uint8_t* bits, int64_t start_offset,\n                                       int64_t length) {\n  if (length == 0) {\n    return 0;\n  }\n\n  NANOARROW_DCHECK(bits != NULL);\n\n  const int64_t i_begin = start_offset;\n  const int64_t i_end = start_offset + length;\n  const int64_t i_last_valid = i_end - 1;\n\n  const int64_t bytes_begin = i_begin / 8;\n  const int64_t bytes_last_valid = i_last_valid / 8;\n\n  if (bytes_begin == bytes_last_valid) {\n    // count bits within a single byte\n    const uint8_t first_byte_mask = _ArrowkPrecedingBitmask[i_end % 8];\n    const uint8_t last_byte_mask = _ArrowkTrailingBitmask[i_begin % 8];\n\n    const uint8_t only_byte_mask =\n        i_end % 8 == 0 ? last_byte_mask : (uint8_t)(first_byte_mask & last_byte_mask);\n\n    const uint8_t byte_masked = bits[bytes_begin] & only_byte_mask;\n    return _ArrowkBytePopcount[byte_masked];\n  }\n\n  const uint8_t first_byte_mask = _ArrowkPrecedingBitmask[i_begin % 8];\n  const uint8_t last_byte_mask = i_end % 8 == 0 ? 0 : _ArrowkTrailingBitmask[i_end % 8];\n  int64_t count = 0;\n\n  // first byte\n  count += _ArrowkBytePopcount[bits[bytes_begin] & ~first_byte_mask];\n\n  // middle bytes\n  for (int64_t i = bytes_begin + 1; i < bytes_last_valid; i++) {\n    count += _ArrowkBytePopcount[bits[i]];\n  }\n\n  // last byte\n  count += _ArrowkBytePopcount[bits[bytes_last_valid] & ~last_byte_mask];\n\n  return count;\n}\n\nstatic inline void ArrowBitmapInit(struct ArrowBitmap* bitmap) {\n  ArrowBufferInit(&bitmap->buffer);\n  bitmap->size_bits = 0;\n}\n\nstatic inline void ArrowBitmapMove(struct ArrowBitmap* src, struct ArrowBitmap* dst) {\n  ArrowBufferMove(&src->buffer, &dst->buffer);\n  dst->size_bits = src->size_bits;\n  src->size_bits = 0;\n}\n\nstatic inline ArrowErrorCode ArrowBitmapReserve(struct ArrowBitmap* bitmap,\n                                                int64_t additional_size_bits) {\n  int64_t min_capacity_bits = bitmap->size_bits + additional_size_bits;\n  int64_t min_capacity_bytes = _ArrowBytesForBits(min_capacity_bits);\n  int64_t current_size_bytes = bitmap->buffer.size_bytes;\n  int64_t current_capacity_bytes = bitmap->buffer.capacity_bytes;\n\n  if (min_capacity_bytes <= current_capacity_bytes) {\n    return NANOARROW_OK;\n  }\n\n  int64_t additional_capacity_bytes = min_capacity_bytes - current_size_bytes;\n  NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(&bitmap->buffer, additional_capacity_bytes));\n\n  // Zero out the last byte for deterministic output in the common case\n  // of reserving a known remaining size. We should have returned above\n  // if there was not at least one additional byte to allocate; however,\n  // DCHECK() just to be sure.\n  NANOARROW_DCHECK(bitmap->buffer.capacity_bytes > current_capacity_bytes);\n  bitmap->buffer.data[bitmap->buffer.capacity_bytes - 1] = 0;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowBitmapResize(struct ArrowBitmap* bitmap,\n                                               int64_t new_size_bits,\n                                               char shrink_to_fit) {\n  if (new_size_bits < 0) {\n    return EINVAL;\n  }\n\n  int64_t new_size_bytes = _ArrowBytesForBits(new_size_bits);\n  NANOARROW_RETURN_NOT_OK(\n      ArrowBufferResize(&bitmap->buffer, new_size_bytes, shrink_to_fit));\n\n  bitmap->size_bits = new_size_bits;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowBitmapAppend(struct ArrowBitmap* bitmap,\n                                               uint8_t bits_are_set, int64_t length) {\n  NANOARROW_RETURN_NOT_OK(ArrowBitmapReserve(bitmap, length));\n\n  ArrowBitmapAppendUnsafe(bitmap, bits_are_set, length);\n  return NANOARROW_OK;\n}\n\nstatic inline void ArrowBitmapAppendUnsafe(struct ArrowBitmap* bitmap,\n                                           uint8_t bits_are_set, int64_t length) {\n  ArrowBitsSetTo(bitmap->buffer.data, bitmap->size_bits, length, bits_are_set);\n  bitmap->size_bits += length;\n  bitmap->buffer.size_bytes = _ArrowBytesForBits(bitmap->size_bits);\n}\n\nstatic inline void ArrowBitmapAppendInt8Unsafe(struct ArrowBitmap* bitmap,\n                                               const int8_t* values, int64_t n_values) {\n  if (n_values == 0) {\n    return;\n  }\n\n  NANOARROW_DCHECK(bitmap->buffer.data != NULL);\n  NANOARROW_DCHECK(values != NULL);\n\n  const int8_t* values_cursor = values;\n  int64_t n_remaining = n_values;\n  int64_t out_i_cursor = bitmap->size_bits;\n  uint8_t* out_cursor = bitmap->buffer.data + bitmap->size_bits / 8;\n\n  // First byte\n  if ((out_i_cursor % 8) != 0) {\n    int64_t n_partial_bits = _ArrowRoundUpToMultipleOf8(out_i_cursor) - out_i_cursor;\n    for (int i = 0; i < n_partial_bits; i++) {\n      ArrowBitSetTo(bitmap->buffer.data, out_i_cursor++, values[i]);\n    }\n\n    out_cursor++;\n    values_cursor += n_partial_bits;\n    n_remaining -= n_partial_bits;\n  }\n\n  // Middle bytes\n  int64_t n_full_bytes = n_remaining / 8;\n  for (int64_t i = 0; i < n_full_bytes; i++) {\n    _ArrowBitmapPackInt8(values_cursor, out_cursor);\n    values_cursor += 8;\n    out_cursor++;\n  }\n\n  // Last byte\n  out_i_cursor += n_full_bytes * 8;\n  n_remaining -= n_full_bytes * 8;\n  if (n_remaining > 0) {\n    // Zero out the last byte\n    *out_cursor = 0x00;\n    for (int i = 0; i < n_remaining; i++) {\n      ArrowBitSetTo(bitmap->buffer.data, out_i_cursor++, values_cursor[i]);\n    }\n    out_cursor++;\n  }\n\n  bitmap->size_bits += n_values;\n  bitmap->buffer.size_bytes = out_cursor - bitmap->buffer.data;\n}\n\nstatic inline void ArrowBitmapAppendInt32Unsafe(struct ArrowBitmap* bitmap,\n                                                const int32_t* values, int64_t n_values) {\n  if (n_values == 0) {\n    return;\n  }\n\n  NANOARROW_DCHECK(bitmap->buffer.data != NULL);\n  NANOARROW_DCHECK(values != NULL);\n\n  const int32_t* values_cursor = values;\n  int64_t n_remaining = n_values;\n  int64_t out_i_cursor = bitmap->size_bits;\n  uint8_t* out_cursor = bitmap->buffer.data + bitmap->size_bits / 8;\n\n  // First byte\n  if ((out_i_cursor % 8) != 0) {\n    int64_t n_partial_bits = _ArrowRoundUpToMultipleOf8(out_i_cursor) - out_i_cursor;\n    for (int i = 0; i < n_partial_bits; i++) {\n      ArrowBitSetTo(bitmap->buffer.data, out_i_cursor++, (uint8_t)values[i]);\n    }\n\n    out_cursor++;\n    values_cursor += n_partial_bits;\n    n_remaining -= n_partial_bits;\n  }\n\n  // Middle bytes\n  int64_t n_full_bytes = n_remaining / 8;\n  for (int64_t i = 0; i < n_full_bytes; i++) {\n    _ArrowBitmapPackInt32(values_cursor, out_cursor);\n    values_cursor += 8;\n    out_cursor++;\n  }\n\n  // Last byte\n  out_i_cursor += n_full_bytes * 8;\n  n_remaining -= n_full_bytes * 8;\n  if (n_remaining > 0) {\n    // Zero out the last byte\n    *out_cursor = 0x00;\n    for (int i = 0; i < n_remaining; i++) {\n      ArrowBitSetTo(bitmap->buffer.data, out_i_cursor++, (uint8_t)values_cursor[i]);\n    }\n    out_cursor++;\n  }\n\n  bitmap->size_bits += n_values;\n  bitmap->buffer.size_bytes = out_cursor - bitmap->buffer.data;\n}\n\nstatic inline void ArrowBitmapReset(struct ArrowBitmap* bitmap) {\n  ArrowBufferReset(&bitmap->buffer);\n  bitmap->size_bits = 0;\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE file\n// distributed with this work for additional information\n// regarding copyright ownership.  The ASF licenses this file\n// to you under the Apache License, Version 2.0 (the\n// \"License\"); you may not use this file except in compliance\n// with the License.  You may obtain a copy of the License at\n//\n//   http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing,\n// software distributed under the License is distributed on an\n// \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, either express or implied.  See the License for the\n// specific language governing permissions and limitations\n// under the License.\n\n#ifndef NANOARROW_ARRAY_INLINE_H_INCLUDED\n#define NANOARROW_ARRAY_INLINE_H_INCLUDED\n\n#include <errno.h>\n#include <float.h>\n#include <limits.h>\n#include <stdint.h>\n#include <string.h>\n\n\n\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\nstatic inline struct ArrowBitmap* ArrowArrayValidityBitmap(struct ArrowArray* array) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  return &private_data->bitmap;\n}\n\nstatic inline struct ArrowBuffer* ArrowArrayBuffer(struct ArrowArray* array, int64_t i) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  switch (i) {\n    case 0:\n      return &private_data->bitmap.buffer;\n    case 1:\n      return private_data->buffers;\n    default:\n      if (array->n_buffers > 3 && i == (array->n_buffers - 1)) {\n        // The variadic buffer sizes buffer if for a BinaryView/String view array\n        // is always stored in private_data->buffers[1]; however, from the numbered\n        // buffers perspective this is the array->buffers[array->n_buffers - 1].\n        return private_data->buffers + 1;\n      } else if (array->n_buffers > 3) {\n        // If there are one or more variadic buffers, they are stored in\n        // private_data->variadic_buffers\n        return private_data->variadic_buffers + (i - 2);\n      } else {\n        // Otherwise, we're just accessing buffer at index 2 (e.g., String/Binary\n        // data buffer or variadic sizes buffer for the case where there are no\n        // variadic buffers)\n        NANOARROW_DCHECK(i == 2);\n        return private_data->buffers + i - 1;\n      }\n  }\n}\n\n// We don't currently support the case of unions where type_id != child_index;\n// however, these functions are used to keep track of where that assumption\n// is made.\nstatic inline int8_t _ArrowArrayUnionChildIndex(struct ArrowArray* array,\n                                                int8_t type_id) {\n  NANOARROW_UNUSED(array);\n  return type_id;\n}\n\nstatic inline int8_t _ArrowArrayUnionTypeId(struct ArrowArray* array,\n                                            int8_t child_index) {\n  NANOARROW_UNUSED(array);\n  return child_index;\n}\n\nstatic inline int32_t _ArrowParseUnionTypeIds(const char* type_ids, int8_t* out) {\n  if (*type_ids == '\\0') {\n    return 0;\n  }\n\n  int32_t i = 0;\n  long type_id;\n  char* end_ptr;\n  do {\n    type_id = strtol(type_ids, &end_ptr, 10);\n    if (end_ptr == type_ids || type_id < 0 || type_id > 127) {\n      return -1;\n    }\n\n    if (out != NULL) {\n      out[i] = (int8_t)type_id;\n    }\n\n    i++;\n\n    type_ids = end_ptr;\n    if (*type_ids == '\\0') {\n      return i;\n    } else if (*type_ids != ',') {\n      return -1;\n    } else {\n      type_ids++;\n    }\n  } while (1);\n\n  return -1;\n}\n\nstatic inline int8_t _ArrowParsedUnionTypeIdsWillEqualChildIndices(const int8_t* type_ids,\n                                                                   int64_t n_type_ids,\n                                                                   int64_t n_children) {\n  if (n_type_ids != n_children) {\n    return 0;\n  }\n\n  for (int8_t i = 0; i < n_type_ids; i++) {\n    if (type_ids[i] != i) {\n      return 0;\n    }\n  }\n\n  return 1;\n}\n\nstatic inline int8_t _ArrowUnionTypeIdsWillEqualChildIndices(const char* type_id_str,\n                                                             int64_t n_children) {\n  int8_t type_ids[128];\n  int32_t n_type_ids = _ArrowParseUnionTypeIds(type_id_str, type_ids);\n  return _ArrowParsedUnionTypeIdsWillEqualChildIndices(type_ids, n_type_ids, n_children);\n}\n\nstatic inline ArrowErrorCode ArrowArrayStartAppending(struct ArrowArray* array) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_UNINITIALIZED:\n      return EINVAL;\n    case NANOARROW_TYPE_SPARSE_UNION:\n    case NANOARROW_TYPE_DENSE_UNION:\n      // Note that this value could be -1 if the type_ids string was invalid\n      if (private_data->union_type_id_is_child_index != 1) {\n        return EINVAL;\n      } else {\n        break;\n      }\n    default:\n      break;\n  }\n  if (private_data->storage_type == NANOARROW_TYPE_UNINITIALIZED) {\n    return EINVAL;\n  }\n\n  // Initialize any data offset buffer with a single zero\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    if (private_data->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_DATA_OFFSET &&\n        private_data->layout.element_size_bits[i] == 64) {\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt64(ArrowArrayBuffer(array, i), 0));\n    } else if (private_data->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_DATA_OFFSET &&\n               private_data->layout.element_size_bits[i] == 32) {\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(ArrowArrayBuffer(array, i), 0));\n    }\n  }\n\n  // Start building any child arrays or dictionaries\n  for (int64_t i = 0; i < array->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayStartAppending(array->children[i]));\n  }\n\n  if (array->dictionary != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayStartAppending(array->dictionary));\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayShrinkToFit(struct ArrowArray* array) {\n  for (int64_t i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    struct ArrowBuffer* buffer = ArrowArrayBuffer(array, i);\n    NANOARROW_RETURN_NOT_OK(ArrowBufferResize(buffer, buffer->size_bytes, 1));\n  }\n\n  for (int64_t i = 0; i < array->n_children; i++) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayShrinkToFit(array->children[i]));\n  }\n\n  if (array->dictionary != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowArrayShrinkToFit(array->dictionary));\n  }\n\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode _ArrowArrayAppendBits(struct ArrowArray* array,\n                                                   int64_t buffer_i, uint8_t value,\n                                                   int64_t n) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  struct ArrowBuffer* buffer = ArrowArrayBuffer(array, buffer_i);\n  int64_t bytes_required =\n      _ArrowRoundUpToMultipleOf8(private_data->layout.element_size_bits[buffer_i] *\n                                 (array->length + 1)) /\n      8;\n  if (bytes_required > buffer->size_bytes) {\n    NANOARROW_RETURN_NOT_OK(\n        ArrowBufferAppendFill(buffer, 0, bytes_required - buffer->size_bytes));\n  }\n\n  ArrowBitsSetTo(buffer->data, array->length, n, value);\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode _ArrowArrayAppendEmptyInternal(struct ArrowArray* array,\n                                                            int64_t n, uint8_t is_valid) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  if (n == 0) {\n    return NANOARROW_OK;\n  }\n\n  // Some type-specific handling\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_NA:\n      // (An empty value for a null array *is* a null)\n      array->null_count += n;\n      array->length += n;\n      return NANOARROW_OK;\n\n    case NANOARROW_TYPE_DENSE_UNION: {\n      // Add one null to the first child and append n references to that child\n      int8_t type_id = _ArrowArrayUnionTypeId(array, 0);\n      NANOARROW_RETURN_NOT_OK(\n          _ArrowArrayAppendEmptyInternal(array->children[0], 1, is_valid));\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendFill(ArrowArrayBuffer(array, 0), type_id, n));\n      for (int64_t i = 0; i < n; i++) {\n        NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(\n            ArrowArrayBuffer(array, 1), (int32_t)array->children[0]->length - 1));\n      }\n      // For the purposes of array->null_count, union elements are never considered \"null\"\n      // even if some children contain nulls.\n      array->length += n;\n      return NANOARROW_OK;\n    }\n\n    case NANOARROW_TYPE_SPARSE_UNION: {\n      // Add n nulls to the first child and append n references to that child\n      int8_t type_id = _ArrowArrayUnionTypeId(array, 0);\n      NANOARROW_RETURN_NOT_OK(\n          _ArrowArrayAppendEmptyInternal(array->children[0], n, is_valid));\n      for (int64_t i = 1; i < array->n_children; i++) {\n        NANOARROW_RETURN_NOT_OK(ArrowArrayAppendEmpty(array->children[i], n));\n      }\n\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendFill(ArrowArrayBuffer(array, 0), type_id, n));\n      // For the purposes of array->null_count, union elements are never considered \"null\"\n      // even if some children contain nulls.\n      array->length += n;\n      return NANOARROW_OK;\n    }\n\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      NANOARROW_RETURN_NOT_OK(ArrowArrayAppendEmpty(\n          array->children[0], n * private_data->layout.child_size_elements));\n      break;\n    case NANOARROW_TYPE_STRUCT:\n      for (int64_t i = 0; i < array->n_children; i++) {\n        NANOARROW_RETURN_NOT_OK(ArrowArrayAppendEmpty(array->children[i], n));\n      }\n      break;\n\n    default:\n      break;\n  }\n\n  // Append n is_valid bits to the validity bitmap. If we haven't allocated a bitmap yet\n  // and we need to append nulls, do it now.\n  if (!is_valid && private_data->bitmap.buffer.data == NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapReserve(&private_data->bitmap, array->length + n));\n    ArrowBitmapAppendUnsafe(&private_data->bitmap, 1, array->length);\n    ArrowBitmapAppendUnsafe(&private_data->bitmap, is_valid, n);\n  } else if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapReserve(&private_data->bitmap, n));\n    ArrowBitmapAppendUnsafe(&private_data->bitmap, is_valid, n);\n  }\n\n  // Add appropriate buffer fill\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    struct ArrowBuffer* buffer = ArrowArrayBuffer(array, i);\n    int64_t size_bytes = private_data->layout.element_size_bits[i] / 8;\n\n    switch (private_data->layout.buffer_type[i]) {\n      case NANOARROW_BUFFER_TYPE_NONE:\n      case NANOARROW_BUFFER_TYPE_VARIADIC_DATA:\n      case NANOARROW_BUFFER_TYPE_VARIADIC_SIZE:\n      case NANOARROW_BUFFER_TYPE_VALIDITY:\n        // These buffer types don't require initialization for empty appends:\n        // - NONE: No buffer exists\n        // - VARIADIC_*: Handled by child arrays\n        // - VALIDITY: Already handled in previous bitmap logic\n        break;\n\n      case NANOARROW_BUFFER_TYPE_SIZE:\n        // Size buffers (e.g., string/array lengths) should be zero-initialized:\n        // This ensures empty elements have logical zero-length\n        NANOARROW_RETURN_NOT_OK(ArrowBufferAppendFill(buffer, 0, size_bytes * n));\n        break;\n\n      case NANOARROW_BUFFER_TYPE_DATA_OFFSET:\n        // Offset buffers require special handling to maintain continuity.\n        // 1. Reserve space for new offset entries\n        NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(buffer, size_bytes * n));\n\n        // 2. Duplicate last offset value for each new (empty) element\n        for (int64_t j = 0; j < n; j++) {\n          ArrowBufferAppendUnsafe(buffer, buffer->data + size_bytes * (array->length + j),\n                                  size_bytes);\n        }\n\n        // 3. Skip next buffer (DATA) since it's paired with offsets\n        //    Rationale: Offset buffers are always followed by data buffers\n        //    that don't require separate initialization here\n        i++;\n        break;\n\n      case NANOARROW_BUFFER_TYPE_DATA:\n        // Fixed-width data buffers require zero-initialization:\n        if (private_data->layout.element_size_bits[i] % 8 == 0) {\n          // Byte-aligned: use efficient memset-style fill\n          NANOARROW_RETURN_NOT_OK(ArrowBufferAppendFill(buffer, 0, size_bytes * n));\n        } else {\n          // Bit-packed: use special bitwise initialization\n          NANOARROW_RETURN_NOT_OK(_ArrowArrayAppendBits(array, i, 0, n));\n        }\n        break;\n\n      case NANOARROW_BUFFER_TYPE_VIEW_OFFSET:\n        // View offset buffers (for string/binary view types) require zero-initialization.\n        NANOARROW_RETURN_NOT_OK(ArrowBufferReserve(buffer, size_bytes * n));\n        NANOARROW_RETURN_NOT_OK(ArrowBufferAppendFill(buffer, 0, size_bytes * n));\n        break;\n\n      case NANOARROW_BUFFER_TYPE_TYPE_ID:\n      case NANOARROW_BUFFER_TYPE_UNION_OFFSET:\n        // These buffer types should have been handled by the outer type switch and\n        // are not expected here, indicating an internal logic error.\n        return EINVAL;\n    }\n  }\n\n  array->length += n;\n  array->null_count += n * !is_valid;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendNull(struct ArrowArray* array, int64_t n) {\n  return _ArrowArrayAppendEmptyInternal(array, n, 0);\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendEmpty(struct ArrowArray* array, int64_t n) {\n  return _ArrowArrayAppendEmptyInternal(array, n, 1);\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendInt(struct ArrowArray* array,\n                                                 int64_t value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  struct ArrowBuffer* data_buffer = ArrowArrayBuffer(array, 1);\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_INT64:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppend(data_buffer, &value, sizeof(int64_t)));\n      break;\n    case NANOARROW_TYPE_INT32:\n      _NANOARROW_CHECK_RANGE(value, INT32_MIN, INT32_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(data_buffer, (int32_t)value));\n      break;\n    case NANOARROW_TYPE_INT16:\n      _NANOARROW_CHECK_RANGE(value, INT16_MIN, INT16_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt16(data_buffer, (int16_t)value));\n      break;\n    case NANOARROW_TYPE_INT8:\n      _NANOARROW_CHECK_RANGE(value, INT8_MIN, INT8_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt8(data_buffer, (int8_t)value));\n      break;\n    case NANOARROW_TYPE_UINT64:\n    case NANOARROW_TYPE_UINT32:\n    case NANOARROW_TYPE_UINT16:\n    case NANOARROW_TYPE_UINT8:\n      _NANOARROW_CHECK_RANGE(value, 0, INT64_MAX);\n      return ArrowArrayAppendUInt(array, value);\n    case NANOARROW_TYPE_DOUBLE:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendDouble(data_buffer, (double)value));\n      break;\n    case NANOARROW_TYPE_FLOAT:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendFloat(data_buffer, (float)value));\n      break;\n    case NANOARROW_TYPE_HALF_FLOAT:\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendUInt16(data_buffer, ArrowFloatToHalfFloat((float)value)));\n      break;\n    case NANOARROW_TYPE_BOOL:\n      NANOARROW_RETURN_NOT_OK(_ArrowArrayAppendBits(array, 1, value != 0, 1));\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendUInt(struct ArrowArray* array,\n                                                  uint64_t value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  struct ArrowBuffer* data_buffer = ArrowArrayBuffer(array, 1);\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_UINT64:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppend(data_buffer, &value, sizeof(uint64_t)));\n      break;\n    case NANOARROW_TYPE_UINT32:\n      _NANOARROW_CHECK_UPPER_LIMIT(value, UINT32_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendUInt32(data_buffer, (uint32_t)value));\n      break;\n    case NANOARROW_TYPE_UINT16:\n      _NANOARROW_CHECK_UPPER_LIMIT(value, UINT16_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendUInt16(data_buffer, (uint16_t)value));\n      break;\n    case NANOARROW_TYPE_UINT8:\n      _NANOARROW_CHECK_UPPER_LIMIT(value, UINT8_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendUInt8(data_buffer, (uint8_t)value));\n      break;\n    case NANOARROW_TYPE_INT64:\n    case NANOARROW_TYPE_INT32:\n    case NANOARROW_TYPE_INT16:\n    case NANOARROW_TYPE_INT8:\n      _NANOARROW_CHECK_UPPER_LIMIT(value, INT64_MAX);\n      return ArrowArrayAppendInt(array, value);\n    case NANOARROW_TYPE_DOUBLE:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendDouble(data_buffer, (double)value));\n      break;\n    case NANOARROW_TYPE_FLOAT:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendFloat(data_buffer, (float)value));\n      break;\n    case NANOARROW_TYPE_HALF_FLOAT:\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendUInt16(data_buffer, ArrowFloatToHalfFloat((float)value)));\n      break;\n    case NANOARROW_TYPE_BOOL:\n      NANOARROW_RETURN_NOT_OK(_ArrowArrayAppendBits(array, 1, value != 0, 1));\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendDouble(struct ArrowArray* array,\n                                                    double value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  struct ArrowBuffer* data_buffer = ArrowArrayBuffer(array, 1);\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_DOUBLE:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppend(data_buffer, &value, sizeof(double)));\n      break;\n    case NANOARROW_TYPE_FLOAT:\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendFloat(data_buffer, (float)value));\n      break;\n    case NANOARROW_TYPE_HALF_FLOAT:\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendUInt16(data_buffer, ArrowFloatToHalfFloat((float)value)));\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\n// Binary views only have two fixed buffers, but be aware that they must also\n// always have more 1 buffer to store variadic buffer sizes (even if there are none)\n#define NANOARROW_BINARY_VIEW_FIXED_BUFFERS 2\n#define NANOARROW_BINARY_VIEW_INLINE_SIZE 12\n#define NANOARROW_BINARY_VIEW_PREFIX_SIZE 4\n#define NANOARROW_BINARY_VIEW_BLOCK_SIZE (32 << 10)  // 32KB\n\n// The Arrow C++ implementation uses anonymous structs as members\n// of the ArrowBinaryView. For Cython support in this library, we define\n// those structs outside of the ArrowBinaryView\nstruct ArrowBinaryViewInlined {\n  int32_t size;\n  uint8_t data[NANOARROW_BINARY_VIEW_INLINE_SIZE];\n};\n\nstruct ArrowBinaryViewRef {\n  int32_t size;\n  uint8_t prefix[NANOARROW_BINARY_VIEW_PREFIX_SIZE];\n  int32_t buffer_index;\n  int32_t offset;\n};\n\nunion ArrowBinaryView {\n  struct ArrowBinaryViewInlined inlined;\n  struct ArrowBinaryViewRef ref;\n  int64_t alignment_dummy;\n};\n\nstatic inline int32_t ArrowArrayVariadicBufferCount(struct ArrowArray* array) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  return private_data->n_variadic_buffers;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAddVariadicBuffers(struct ArrowArray* array,\n                                                          int32_t n_buffers) {\n  const int32_t n_current_bufs = ArrowArrayVariadicBufferCount(array);\n  const int32_t nvariadic_bufs_needed = n_current_bufs + n_buffers;\n\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  private_data->variadic_buffers = (struct ArrowBuffer*)ArrowRealloc(\n      private_data->variadic_buffers, sizeof(struct ArrowBuffer) * nvariadic_bufs_needed);\n  if (private_data->variadic_buffers == NULL) {\n    return ENOMEM;\n  }\n\n  private_data->n_variadic_buffers = nvariadic_bufs_needed;\n  array->n_buffers = NANOARROW_BINARY_VIEW_FIXED_BUFFERS + 1 + nvariadic_bufs_needed;\n\n  private_data->buffer_data = (const void**)ArrowRealloc(\n      private_data->buffer_data, array->n_buffers * sizeof(void*));\n\n  for (int32_t i = n_current_bufs; i < nvariadic_bufs_needed; i++) {\n    ArrowBufferInit(&private_data->variadic_buffers[i]);\n    private_data->buffer_data[NANOARROW_BINARY_VIEW_FIXED_BUFFERS + i] = NULL;\n  }\n\n  // Zero out memory for the final buffer (variadic sizes buffer we haven't built yet)\n  private_data->buffer_data[NANOARROW_BINARY_VIEW_FIXED_BUFFERS + nvariadic_bufs_needed] =\n      NULL;\n\n  // Ensure array->buffers points to a valid value\n  array->buffers = private_data->buffer_data;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendBytes(struct ArrowArray* array,\n                                                   struct ArrowBufferView value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  if (private_data->storage_type == NANOARROW_TYPE_STRING_VIEW ||\n      private_data->storage_type == NANOARROW_TYPE_BINARY_VIEW) {\n    struct ArrowBuffer* data_buffer = ArrowArrayBuffer(array, 1);\n    union ArrowBinaryView bvt;\n    bvt.inlined.size = (int32_t)value.size_bytes;\n\n    if (value.size_bytes <= NANOARROW_BINARY_VIEW_INLINE_SIZE) {\n      memcpy(bvt.inlined.data, value.data.as_char, value.size_bytes);\n      memset(bvt.inlined.data + bvt.inlined.size, 0,\n             NANOARROW_BINARY_VIEW_INLINE_SIZE - bvt.inlined.size);\n    } else {\n      int32_t current_n_vbufs = ArrowArrayVariadicBufferCount(array);\n      if (current_n_vbufs == 0 ||\n          private_data->variadic_buffers[current_n_vbufs - 1].size_bytes +\n                  value.size_bytes >\n              NANOARROW_BINARY_VIEW_BLOCK_SIZE) {\n        const int32_t additional_bufs_needed = 1;\n        NANOARROW_RETURN_NOT_OK(\n            ArrowArrayAddVariadicBuffers(array, additional_bufs_needed));\n        current_n_vbufs += additional_bufs_needed;\n      }\n\n      const int32_t buf_index = current_n_vbufs - 1;\n      struct ArrowBuffer* variadic_buf = &private_data->variadic_buffers[buf_index];\n      memcpy(bvt.ref.prefix, value.data.as_char, NANOARROW_BINARY_VIEW_PREFIX_SIZE);\n      bvt.ref.buffer_index = (int32_t)buf_index;\n      bvt.ref.offset = (int32_t)variadic_buf->size_bytes;\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppend(variadic_buf, value.data.as_char, value.size_bytes));\n    }\n    NANOARROW_RETURN_NOT_OK(ArrowBufferAppend(data_buffer, &bvt, sizeof(bvt)));\n  } else {\n    struct ArrowBuffer* offset_buffer = ArrowArrayBuffer(array, 1);\n    struct ArrowBuffer* data_buffer = ArrowArrayBuffer(\n        array, 1 + (private_data->storage_type != NANOARROW_TYPE_FIXED_SIZE_BINARY));\n    int32_t offset;\n    int64_t large_offset;\n    int64_t fixed_size_bytes = private_data->layout.element_size_bits[1] / 8;\n\n    switch (private_data->storage_type) {\n      case NANOARROW_TYPE_STRING:\n      case NANOARROW_TYPE_BINARY:\n        offset = ((int32_t*)offset_buffer->data)[array->length];\n        if ((((int64_t)offset) + value.size_bytes) > INT32_MAX) {\n          return EOVERFLOW;\n        }\n\n        offset += (int32_t)value.size_bytes;\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(offset_buffer, &offset, sizeof(int32_t)));\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value.data.data, value.size_bytes));\n        break;\n\n      case NANOARROW_TYPE_LARGE_STRING:\n      case NANOARROW_TYPE_LARGE_BINARY:\n        large_offset = ((int64_t*)offset_buffer->data)[array->length];\n        large_offset += value.size_bytes;\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(offset_buffer, &large_offset, sizeof(int64_t)));\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value.data.data, value.size_bytes));\n        break;\n\n      case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n        if (value.size_bytes != fixed_size_bytes) {\n          return EINVAL;\n        }\n\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value.data.data, value.size_bytes));\n        break;\n      default:\n        return EINVAL;\n    }\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendString(struct ArrowArray* array,\n                                                    struct ArrowStringView value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  struct ArrowBufferView buffer_view;\n  buffer_view.data.data = value.data;\n  buffer_view.size_bytes = value.size_bytes;\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_LARGE_STRING:\n    case NANOARROW_TYPE_STRING_VIEW:\n    case NANOARROW_TYPE_BINARY:\n    case NANOARROW_TYPE_LARGE_BINARY:\n    case NANOARROW_TYPE_BINARY_VIEW:\n      return ArrowArrayAppendBytes(array, buffer_view);\n    default:\n      return EINVAL;\n  }\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendInterval(struct ArrowArray* array,\n                                                      const struct ArrowInterval* value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  struct ArrowBuffer* data_buffer = ArrowArrayBuffer(array, 1);\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_INTERVAL_MONTHS: {\n      if (value->type != NANOARROW_TYPE_INTERVAL_MONTHS) {\n        return EINVAL;\n      }\n\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(data_buffer, value->months));\n      break;\n    }\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME: {\n      if (value->type != NANOARROW_TYPE_INTERVAL_DAY_TIME) {\n        return EINVAL;\n      }\n\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(data_buffer, value->days));\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(data_buffer, value->ms));\n      break;\n    }\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO: {\n      if (value->type != NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO) {\n        return EINVAL;\n      }\n\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(data_buffer, value->months));\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(data_buffer, value->days));\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt64(data_buffer, value->ns));\n      break;\n    }\n    default:\n      return EINVAL;\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayAppendDecimal(struct ArrowArray* array,\n                                                     const struct ArrowDecimal* value) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n  struct ArrowBuffer* data_buffer = ArrowArrayBuffer(array, 1);\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_DECIMAL32:\n      if (value->n_words != 0) {\n        return EINVAL;\n      } else {\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value->words, sizeof(uint32_t)));\n        break;\n      }\n    case NANOARROW_TYPE_DECIMAL64:\n      if (value->n_words != 1) {\n        return EINVAL;\n      } else {\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value->words, sizeof(uint64_t)));\n        break;\n      }\n    case NANOARROW_TYPE_DECIMAL128:\n      if (value->n_words != 2) {\n        return EINVAL;\n      } else {\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value->words, 2 * sizeof(uint64_t)));\n        break;\n      }\n    case NANOARROW_TYPE_DECIMAL256:\n      if (value->n_words != 4) {\n        return EINVAL;\n      } else {\n        NANOARROW_RETURN_NOT_OK(\n            ArrowBufferAppend(data_buffer, value->words, 4 * sizeof(uint64_t)));\n        break;\n      }\n    default:\n      return EINVAL;\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayFinishElement(struct ArrowArray* array) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  int64_t child_length;\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_MAP:\n      child_length = array->children[0]->length;\n      if (child_length > INT32_MAX) {\n        return EOVERFLOW;\n      }\n\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendInt32(ArrowArrayBuffer(array, 1), (int32_t)child_length));\n      break;\n    case NANOARROW_TYPE_LARGE_LIST:\n      child_length = array->children[0]->length;\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendInt64(ArrowArrayBuffer(array, 1), child_length));\n      break;\n    case NANOARROW_TYPE_FIXED_SIZE_LIST:\n      child_length = array->children[0]->length;\n      if (child_length !=\n          ((array->length + 1) * private_data->layout.child_size_elements)) {\n        return EINVAL;\n      }\n      break;\n    case NANOARROW_TYPE_LIST_VIEW: {\n      child_length = array->children[0]->length;\n      if (child_length > INT32_MAX) {\n        return EOVERFLOW;\n      }\n\n      const int32_t last_valid_offset = (int32_t)private_data->list_view_offset;\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendInt32(ArrowArrayBuffer(array, 1), last_valid_offset));\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(\n          ArrowArrayBuffer(array, 2), (int32_t)child_length - last_valid_offset));\n      private_data->list_view_offset = child_length;\n      break;\n    }\n    case NANOARROW_TYPE_LARGE_LIST_VIEW: {\n      child_length = array->children[0]->length;\n      const int64_t last_valid_offset = private_data->list_view_offset;\n      NANOARROW_RETURN_NOT_OK(\n          ArrowBufferAppendInt64(ArrowArrayBuffer(array, 1), last_valid_offset));\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt64(ArrowArrayBuffer(array, 2),\n                                                     child_length - last_valid_offset));\n      private_data->list_view_offset = child_length;\n      break;\n    }\n\n    case NANOARROW_TYPE_STRUCT:\n      for (int64_t i = 0; i < array->n_children; i++) {\n        child_length = array->children[i]->length;\n        if (child_length != (array->length + 1)) {\n          return EINVAL;\n        }\n      }\n      break;\n    default:\n      return EINVAL;\n  }\n\n  if (private_data->bitmap.buffer.data != NULL) {\n    NANOARROW_RETURN_NOT_OK(ArrowBitmapAppend(ArrowArrayValidityBitmap(array), 1, 1));\n  }\n\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline ArrowErrorCode ArrowArrayFinishUnionElement(struct ArrowArray* array,\n                                                          int8_t type_id) {\n  struct ArrowArrayPrivateData* private_data =\n      (struct ArrowArrayPrivateData*)array->private_data;\n\n  int64_t child_index = _ArrowArrayUnionChildIndex(array, type_id);\n  if (child_index < 0 || child_index >= array->n_children) {\n    return EINVAL;\n  }\n\n  switch (private_data->storage_type) {\n    case NANOARROW_TYPE_DENSE_UNION:\n      // Append the target child length to the union offsets buffer\n      _NANOARROW_CHECK_RANGE(array->children[child_index]->length, 0, INT32_MAX);\n      NANOARROW_RETURN_NOT_OK(ArrowBufferAppendInt32(\n          ArrowArrayBuffer(array, 1), (int32_t)array->children[child_index]->length - 1));\n      break;\n    case NANOARROW_TYPE_SPARSE_UNION:\n      // Append one empty to any non-target column that isn't already the right length\n      // or abort if appending a null will result in a column with invalid length\n      for (int64_t i = 0; i < array->n_children; i++) {\n        if (i == child_index || array->children[i]->length == (array->length + 1)) {\n          continue;\n        }\n\n        if (array->children[i]->length != array->length) {\n          return EINVAL;\n        }\n\n        NANOARROW_RETURN_NOT_OK(ArrowArrayAppendEmpty(array->children[i], 1));\n      }\n\n      break;\n    default:\n      return EINVAL;\n  }\n\n  // Write to the type_ids buffer\n  NANOARROW_RETURN_NOT_OK(\n      ArrowBufferAppendInt8(ArrowArrayBuffer(array, 0), (int8_t)type_id));\n  array->length++;\n  return NANOARROW_OK;\n}\n\nstatic inline void ArrowArrayViewMove(struct ArrowArrayView* src,\n                                      struct ArrowArrayView* dst) {\n  memcpy(dst, src, sizeof(struct ArrowArrayView));\n  ArrowArrayViewInitFromType(src, NANOARROW_TYPE_UNINITIALIZED);\n}\n\nstatic inline int64_t ArrowArrayViewGetNumBuffers(struct ArrowArrayView* array_view) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      return NANOARROW_BINARY_VIEW_FIXED_BUFFERS + array_view->n_variadic_buffers + 1;\n    default:\n      break;\n  }\n\n  int64_t n_buffers = 0;\n  for (int i = 0; i < NANOARROW_MAX_FIXED_BUFFERS; i++) {\n    if (array_view->layout.buffer_type[i] == NANOARROW_BUFFER_TYPE_NONE) {\n      break;\n    }\n\n    n_buffers++;\n  }\n\n  return n_buffers;\n}\n\nstatic inline struct ArrowBufferView ArrowArrayViewGetBufferView(\n    struct ArrowArrayView* array_view, int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      if (i < NANOARROW_BINARY_VIEW_FIXED_BUFFERS) {\n        return array_view->buffer_views[i];\n      } else if (i >=\n                 (array_view->n_variadic_buffers + NANOARROW_BINARY_VIEW_FIXED_BUFFERS)) {\n        struct ArrowBufferView view;\n        view.data.as_int64 = array_view->variadic_buffer_sizes;\n        view.size_bytes = array_view->n_variadic_buffers * sizeof(double);\n        return view;\n      } else {\n        struct ArrowBufferView view;\n        view.data.data =\n            array_view->variadic_buffers[i - NANOARROW_BINARY_VIEW_FIXED_BUFFERS];\n        view.size_bytes =\n            array_view->variadic_buffer_sizes[i - NANOARROW_BINARY_VIEW_FIXED_BUFFERS];\n        return view;\n      }\n    default:\n      // We need this check to avoid -Warray-bounds from complaining\n      if (i >= NANOARROW_MAX_FIXED_BUFFERS) {\n        struct ArrowBufferView view;\n        view.data.data = NULL;\n        view.size_bytes = 0;\n        return view;\n      } else {\n        return array_view->buffer_views[i];\n      }\n  }\n}\n\nenum ArrowBufferType ArrowArrayViewGetBufferType(struct ArrowArrayView* array_view,\n                                                 int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      if (i < NANOARROW_BINARY_VIEW_FIXED_BUFFERS) {\n        return array_view->layout.buffer_type[i];\n      } else if (i ==\n                 (array_view->n_variadic_buffers + NANOARROW_BINARY_VIEW_FIXED_BUFFERS)) {\n        return NANOARROW_BUFFER_TYPE_VARIADIC_SIZE;\n      } else {\n        return NANOARROW_BUFFER_TYPE_VARIADIC_DATA;\n      }\n    default:\n      // We need this check to avoid -Warray-bounds from complaining\n      if (i >= NANOARROW_MAX_FIXED_BUFFERS) {\n        return NANOARROW_BUFFER_TYPE_NONE;\n      } else {\n        return array_view->layout.buffer_type[i];\n      }\n  }\n}\n\nstatic inline enum ArrowType ArrowArrayViewGetBufferDataType(\n    struct ArrowArrayView* array_view, int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      if (i < NANOARROW_BINARY_VIEW_FIXED_BUFFERS) {\n        return array_view->layout.buffer_data_type[i];\n      } else if (i >=\n                 (array_view->n_variadic_buffers + NANOARROW_BINARY_VIEW_FIXED_BUFFERS)) {\n        return NANOARROW_TYPE_INT64;\n      } else if (array_view->storage_type == NANOARROW_TYPE_BINARY_VIEW) {\n        return NANOARROW_TYPE_BINARY;\n      } else {\n        return NANOARROW_TYPE_STRING;\n      }\n    default:\n      // We need this check to avoid -Warray-bounds from complaining\n      if (i >= NANOARROW_MAX_FIXED_BUFFERS) {\n        return NANOARROW_TYPE_UNINITIALIZED;\n      } else {\n        return array_view->layout.buffer_data_type[i];\n      }\n  }\n}\n\nstatic inline int64_t ArrowArrayViewGetBufferElementSizeBits(\n    struct ArrowArrayView* array_view, int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_BINARY_VIEW:\n    case NANOARROW_TYPE_STRING_VIEW:\n      if (i < NANOARROW_BINARY_VIEW_FIXED_BUFFERS) {\n        return array_view->layout.element_size_bits[i];\n      } else if (i >=\n                 (array_view->n_variadic_buffers + NANOARROW_BINARY_VIEW_FIXED_BUFFERS)) {\n        return sizeof(int64_t) * 8;\n      } else {\n        return 0;\n      }\n    default:\n      // We need this check to avoid -Warray-bounds from complaining\n      if (i >= NANOARROW_MAX_FIXED_BUFFERS) {\n        return 0;\n      } else {\n        return array_view->layout.element_size_bits[i];\n      }\n  }\n}\n\nstatic inline int8_t ArrowArrayViewIsNull(const struct ArrowArrayView* array_view,\n                                          int64_t i) {\n  const uint8_t* validity_buffer = array_view->buffer_views[0].data.as_uint8;\n  i += array_view->offset;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_NA:\n      return 0x01;\n    case NANOARROW_TYPE_DENSE_UNION:\n    case NANOARROW_TYPE_SPARSE_UNION:\n      // Unions are \"never null\" in Arrow land\n      return 0x00;\n    default:\n      return validity_buffer != NULL && !ArrowBitGet(validity_buffer, i);\n  }\n}\n\nstatic inline int64_t ArrowArrayViewComputeNullCount(\n    const struct ArrowArrayView* array_view) {\n  if (array_view->length == 0) {\n    return 0;\n  }\n\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_NA:\n      return array_view->length;\n    case NANOARROW_TYPE_DENSE_UNION:\n    case NANOARROW_TYPE_SPARSE_UNION:\n      // Unions are \"never null\" in Arrow land\n      return 0;\n    default:\n      break;\n  }\n\n  const uint8_t* validity_buffer = array_view->buffer_views[0].data.as_uint8;\n  if (validity_buffer == NULL) {\n    return 0;\n  }\n  return array_view->length -\n         ArrowBitCountSet(validity_buffer, array_view->offset, array_view->length);\n}\n\nstatic inline int8_t ArrowArrayViewUnionTypeId(const struct ArrowArrayView* array_view,\n                                               int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_DENSE_UNION:\n    case NANOARROW_TYPE_SPARSE_UNION:\n      return array_view->buffer_views[0].data.as_int8[array_view->offset + i];\n    default:\n      return -1;\n  }\n}\n\nstatic inline int8_t ArrowArrayViewUnionChildIndex(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  int8_t type_id = ArrowArrayViewUnionTypeId(array_view, i);\n  if (array_view->union_type_id_map == NULL) {\n    return type_id;\n  } else {\n    return array_view->union_type_id_map[type_id];\n  }\n}\n\nstatic inline int64_t ArrowArrayViewUnionChildOffset(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_DENSE_UNION:\n      return array_view->buffer_views[1].data.as_int32[array_view->offset + i];\n    case NANOARROW_TYPE_SPARSE_UNION:\n      return array_view->offset + i;\n    default:\n      return -1;\n  }\n}\n\nstatic inline int64_t ArrowArrayViewListChildOffset(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_LIST:\n    case NANOARROW_TYPE_MAP:\n    case NANOARROW_TYPE_LIST_VIEW:\n      return array_view->buffer_views[1].data.as_int32[i];\n    case NANOARROW_TYPE_LARGE_LIST:\n    case NANOARROW_TYPE_LARGE_LIST_VIEW:\n      return array_view->buffer_views[1].data.as_int64[i];\n    default:\n      return -1;\n  }\n}\n\nstatic struct ArrowBufferView ArrowArrayViewGetBytesFromViewArrayUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  const union ArrowBinaryView* bv = &array_view->buffer_views[1].data.as_binary_view[i];\n  struct ArrowBufferView out = {{NULL}, bv->inlined.size};\n  if (bv->inlined.size <= NANOARROW_BINARY_VIEW_INLINE_SIZE) {\n    out.data.as_uint8 = bv->inlined.data;\n    return out;\n  }\n\n  out.data.data = array_view->variadic_buffers[bv->ref.buffer_index];\n  out.data.as_uint8 += bv->ref.offset;\n  return out;\n}\n\nstatic inline int64_t ArrowArrayViewGetIntUnsafe(const struct ArrowArrayView* array_view,\n                                                 int64_t i) {\n  const struct ArrowBufferView* data_view = &array_view->buffer_views[1];\n  i += array_view->offset;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_INT64:\n      return data_view->data.as_int64[i];\n    case NANOARROW_TYPE_UINT64:\n      return data_view->data.as_uint64[i];\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n    case NANOARROW_TYPE_INT32:\n      return data_view->data.as_int32[i];\n    case NANOARROW_TYPE_UINT32:\n      return data_view->data.as_uint32[i];\n    case NANOARROW_TYPE_INT16:\n      return data_view->data.as_int16[i];\n    case NANOARROW_TYPE_UINT16:\n      return data_view->data.as_uint16[i];\n    case NANOARROW_TYPE_INT8:\n      return data_view->data.as_int8[i];\n    case NANOARROW_TYPE_UINT8:\n      return data_view->data.as_uint8[i];\n    case NANOARROW_TYPE_DOUBLE:\n      return (int64_t)data_view->data.as_double[i];\n    case NANOARROW_TYPE_FLOAT:\n      return (int64_t)data_view->data.as_float[i];\n    case NANOARROW_TYPE_HALF_FLOAT:\n      return (int64_t)ArrowHalfFloatToFloat(data_view->data.as_uint16[i]);\n    case NANOARROW_TYPE_BOOL:\n      return ArrowBitGet(data_view->data.as_uint8, i);\n    default:\n      return INT64_MAX;\n  }\n}\n\nstatic inline uint64_t ArrowArrayViewGetUIntUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  i += array_view->offset;\n  const struct ArrowBufferView* data_view = &array_view->buffer_views[1];\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_INT64:\n      return data_view->data.as_int64[i];\n    case NANOARROW_TYPE_UINT64:\n      return data_view->data.as_uint64[i];\n    case NANOARROW_TYPE_INTERVAL_MONTHS:\n    case NANOARROW_TYPE_INT32:\n      return data_view->data.as_int32[i];\n    case NANOARROW_TYPE_UINT32:\n      return data_view->data.as_uint32[i];\n    case NANOARROW_TYPE_INT16:\n      return data_view->data.as_int16[i];\n    case NANOARROW_TYPE_UINT16:\n      return data_view->data.as_uint16[i];\n    case NANOARROW_TYPE_INT8:\n      return data_view->data.as_int8[i];\n    case NANOARROW_TYPE_UINT8:\n      return data_view->data.as_uint8[i];\n    case NANOARROW_TYPE_DOUBLE:\n      return (uint64_t)data_view->data.as_double[i];\n    case NANOARROW_TYPE_FLOAT:\n      return (uint64_t)data_view->data.as_float[i];\n    case NANOARROW_TYPE_HALF_FLOAT:\n      return (uint64_t)ArrowHalfFloatToFloat(data_view->data.as_uint16[i]);\n    case NANOARROW_TYPE_BOOL:\n      return ArrowBitGet(data_view->data.as_uint8, i);\n    default:\n      return UINT64_MAX;\n  }\n}\n\nstatic inline double ArrowArrayViewGetDoubleUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  i += array_view->offset;\n  const struct ArrowBufferView* data_view = &array_view->buffer_views[1];\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_INT64:\n      return (double)data_view->data.as_int64[i];\n    case NANOARROW_TYPE_UINT64:\n      return (double)data_view->data.as_uint64[i];\n    case NANOARROW_TYPE_INT32:\n      return data_view->data.as_int32[i];\n    case NANOARROW_TYPE_UINT32:\n      return data_view->data.as_uint32[i];\n    case NANOARROW_TYPE_INT16:\n      return data_view->data.as_int16[i];\n    case NANOARROW_TYPE_UINT16:\n      return data_view->data.as_uint16[i];\n    case NANOARROW_TYPE_INT8:\n      return data_view->data.as_int8[i];\n    case NANOARROW_TYPE_UINT8:\n      return data_view->data.as_uint8[i];\n    case NANOARROW_TYPE_DOUBLE:\n      return data_view->data.as_double[i];\n    case NANOARROW_TYPE_FLOAT:\n      return data_view->data.as_float[i];\n    case NANOARROW_TYPE_HALF_FLOAT:\n      return ArrowHalfFloatToFloat(data_view->data.as_uint16[i]);\n    case NANOARROW_TYPE_BOOL:\n      return ArrowBitGet(data_view->data.as_uint8, i);\n    default:\n      return DBL_MAX;\n  }\n}\n\nstatic inline struct ArrowStringView ArrowArrayViewGetStringUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  i += array_view->offset;\n  const struct ArrowBufferView* offsets_view = &array_view->buffer_views[1];\n  const char* data_view = array_view->buffer_views[2].data.as_char;\n\n  struct ArrowStringView view;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_BINARY:\n      view.data = data_view + offsets_view->data.as_int32[i];\n      view.size_bytes =\n          (int64_t)offsets_view->data.as_int32[i + 1] - offsets_view->data.as_int32[i];\n      break;\n    case NANOARROW_TYPE_LARGE_STRING:\n    case NANOARROW_TYPE_LARGE_BINARY:\n      view.data = data_view + offsets_view->data.as_int64[i];\n      view.size_bytes =\n          offsets_view->data.as_int64[i + 1] - offsets_view->data.as_int64[i];\n      break;\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n      view.size_bytes = array_view->layout.element_size_bits[1] / 8;\n      view.data = array_view->buffer_views[1].data.as_char + (i * view.size_bytes);\n      break;\n    case NANOARROW_TYPE_STRING_VIEW:\n    case NANOARROW_TYPE_BINARY_VIEW: {\n      struct ArrowBufferView buf_view =\n          ArrowArrayViewGetBytesFromViewArrayUnsafe(array_view, i);\n      view.data = buf_view.data.as_char;\n      view.size_bytes = buf_view.size_bytes;\n      break;\n    }\n    default:\n      view.data = NULL;\n      view.size_bytes = 0;\n      break;\n  }\n\n  return view;\n}\n\nstatic inline struct ArrowBufferView ArrowArrayViewGetBytesUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i) {\n  i += array_view->offset;\n  const struct ArrowBufferView* offsets_view = &array_view->buffer_views[1];\n  const uint8_t* data_view = array_view->buffer_views[2].data.as_uint8;\n\n  struct ArrowBufferView view;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_STRING:\n    case NANOARROW_TYPE_BINARY:\n      view.size_bytes =\n          (int64_t)offsets_view->data.as_int32[i + 1] - offsets_view->data.as_int32[i];\n      view.data.as_uint8 = data_view + offsets_view->data.as_int32[i];\n      break;\n    case NANOARROW_TYPE_LARGE_STRING:\n    case NANOARROW_TYPE_LARGE_BINARY:\n      view.size_bytes =\n          offsets_view->data.as_int64[i + 1] - offsets_view->data.as_int64[i];\n      view.data.as_uint8 = data_view + offsets_view->data.as_int64[i];\n      break;\n    case NANOARROW_TYPE_FIXED_SIZE_BINARY:\n      view.size_bytes = array_view->layout.element_size_bits[1] / 8;\n      view.data.as_uint8 =\n          array_view->buffer_views[1].data.as_uint8 + (i * view.size_bytes);\n      break;\n    case NANOARROW_TYPE_STRING_VIEW:\n    case NANOARROW_TYPE_BINARY_VIEW:\n      view = ArrowArrayViewGetBytesFromViewArrayUnsafe(array_view, i);\n      break;\n    default:\n      view.data.data = NULL;\n      view.size_bytes = 0;\n      break;\n  }\n\n  return view;\n}\n\nstatic inline void ArrowArrayViewGetIntervalUnsafe(\n    const struct ArrowArrayView* array_view, int64_t i, struct ArrowInterval* out) {\n  const uint8_t* data_view = array_view->buffer_views[1].data.as_uint8;\n  const int64_t offset = array_view->offset;\n  const int64_t index = offset + i;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_INTERVAL_MONTHS: {\n      const size_t size = sizeof(int32_t);\n      memcpy(&out->months, data_view + index * size, sizeof(int32_t));\n      break;\n    }\n    case NANOARROW_TYPE_INTERVAL_DAY_TIME: {\n      const size_t size = sizeof(int32_t) + sizeof(int32_t);\n      memcpy(&out->days, data_view + index * size, sizeof(int32_t));\n      memcpy(&out->ms, data_view + index * size + 4, sizeof(int32_t));\n      break;\n    }\n    case NANOARROW_TYPE_INTERVAL_MONTH_DAY_NANO: {\n      const size_t size = sizeof(int32_t) + sizeof(int32_t) + sizeof(int64_t);\n      memcpy(&out->months, data_view + index * size, sizeof(int32_t));\n      memcpy(&out->days, data_view + index * size + 4, sizeof(int32_t));\n      memcpy(&out->ns, data_view + index * size + 8, sizeof(int64_t));\n      break;\n    }\n    default:\n      break;\n  }\n}\n\nstatic inline void ArrowArrayViewGetDecimalUnsafe(const struct ArrowArrayView* array_view,\n                                                  int64_t i, struct ArrowDecimal* out) {\n  i += array_view->offset;\n  const uint8_t* data_view = array_view->buffer_views[1].data.as_uint8;\n  switch (array_view->storage_type) {\n    case NANOARROW_TYPE_DECIMAL32:\n      ArrowDecimalSetBytes(out, data_view + (i * 4));\n      break;\n    case NANOARROW_TYPE_DECIMAL64:\n      ArrowDecimalSetBytes(out, data_view + (i * 8));\n      break;\n    case NANOARROW_TYPE_DECIMAL128:\n      ArrowDecimalSetBytes(out, data_view + (i * 16));\n      break;\n    case NANOARROW_TYPE_DECIMAL256:\n      ArrowDecimalSetBytes(out, data_view + (i * 32));\n      break;\n    default:\n      memset(out->words, 0, sizeof(out->words));\n      break;\n  }\n}\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n"
  },
  {
    "path": "data/lang/cpp/pool.cpp",
    "content": "#include <string>\n#include <iostream>\n#include <sstream>\n#include <functional>\n#include <vector>\n#include <algorithm>\n#include <stdexcept>\n#include <fstream>\n#include <system_error>\n#include <unordered_map>\n#include <sys/stat.h>\n#include <sys/mman.h>\n\n// needed for foreign interface\n#include <cstdlib>\n#include <cstdio>\n#include <cstdint>\n#include <unistd.h>\n\n#include <limits>\n#include <utility>\n\n\nusing namespace std;\n\nchar* g_tmpdir;\n\nuint8_t* foreign_call(const char* socket_filename, size_t mid, ...) __attribute__((sentinel));\n\n// AUTO include statements start\n// <<<BREAK>>>\n// AUTO include statements end\n\n// Proper linking of cppmorloc requires it be included AFTER the custom modules\n#include \"mlc_arrow.hpp\"\n#include \"cppmorloc.hpp\"\n\n#define PROPAGATE_ERROR(errmsg) \\\n    if(errmsg != NULL) { \\\n      char errmsg_buffer[MAX_ERRMSG_SIZE] = { 0 }; \\\n      snprintf(errmsg_buffer, MAX_ERRMSG_SIZE, \"Error C++ pool (%s:%d in %s):\\n%s\" , __FILE__, __LINE__, __func__, errmsg); \\\n      free(errmsg); \\\n      throw std::runtime_error(errmsg_buffer); \\\n    }\n\n#define PROPAGATE_FAIL_PACKET(errmsg) \\\n    if(errmsg != NULL){ \\\n        uint8_t* fail_packet_ = make_fail_packet(errmsg); \\\n        free(errmsg); \\\n        return fail_packet_; \\\n    }\n\n\n// AUTO serialization statements start\n// <<<BREAK>>>\n// AUTO serialization statements end\n\n\n\nstd::string interweave_strings(const std::vector<std::string>& first, const std::vector<std::string>& second)\n{\n    // Validate sizes - errors here indicate a bug in the morloc compiler\n    if (first.size() != second.size() + 1) {\n        throw std::invalid_argument(\"First list must have exactly 1 more element than second list\");\n    }\n\n    // Pre-calculate total size to avoid reallocations\n    size_t total_size = 0;\n    for (const auto& s : first) total_size += s.size();\n    for (const auto& s : second) total_size += s.size();\n\n    std::string result;\n    result.reserve(total_size);\n\n    // Interweave the strings\n    for (size_t i = 0; i < second.size(); ++i) {\n        result += first[i];\n        result += second[i];\n    }\n    result += first.back();  // Append the final element from first list\n\n    return result;\n}\n\n// Thread-local list of SHM pointers allocated by _put_value.\n// Freed after foreign_call returns (args consumed) or at next dispatch start\n// (result consumed by caller in the synchronous call that returned it).\nstruct ShmEntry { absptr_t ptr; Schema* schema; };\nthread_local std::vector<ShmEntry> _shm_tracker;\n\nstatic void _flush_shm_tracker() {\n    for (auto& e : _shm_tracker) {\n        char* err = NULL;\n        // Only do recursive sub-freeing if we have a schema and this is\n        // the last reference. NULL schema entries (from foreign_call result\n        // tracking) just decrement the refcount.\n        block_header_t* blk = (block_header_t*)((char*)e.ptr - sizeof(block_header_t));\n        if (e.schema && blk->reference_count <= 1) {\n            shfree_by_schema(e.ptr, e.schema, &err);\n            if (err) { free(err); err = NULL; }\n        }\n        shfree(e.ptr, &err);\n        if (err) { free(err); }\n    }\n    _shm_tracker.clear();\n}\n\n// Thread-local schema cache: avoids re-parsing the same schema strings\nSchema* get_cached_schema(const char* schema_str) {\n    thread_local std::unordered_map<std::string, Schema*> cache;\n    auto it = cache.find(schema_str);\n    if (it != cache.end()) return it->second;\n    Schema* schema = parse_schema_cpp(schema_str);\n    cache[schema_str] = schema;\n    return schema;\n}\n\n// Transforms a serialized value into a message ready for the socket\ntemplate <typename T>\nuint8_t* _put_value(const T& value, const std::string& schema_str) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n\n    if constexpr (std::is_same_v<T, mlc::ArrowTable>) {\n        // Arrow export: move table data into SHM, build packet.\n        // const_cast is safe here: the value is always a temporary from\n        // a manifold call, never a truly const object.\n        mlc::ArrowTable& tbl = const_cast<mlc::ArrowTable&>(value);\n        relptr_t relptr = tbl.move_to_shm();\n\n        uint8_t* packet = make_arrow_data_packet(relptr, schema);\n        if (!packet) { throw std::runtime_error(\"Failed to create arrow data packet\"); }\n\n        char* err = nullptr;\n        void* shm_ptr = rel2abs(relptr, &err);\n        if (err) { free(err); }\n        if (shm_ptr) { _shm_tracker.push_back({(absptr_t)shm_ptr, nullptr}); }\n        return packet;\n    } else {\n        // Arrow dispatch: if schema hint is \"arrow\", the C++ type must be mlc::ArrowTable\n        if (schema->hint && strcmp(schema->hint, \"arrow\") == 0) {\n            throw std::runtime_error(\"Arrow schema but C++ type is not mlc::ArrowTable\");\n        }\n\n        void* voidstar = nullptr;\n        try {\n            voidstar = toAnything(schema, value);\n            relptr_t relptr = abs2rel_cpp(voidstar);\n\n            char* errmsg = nullptr;\n            uint8_t* packet = make_data_packet_auto(voidstar, relptr, schema, &errmsg);\n            if (errmsg) {\n                shfree_cpp(voidstar);\n                PROPAGATE_ERROR(errmsg);\n            }\n\n            const morloc_packet_header_t* hdr = (const morloc_packet_header_t*)packet;\n            if (hdr->command.data.source == PACKET_SOURCE_RPTR) {\n                // SHM referenced by packet -- track for deferred cleanup\n                _shm_tracker.push_back({(absptr_t)voidstar, schema});\n            } else {\n                // Data inlined in packet -- free SHM immediately\n                char* free_err = NULL;\n                shfree_by_schema((absptr_t)voidstar, schema, &free_err);\n                if (free_err) { free(free_err); free_err = NULL; }\n                shfree((absptr_t)voidstar, &free_err);\n                if (free_err) { free(free_err); }\n            }\n            return packet;\n        } catch (...) {\n            if (voidstar) shfree_cpp(voidstar);\n            throw;\n        }\n    }\n}\n\n\n// Use a key to retrieve a value\ntemplate <typename T>\nT _get_value(const uint8_t* packet, const std::string& schema_str){\n    const morloc_packet_header_t* header = (const morloc_packet_header_t*)packet;\n    uint8_t source = header->command.data.source;\n    uint8_t format = header->command.data.format;\n\n    if constexpr (std::is_same_v<T, mlc::ArrowTable>) {\n        // Arrow import: packet -> arrow_from_shm -> ArrowTable\n        Schema* schema = get_cached_schema(schema_str.c_str());\n        char* errmsg = nullptr;\n        uint8_t* raw = get_morloc_data_packet_value(packet, schema, &errmsg);\n        if (errmsg) { PROPAGATE_ERROR(errmsg); }\n\n        const arrow_shm_header_t* hdr = (const arrow_shm_header_t*)raw;\n        struct ArrowSchema as;\n        struct ArrowArray aa;\n        char* aerr = nullptr;\n        arrow_from_shm(hdr, &as, &aa, &aerr);\n        if (aerr) { PROPAGATE_ERROR(aerr); }\n\n        char* ierr = nullptr;\n        shincref((absptr_t)raw, &ierr);\n        if (ierr) { free(ierr); }\n        _shm_tracker.push_back({(absptr_t)raw, nullptr});\n\n        return mlc::ArrowTable(std::move(as), std::move(aa));\n    } else {\n        if (format == PACKET_FORMAT_ARROW) {\n            throw std::runtime_error(\"Arrow data but C++ type is not mlc::ArrowTable\");\n        }\n\n        Schema* schema = get_cached_schema(schema_str.c_str());\n\n        // Fast path: inline voidstar -- read directly from packet, no SHM needed\n        if (source == PACKET_SOURCE_MESG && format == PACKET_FORMAT_VOIDSTAR) {\n            const uint8_t* payload = packet + sizeof(morloc_packet_header_t) + header->offset;\n            T* dummy = nullptr;\n            return fromAnything(schema, (const void*)payload, dummy, (const void*)payload);\n        }\n\n        // SHM paths (RPTR or MESG+MSGPACK): existing logic\n        bool is_rptr = (source == PACKET_SOURCE_RPTR);\n\n        char* errmsg = NULL;\n        uint8_t* voidstar = get_morloc_data_packet_value(packet, schema, &errmsg);\n        if(errmsg != NULL) {\n            PROPAGATE_ERROR(errmsg)\n        }\n\n        // For RPTR data, increment refcount so the owner's tracker flush\n        // won't destroy data we may still need (e.g. forwarded packets).\n        if (is_rptr) {\n            char* incref_err = NULL;\n            shincref((absptr_t)voidstar, &incref_err);\n            if (incref_err) { free(incref_err); }\n            _shm_tracker.push_back({(absptr_t)voidstar, schema});\n        }\n\n        T* dummy = nullptr;\n        return fromAnything(schema, (void*)voidstar, dummy);\n    }\n}\n\n\n// Hash a value, returning a 16-char hex string\ntemplate <typename T>\nstd::string _mlc_hash(const T& value, const std::string& schema_str) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    void* voidstar = toAnything(schema, value);\n    char* errmsg = NULL;\n    char* hex = mlc_hash(voidstar, schema, &errmsg);\n    shfree_cpp(voidstar);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n    std::string result(hex);\n    free(hex);\n    return result;\n}\n\n// Save a value to file in msgpack format\ntemplate <typename T>\nvoid _mlc_save(const T& value, const std::string& schema_str, const std::string& path) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    void* voidstar = toAnything(schema, value);\n    char* errmsg = NULL;\n    mlc_save(voidstar, schema, path.c_str(), &errmsg);\n    shfree_cpp(voidstar);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n}\n\n// Save a value to file in flat voidstar binary format\ntemplate <typename T>\nvoid _mlc_save_voidstar(const T& value, const std::string& schema_str, const std::string& path) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    void* voidstar = toAnything(schema, value);\n    char* errmsg = NULL;\n    mlc_save_voidstar(voidstar, schema, path.c_str(), &errmsg);\n    shfree_cpp(voidstar);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n}\n\n// Save a value to file in JSON format\ntemplate <typename T>\nvoid _mlc_save_json(const T& value, const std::string& schema_str, const std::string& path) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    void* voidstar = toAnything(schema, value);\n    char* errmsg = NULL;\n    mlc_save_json(voidstar, schema, path.c_str(), &errmsg);\n    shfree_cpp(voidstar);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n}\n\n// Serialize a value to a JSON string\ntemplate <typename T>\nstd::string _mlc_show(const T& value, const std::string& schema_str) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    void* voidstar = toAnything(schema, value);\n    char* errmsg = NULL;\n    char* json = mlc_show(voidstar, schema, &errmsg);\n    shfree_cpp(voidstar);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n    std::string result(json);\n    free(json);\n    return result;\n}\n\n// Deserialize a JSON string to a typed value\n// Returns std::nullopt on parse failure\ntemplate <typename T>\nstd::optional<T> _mlc_read(const std::string& schema_str, const std::string& json_str) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    char* errmsg = NULL;\n    void* voidstar = mlc_read(json_str.c_str(), schema, &errmsg);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n    if (voidstar == NULL) {\n        return std::nullopt;\n    }\n    T* dummy = nullptr;\n    T result = fromAnything(schema, voidstar, dummy);\n    shfree_cpp(voidstar);\n    return result;\n}\n\n// Load a value from file, auto-detecting format\n// Returns std::nullopt if file does not exist\ntemplate <typename T>\nstd::optional<T> _mlc_load(const std::string& schema_str, const std::string& path) {\n    Schema* schema = get_cached_schema(schema_str.c_str());\n    char* errmsg = NULL;\n    void* voidstar = mlc_load(path.c_str(), schema, &errmsg);\n    if (errmsg != NULL) {\n        PROPAGATE_ERROR(errmsg)\n    }\n    if (voidstar == NULL) {\n        return std::nullopt;\n    }\n    T* dummy = nullptr;\n    T result = fromAnything(schema, voidstar, dummy);\n    shfree_cpp(voidstar);\n    return result;\n}\n\nuint8_t* foreign_call(const char* socket_filename, size_t mid, ...) {\n    char* errmsg = NULL;\n    va_list args;\n    size_t nargs = 0;\n\n    char socket_path[128];\n    snprintf(socket_path, sizeof(socket_path), \"%s/%s\", g_tmpdir, socket_filename);\n\n    // Count arguments (must be NULL-terminated)\n    va_start(args, mid);\n    while (va_arg(args, uint8_t*) != NULL) nargs++;\n    va_end(args);\n\n    // Allocate and populate args array\n    const uint8_t** args_array = (const uint8_t**)malloc((nargs + 1) * sizeof(uint8_t*));\n    if (!args_array) throw std::runtime_error(\"malloc failed in foreign_call\");\n\n    va_start(args, mid);\n    for (size_t i = 0; i < nargs; i++) {\n        args_array[i] = va_arg(args, uint8_t*);\n    }\n    args_array[nargs] = NULL;  // Sentinel\n    va_end(args);\n\n    // Original logic with variadic args converted to array\n    uint8_t* packet = make_morloc_local_call_packet((uint32_t)mid, args_array, nargs, &errmsg);\n    if (errmsg != NULL) {\n        free(args_array);\n        PROPAGATE_ERROR(errmsg)\n    }\n\n    pool_mark_busy();\n    uint8_t* result = send_and_receive_over_socket(socket_path, packet, &errmsg);\n    pool_mark_idle();\n\n    free(packet);\n\n    if (errmsg != NULL) {\n        free(args_array);\n        PROPAGATE_ERROR(errmsg)\n    }\n\n    // Incref the result's SHM so the callee's tracker flush won't destroy\n    // data we may still need (e.g. forwarded result packets).\n    {\n        const morloc_packet_header_t* res_header = (const morloc_packet_header_t*)result;\n        if (res_header->command.data.source == PACKET_SOURCE_RPTR) {\n            size_t relptr = *(size_t*)(result + res_header->offset + sizeof(morloc_packet_header_t));\n            char* resolve_err = NULL;\n            void* res_voidstar = rel2abs(relptr, &resolve_err);\n            if (resolve_err) { free(resolve_err); resolve_err = NULL; }\n            if (res_voidstar) {\n                char* incref_err = NULL;\n                shincref((absptr_t)res_voidstar, &incref_err);\n                if (incref_err) { free(incref_err); }\n                _shm_tracker.push_back({(absptr_t)res_voidstar, nullptr});\n            }\n        }\n    }\n\n    free(args_array);\n    return result;\n}\n\n\n\n// AUTO signatures statements start\n// <<<BREAK>>>\n// AUTO signatures statements end\n\n\n\n// AUTO manifolds statements start\n// <<<BREAK>>>\n// AUTO manifolds statements end\n\n\n\n// AUTO dispatch start\n// <<<BREAK>>>\n// AUTO dispatch end\n\n\n// Wrappers to adapt compiler-generated dispatch functions to pool_dispatch_fn_t.\n// These catch C++ exceptions so the C pool_main never sees them.\nstatic uint8_t* cpp_local_dispatch(uint32_t mid, const uint8_t** args,\n                                    size_t nargs, void* ctx) {\n    (void)nargs; (void)ctx;\n    // Free SHM from previous dispatch (result packet consumed by caller)\n    _flush_shm_tracker();\n    try {\n        return local_dispatch(mid, args);\n    } catch (const std::exception& e) {\n        return make_fail_packet(e.what());\n    } catch (...) {\n        return make_fail_packet(\"An unknown error occurred\");\n    }\n}\n\nstatic uint8_t* cpp_remote_dispatch(uint32_t mid, const uint8_t** args,\n                                     size_t nargs, void* ctx) {\n    (void)nargs; (void)ctx;\n    try {\n        return remote_dispatch(mid, args);\n    } catch (const std::exception& e) {\n        return make_fail_packet(e.what());\n    } catch (...) {\n        return make_fail_packet(\"An unknown error occurred\");\n    }\n}\n\n\nint main(int argc, char* argv[]) {\n    // Line-buffer stderr so diagnostic output is not lost on pool shutdown.\n    // stdout is left fully buffered for performance (genome-scale piping)\n    // and flushed after each job by pool.c.\n    setvbuf(stderr, NULL, _IOLBF, 0);\n\n    // Health check: confirm binary links and print version\n    if (argc == 2 && std::string(argv[1]) == \"--health\") {\n        std::cout << \"{\\\"status\\\":\\\"ok\\\",\\\"version\\\":\\\"__MORLOC_VERSION__\\\"}\" << std::endl;\n        return 0;\n    }\n\n    if (argc != 4) {\n        std::cerr << \"Usage: \" << argv[0] << \" <socket_path> <tmpdir> <shm_basename>\\n\";\n        return 1;\n    }\n\n    g_tmpdir = strdup(argv[2]);\n\n    pool_config_t config = {};\n    config.local_dispatch = cpp_local_dispatch;\n    config.remote_dispatch = cpp_remote_dispatch;\n    config.dispatch_ctx = NULL;\n    config.concurrency = POOL_THREADS;\n    config.initial_workers = 1;\n    config.dynamic_scaling = true;\n\n    int result = pool_main(argc, argv, &config);\n\n    free(g_tmpdir);\n    return result;\n}\n"
  },
  {
    "path": "data/lang/julia/MorlocRuntime.jl",
    "content": "\"\"\"\n    MorlocRuntime\n\nJulia runtime module for morloc. Provides IPC (daemon lifecycle, packet I/O),\nmsgpack-based serialization, and foreign call support.\n\nAll heavy lifting is done by libmorloc via the thin C bridge (libjuliamorloc.so).\nJulia handles only the msgpack <-> native type conversion using MsgPack.jl.\n\"\"\"\nmodule MorlocRuntime\n\nusing MsgPack\n\n# Path to the bridge shared library (set during morloc init)\nconst LIB_PATH = Ref{String}(\"\")\n\nfunction __init__()\n    # Look for libjuliamorloc.so relative to this file, or in standard locations\n    candidates = [\n        joinpath(dirname(@__FILE__), \"libjuliamorloc.so\"),\n        joinpath(dirname(@__FILE__), \"..\", \"lib\", \"libjuliamorloc.so\"),\n    ]\n    # Also check the morloc home lib directory\n    morloc_home = get(ENV, \"MORLOC_HOME\", joinpath(homedir(), \".local\", \"share\", \"morloc\"))\n    push!(candidates, joinpath(morloc_home, \"lib\", \"libjuliamorloc.so\"))\n\n    for path in candidates\n        if isfile(path)\n            LIB_PATH[] = path\n            return\n        end\n    end\n    error(\"Cannot find libjuliamorloc.so. Run `morloc init` first.\")\nend\n\nlib() = LIB_PATH[]\n\n# -- Error handling --\n\nfunction check_error(context::String)\n    msg = unsafe_string(ccall((:jlmorloc_last_error, lib()), Cstring, ()))\n    if !isempty(msg)\n        error(\"$context: $msg\")\n    end\nend\n\n# -- Daemon lifecycle --\n\nfunction start_daemon(socket_path::String, tmpdir::String,\n                      shm_basename::String, shm_size::Integer)\n    ptr = ccall((:jlmorloc_start_daemon, lib()), Ptr{Nothing},\n                (Cstring, Cstring, Cstring, Csize_t),\n                socket_path, tmpdir, shm_basename, UInt(shm_size))\n    ptr == C_NULL && check_error(\"start_daemon\")\n    return ptr\nend\n\nfunction close_daemon(daemon::Ptr{Nothing})\n    ccall((:jlmorloc_close_daemon, lib()), Nothing, (Ptr{Nothing},), daemon)\nend\n\nfunction wait_for_client(daemon::Ptr{Nothing})\n    fd = ccall((:jlmorloc_wait_for_client, lib()), Cint, (Ptr{Nothing},), daemon)\n    fd < 0 && check_error(\"wait_for_client\")\n    return fd\nend\n\n# -- Packet I/O --\n\nfunction stream_from_client(client_fd)\n    out_size = Ref{Csize_t}(0)\n    ptr = ccall((:jlmorloc_stream_from_client, lib()), Ptr{UInt8},\n                (Cint, Ref{Csize_t}), Int32(client_fd), out_size)\n    ptr == C_NULL && check_error(\"stream_from_client\")\n    return ptr  # opaque packet pointer\nend\n\nfunction send_packet_to_foreign_server(client_fd, packet::Ptr{UInt8})\n    rc = ccall((:jlmorloc_send_packet, lib()), Cint,\n               (Cint, Ptr{UInt8}), Int32(client_fd), packet)\n    rc != 0 && check_error(\"send_packet\")\nend\n\nfunction close_socket(fd)\n    ccall((:jlmorloc_close_socket, lib()), Nothing, (Cint,), Int32(fd))\nend\n\n# -- Packet classification --\n\nfunction is_ping(packet::Ptr{UInt8})\n    ccall((:jlmorloc_is_ping, lib()), Cint, (Ptr{UInt8},), packet) != 0\nend\n\nfunction is_local_call(packet::Ptr{UInt8})\n    ccall((:jlmorloc_is_local_call, lib()), Cint, (Ptr{UInt8},), packet) != 0\nend\n\nfunction is_remote_call(packet::Ptr{UInt8})\n    ccall((:jlmorloc_is_remote_call, lib()), Cint, (Ptr{UInt8},), packet) != 0\nend\n\nfunction pong(packet::Ptr{UInt8})\n    result = ccall((:jlmorloc_pong, lib()), Ptr{UInt8}, (Ptr{UInt8},), packet)\n    result == C_NULL && check_error(\"pong\")\n    return result\nend\n\n# -- Call packet parsing --\n\n\"\"\"\n    read_morloc_call_packet(packet) -> (mid, args)\n\nParse a call packet into a manifold index and a vector of argument packets.\n\"\"\"\nfunction read_morloc_call_packet(packet::Ptr{UInt8})\n    out_mid = Ref{UInt32}(0)\n    out_nargs = Ref{Csize_t}(0)\n    call_ptr = ccall((:jlmorloc_read_call, lib()), Ptr{Nothing},\n                     (Ptr{UInt8}, Ref{UInt32}, Ref{Csize_t}),\n                     packet, out_mid, out_nargs)\n    call_ptr == C_NULL && check_error(\"read_call\")\n\n    mid = Int(out_mid[])\n    nargs = Int(out_nargs[])\n    args = Vector{Ptr{UInt8}}(undef, nargs)\n    for i in 1:nargs\n        args[i] = ccall((:jlmorloc_call_arg, lib()), Ptr{UInt8},\n                        (Ptr{Nothing}, Csize_t), call_ptr, UInt(i - 1))\n    end\n\n    ccall((:jlmorloc_free_call, lib()), Nothing, (Ptr{Nothing},), call_ptr)\n    return (mid, args)\nend\n\n# -- Msgpack bridge: serialize/deserialize --\n\n\"\"\"\nStrip the `<TypeName>` prefix from schema strings like `\"<Int64>i4\"` -> `\"i4\"`.\n\"\"\"\nfunction strip_schema_prefix(schema_str::String)\n    if !isempty(schema_str) && schema_str[1] == '<'\n        i = findfirst('>', schema_str)\n        if i !== nothing\n            return schema_str[i+1:end]\n        end\n    end\n    return schema_str\nend\n\n\"\"\"\n    put_value(value, schema_str) -> Ptr{UInt8}\n\nSerialize a Julia value to a morloc data packet via msgpack.\n\"\"\"\nfunction put_value(value, schema_str::String)\n    schema = strip_schema_prefix(schema_str)\n    mpk = MsgPack.pack(to_msgpack(value, schema))\n    pkt = ccall((:jlmorloc_pack, lib()), Ptr{UInt8},\n                (Ptr{UInt8}, Csize_t, Cstring),\n                mpk, length(mpk), schema)\n    pkt == C_NULL && check_error(\"pack\")\n    return pkt\nend\n\n\"\"\"\n    get_value(packet, schema_str) -> Julia value\n\nDeserialize a morloc data packet to a Julia value via msgpack.\n\"\"\"\nfunction get_value(packet::Ptr{UInt8}, schema_str::String)\n    schema = strip_schema_prefix(schema_str)\n    out_size = Ref{Csize_t}(0)\n    mpk_ptr = ccall((:jlmorloc_unpack, lib()), Ptr{UInt8},\n                    (Ptr{UInt8}, Cstring, Ref{Csize_t}),\n                    packet, schema, out_size)\n    mpk_ptr == C_NULL && check_error(\"unpack\")\n\n    mpk_bytes = unsafe_wrap(Array, mpk_ptr, out_size[]; own=true)\n    raw = MsgPack.unpack(mpk_bytes)\n    return from_msgpack(raw, schema)\nend\n\n# -- Error packet --\n\nfunction make_fail_packet(msg::String)\n    ccall((:jlmorloc_make_fail_packet, lib()), Ptr{UInt8}, (Cstring,), msg)\nend\n\n# -- Foreign call --\n\n\"\"\"\n    foreign_call(tmpdir, socket_name, mid, args) -> Ptr{UInt8}\n\nCall another pool (cross-language IPC). args is a vector of packet pointers.\n\"\"\"\nfunction foreign_call(tmpdir::String, socket_name::String,\n                      mid::Integer, args::Vector{Ptr{UInt8}})\n    nargs = length(args)\n    result = ccall((:jlmorloc_foreign_call, lib()), Ptr{UInt8},\n                   (Cstring, Cstring, UInt32, Ptr{Ptr{UInt8}}, Csize_t),\n                   tmpdir, socket_name, UInt32(mid), args, UInt(nargs))\n    result == C_NULL && check_error(\"foreign_call\")\n    return result\nend\n\n# -- Type conversion helpers --\n\n# Schema string format:\n#   \"b\" = bool, \"i4\" = int32, \"i8\" = int64, \"f8\" = float64, \"s\" = string\n#   \"ai4\" = array of int32, \"t(i4f8s)\" = tuple\n#   \"m{name:s,age:i4}\" = record\n\n\"\"\"\nConvert a Julia value to a msgpack-friendly representation based on schema.\nMsgPack.jl handles most types natively, but we need to ensure correct types\nfor the schema (e.g., Int32 vs Int64).\n\"\"\"\nfunction to_msgpack(value, schema::String)\n    if startswith(schema, \"a\")\n        elem_schema = schema[2:end]\n        return [to_msgpack(v, elem_schema) for v in value]\n    elseif startswith(schema, \"t(\")\n        inner = schema[3:end-1]\n        schemas = split_tuple_schema(inner)\n        return [to_msgpack(value[i], schemas[i]) for i in 1:length(schemas)]\n    elseif startswith(schema, \"m{\")\n        inner = schema[3:end-1]\n        fields = split_record_schema(inner)\n        return Dict(k => to_msgpack(value[k], s) for (k, s) in fields)\n    else\n        return to_msgpack_scalar(value, schema)\n    end\nend\n\nfunction to_msgpack_scalar(value, schema::String)\n    if schema == \"b\"\n        return Bool(value)\n    elseif schema == \"i4\"\n        return Int32(value)\n    elseif schema == \"i8\"\n        return Int64(value)\n    elseif schema == \"u4\"\n        return UInt32(value)\n    elseif schema == \"u8\"\n        return UInt64(value)\n    elseif schema == \"f4\"\n        return Float32(value)\n    elseif schema == \"f8\"\n        return Float64(value)\n    elseif schema == \"s\"\n        return String(value)\n    elseif schema == \"u\"\n        return nothing\n    else\n        return value\n    end\nend\n\n\"\"\"\nConvert a raw msgpack value to a Julia type based on schema.\n\"\"\"\nfunction from_msgpack(raw, schema::String)\n    if startswith(schema, \"a\")\n        elem_schema = schema[2:end]\n        return [from_msgpack(v, elem_schema) for v in raw]\n    elseif startswith(schema, \"t(\")\n        inner = schema[3:end-1]\n        schemas = split_tuple_schema(inner)\n        return Tuple(from_msgpack(raw[i], schemas[i]) for i in 1:length(schemas))\n    elseif startswith(schema, \"m{\")\n        inner = schema[3:end-1]\n        fields = split_record_schema(inner)\n        return Dict(k => from_msgpack(raw[k], s) for (k, s) in fields)\n    else\n        return from_msgpack_scalar(raw, schema)\n    end\nend\n\nfunction from_msgpack_scalar(raw, schema::String)\n    if schema == \"b\"\n        return Bool(raw)\n    elseif schema == \"i4\"\n        return Int32(raw)\n    elseif schema == \"i8\"\n        return Int64(raw)\n    elseif schema == \"u4\"\n        return UInt32(raw)\n    elseif schema == \"u8\"\n        return UInt64(raw)\n    elseif schema == \"f4\"\n        return Float32(raw)\n    elseif schema == \"f8\"\n        return Float64(raw)\n    elseif schema == \"s\"\n        return String(raw)\n    elseif schema == \"u\"\n        return nothing\n    else\n        return raw\n    end\nend\n\n# -- Schema parsing helpers --\n\nfunction split_tuple_schema(inner::String)\n    schemas = String[]\n    i = 1\n    while i <= length(inner)\n        s, i = parse_one_schema(inner, i)\n        push!(schemas, s)\n    end\n    return schemas\nend\n\nfunction split_record_schema(inner::String)\n    fields = Pair{String,String}[]\n    i = 1\n    while i <= length(inner)\n        # parse field name\n        colon = findnext(':', inner, i)\n        name = inner[i:colon-1]\n        i = colon + 1\n        # parse field schema\n        s, i = parse_one_schema(inner, i)\n        push!(fields, name => s)\n        if i <= length(inner) && inner[i] == ','\n            i += 1\n        end\n    end\n    return fields\nend\n\nfunction parse_one_schema(s::String, i::Int)\n    if s[i] == 'a'\n        inner, next_i = parse_one_schema(s, i + 1)\n        return \"a\" * inner, next_i\n    elseif s[i] == 't'\n        # find matching ')'\n        depth = 0\n        j = i + 1\n        while j <= length(s)\n            if s[j] == '('; depth += 1; end\n            if s[j] == ')'; depth -= 1; if depth == 0; break; end; end\n            j += 1\n        end\n        return s[i:j], j + 1\n    elseif s[i] == 'm'\n        depth = 0\n        j = i + 1\n        while j <= length(s)\n            if s[j] == '{'; depth += 1; end\n            if s[j] == '}'; depth -= 1; if depth == 0; break; end; end\n            j += 1\n        end\n        return s[i:j], j + 1\n    elseif s[i] in ('i', 'u', 'f')\n        # numeric: i4, i8, u4, u8, f4, f8\n        return s[i:i+1], i + 2\n    elseif s[i] == 's'\n        return \"s\", i + 1\n    elseif s[i] == 'b'\n        return \"b\", i + 1\n    else\n        error(\"Unknown schema character: $(s[i]) at position $i in '$s'\")\n    end\nend\n\nend # module\n"
  },
  {
    "path": "data/lang/julia/init.sh",
    "content": "#!/bin/bash\nset -e\n\nMORLOC_HOME=\"$1\"\nBUILD_DIR=\"$2\"\nSANITIZE_FLAGS=\"$3\"\nINCLUDE_DIR=\"$MORLOC_HOME/include\"\nLIB_DIR=\"$MORLOC_HOME/lib\"\nLANG_DIR=\"$MORLOC_HOME/lang/julia\"\n\nmkdir -p \"$LANG_DIR\"\n\n# Install language descriptor and runtime files\ncp \"$BUILD_DIR/lang.yaml\" \"$LANG_DIR/\"\ncp \"$BUILD_DIR/pool.jl\" \"$LANG_DIR/\"\ncp \"$BUILD_DIR/MorlocRuntime.jl\" \"$LANG_DIR/\"\n\n# Compile juliabridge.c -> libjuliamorloc.so\ngcc -shared -fPIC -O2 $SANITIZE_FLAGS -I\"$INCLUDE_DIR\" -o \"$LIB_DIR/libjuliamorloc.so\" \\\n    \"$BUILD_DIR/juliabridge.c\" -L\"$LIB_DIR\" -Wl,-rpath,\"$LIB_DIR\" -lmorloc -lpthread\n"
  },
  {
    "path": "data/lang/julia/juliabridge.c",
    "content": "/* juliabridge.c -- Thin C bridge between Julia and libmorloc.\n *\n * Compiled to libjuliamorloc.so, called from Julia via ccall.\n * Wraps libmorloc functions that use opaque structs (language_daemon_t,\n * morloc_call_t, Schema) or the ERRMSG pattern into simple pointer/int\n * interfaces that Julia's FFI can handle directly.\n */\n\n#include \"morloc.h\"\n#include <stdlib.h>\n#include <string.h>\n#include <stdio.h>\n\n/* Thread-local error message buffer */\nstatic __thread char jl_errbuf[4096];\nstatic __thread char* jl_errmsg = NULL;\n\nstatic void clear_err(void) {\n    jl_errmsg = NULL;\n    jl_errbuf[0] = '\\0';\n}\n\n/* Get the last error message (returns \"\" if none). */\nconst char* jlmorloc_last_error(void) {\n    return jl_errmsg ? jl_errmsg : \"\";\n}\n\n/* -- Daemon lifecycle -- */\n\nvoid* jlmorloc_start_daemon(const char* socket_path, const char* tmpdir,\n                             const char* shm_basename, size_t shm_size) {\n    clear_err();\n    language_daemon_t* d = start_daemon(socket_path, tmpdir, shm_basename,\n                                         shm_size, &jl_errmsg);\n    if (!d && jl_errmsg) {\n        snprintf(jl_errbuf, sizeof(jl_errbuf), \"%s\", jl_errmsg);\n        jl_errmsg = jl_errbuf;\n    }\n    return (void*)d;\n}\n\nvoid jlmorloc_close_daemon(void* daemon) {\n    language_daemon_t* d = (language_daemon_t*)daemon;\n    close_daemon(&d);\n}\n\nint jlmorloc_wait_for_client(void* daemon) {\n    clear_err();\n    return wait_for_client((language_daemon_t*)daemon, &jl_errmsg);\n}\n\n/* -- Packet I/O -- */\n\n/* Returns a pointer to the packet bytes. Caller must NOT free this\n * directly -- it lives in shared memory or was allocated by libmorloc. */\nuint8_t* jlmorloc_stream_from_client(int client_fd, size_t* out_size) {\n    clear_err();\n    uint8_t* pkt = stream_from_client(client_fd, &jl_errmsg);\n    if (pkt && out_size) {\n        /* Packet size is in the first 4 bytes (little-endian uint32) */\n        uint32_t sz;\n        memcpy(&sz, pkt, sizeof(sz));\n        *out_size = (size_t)sz;\n    }\n    return pkt;\n}\n\nint jlmorloc_send_packet(int client_fd, uint8_t* packet) {\n    clear_err();\n    size_t sent = send_packet_to_foreign_server(client_fd, packet, &jl_errmsg);\n    return sent > 0 ? 0 : -1;\n}\n\nvoid jlmorloc_close_socket(int fd) {\n    close_socket(fd);\n}\n\n/* -- Packet classification -- */\n\nint jlmorloc_is_ping(const uint8_t* packet) {\n    clear_err();\n    return packet_is_ping(packet, &jl_errmsg) ? 1 : 0;\n}\n\nint jlmorloc_is_local_call(const uint8_t* packet) {\n    clear_err();\n    return packet_is_local_call(packet, &jl_errmsg) ? 1 : 0;\n}\n\nint jlmorloc_is_remote_call(const uint8_t* packet) {\n    clear_err();\n    return packet_is_remote_call(packet, &jl_errmsg) ? 1 : 0;\n}\n\nuint8_t* jlmorloc_pong(const uint8_t* packet) {\n    clear_err();\n    return return_ping(packet, &jl_errmsg);\n}\n\n/* -- Call packet parsing -- */\n\n/* Parse a call packet. Returns the manifold index via out_mid,\n * the number of arguments via out_nargs, and a pointer to the\n * morloc_call_t (which the caller must free via jlmorloc_free_call). */\nvoid* jlmorloc_read_call(const uint8_t* packet, uint32_t* out_mid,\n                          size_t* out_nargs) {\n    clear_err();\n    morloc_call_t* call = read_morloc_call_packet(packet, &jl_errmsg);\n    if (!call) return NULL;\n    *out_mid = call->midx;\n    *out_nargs = call->nargs;\n    return (void*)call;\n}\n\n/* Get the i-th argument packet from a parsed call. */\nuint8_t* jlmorloc_call_arg(void* call_ptr, size_t i) {\n    morloc_call_t* call = (morloc_call_t*)call_ptr;\n    if (i >= call->nargs) return NULL;\n    return call->args[i];\n}\n\nvoid jlmorloc_free_call(void* call_ptr) {\n    if (call_ptr) free_morloc_call((morloc_call_t*)call_ptr);\n}\n\n/* -- Msgpack bridge -- */\n\n/* Convert msgpack bytes + schema string -> morloc data packet.\n * The schema_str is a compact type descriptor like \"i4\", \"ai4\", \"m{x:f8}\". */\nuint8_t* jlmorloc_pack(const char* mpk, size_t mpk_size,\n                        const char* schema_str) {\n    clear_err();\n    Schema* schema = parse_schema(schema_str, &jl_errmsg);\n    if (!schema) return NULL;\n    uint8_t* pkt = make_data_packet_from_mpk(mpk, mpk_size, schema);\n    free_schema(schema);\n    return pkt;\n}\n\n/* Convert a morloc data packet -> msgpack bytes.\n * Returns a malloc'd buffer; caller must free it. */\nchar* jlmorloc_unpack(const uint8_t* packet, const char* schema_str,\n                       size_t* out_size) {\n    clear_err();\n    Schema* schema = parse_schema(schema_str, &jl_errmsg);\n    if (!schema) return NULL;\n    char* mpk = NULL;\n    size_t mpk_size = 0;\n    int ok = get_data_packet_as_mpk(packet, schema, &mpk, &mpk_size, &jl_errmsg);\n    free_schema(schema);\n    if (!ok) return NULL;\n    *out_size = mpk_size;\n    return mpk;\n}\n\n/* -- Error packet -- */\n\nuint8_t* jlmorloc_make_fail_packet(const char* msg) {\n    return make_fail_packet(msg);\n}\n\n/* -- Foreign call (cross-pool IPC) -- */\n\nuint8_t* jlmorloc_foreign_call(const char* tmpdir, const char* socket_name,\n                                uint32_t mid, uint8_t** arg_packets,\n                                size_t nargs) {\n    clear_err();\n    /* Build the call packet */\n    uint8_t* call_pkt = make_morloc_local_call_packet(\n        mid, (const uint8_t**)arg_packets, nargs, &jl_errmsg);\n    if (!call_pkt) return NULL;\n\n    /* Build the socket path */\n    size_t pathlen = strlen(tmpdir) + 1 + strlen(socket_name) + 1;\n    char* socket_path = (char*)malloc(pathlen);\n    snprintf(socket_path, pathlen, \"%s/%s\", tmpdir, socket_name);\n\n    /* Send and receive */\n    uint8_t* result = send_and_receive_over_socket(socket_path, call_pkt,\n                                                    &jl_errmsg);\n    free(socket_path);\n    free(call_pkt);\n    return result;\n}\n\n/* -- Shared memory init (needed before daemon start in some cases) -- */\n\nint jlmorloc_shinit(const char* basename, int volume, size_t size) {\n    clear_err();\n    return shinit(basename, volume, size, &jl_errmsg) ? 0 : -1;\n}\n\nvoid jlmorloc_set_fallback_dir(const char* dir) {\n    shm_set_fallback_dir(dir);\n}\n"
  },
  {
    "path": "data/lang/julia/lang.yaml",
    "content": "# Julia language descriptor for morloc compiler\n# Metadata fields (read by LangRegistry) + descriptor fields (read by generic translator)\n\n# Identity and metadata\nname: jl\nextension: jl\naliases: [\"julia\"]\nis_compiled: false\nrun_command: [\"julia\"]\nserial_type: \"bytes\"\ncost: 5\n\n# Descriptor identity (kept for compatibility with generic translator)\nldName: julia\nldExtension: jl\n\n# Literals\nldBoolTrue: \"true\"\nldBoolFalse: \"false\"\nldNullLiteral: \"nothing\"\n\n# Constructors\nldListStyle: bracket\nldTupleConstructor: \"\"\nldRecordConstructor: \"Dict\"\nldRecordSeparator: \"=>\"\n\n# Access styles\nldIndexStyle: one_bracket\nldKeyAccess: bracket\nldFieldAccess: dot\n\n# Serialize/deserialize function names (from juliamorloc.jl)\nldSerializeFn: \"MorlocRuntime.put_value\"\nldDeserializeFn: \"MorlocRuntime.get_value\"\nldIntrinsicPrefix: \"MorlocRuntime.\"\n\n# Foreign call\nldForeignCallFn: \"MorlocRuntime.foreign_call\"\nldForeignCallIntSuffix: \"\"\n\n# Import syntax\nldQualifiedImports: false\nldIncludeRelToFile: true\n\n# Template fields\nldAssignOp: \"=\"\nldLambdaTemplate: \"({{args}}) -> {{body}}\"\nldDoBlockExpr: \"(() -> {{expr}})\"\nldDoBlockBlock: \"\"\nldPartialTemplate: \"({{bound_args}}) -> {{fn}}({{all_args}})\"\nldImportTemplate: \"include(\\\"{{path}}\\\")\"\nldSocketPathTemplate: \"joinpath(global_state[\\\"tmpdir\\\"], {{socket}})\"\nldResourcePackTemplate: \"[{{mem}}, {{time}}, {{cpus}}, {{gpus}}]\"\nldReturnTemplate: \"return({{expr}})\"\nldFuncDefHeader: \"function {{name}}({{args}})\"\nldBlockStyle: end_keyword\nldBlockEnd: \"end\"\nldErrorWrapOpen: \"\"\nldErrorWrapClose: []\nldPatternStyle: concat_call\nldConcatFn: \"string\"\nldQuoteTerminator: '\"'\nldQuoteTerminatorEsc: '\\\"'\nldMapStyle: list_comprehension\nldDispatchLocalHeader: \"dispatch = Dict(\"\nldDispatchLocalEntry: \"    {{mid}} => {{name}},\"\nldDispatchLocalFooter: \")\"\nldDispatchRemoteHeader: \"remote_dispatch = Dict(\"\nldDispatchRemoteEntry: \"    {{mid}} => {{name}}_remote,\"\nldDispatchRemoteFooter: \")\"\n\n# Pool template (loaded from pool.jl at runtime, left empty here)\nldPoolTemplate: \"\"\nldBreakMarker: \"# <<<BREAK>>>\"\nldCommentMarker: \"#\"\n"
  },
  {
    "path": "data/lang/julia/pool.jl",
    "content": "# Morloc Julia pool template\n# Single-threaded daemon: accepts one connection at a time.\n\n# Add morloc runtime to load path\nconst MORLOC_HOME = get(ENV, \"MORLOC_HOME\", joinpath(homedir(), \".local\", \"share\", \"morloc\"))\npush!(LOAD_PATH, joinpath(MORLOC_HOME, \"lang\", \"julia\"))\n\n# Global state accessible to manifolds (e.g., tmpdir for foreign calls)\nglobal_state = Dict{String,String}()\n\n# <<<BREAK>>>\n\nusing MorlocRuntime\n\n# <<<BREAK>>>\n\n# <<<BREAK>>>\n\nfunction run_job(client_fd)\n    try\n        client_data = MorlocRuntime.stream_from_client(client_fd)\n\n        if MorlocRuntime.is_local_call(client_data)\n            (mid, args) = MorlocRuntime.read_morloc_call_packet(client_data)\n            try\n                result = dispatch[mid](args...)\n            catch e\n                result = MorlocRuntime.make_fail_packet(string(e))\n            end\n\n        elseif MorlocRuntime.is_remote_call(client_data)\n            (mid, args) = MorlocRuntime.read_morloc_call_packet(client_data)\n            try\n                result = remote_dispatch[mid](args...)\n            catch e\n                result = MorlocRuntime.make_fail_packet(string(e))\n            end\n\n        elseif MorlocRuntime.is_ping(client_data)\n            result = MorlocRuntime.pong(client_data)\n\n        else\n            error(\"Expected a ping or call type packet\")\n        end\n\n        MorlocRuntime.send_packet_to_foreign_server(client_fd, result)\n\n    catch e\n        # Best-effort: wrap the error in a fail packet and send it back so the\n        # caller gets a structured error instead of hanging on a closed socket.\n        # Includes the full backtrace so context propagates through the stack.\n        msg = sprint(showerror, e, catch_backtrace())\n        try\n            result = MorlocRuntime.make_fail_packet(msg)\n            MorlocRuntime.send_packet_to_foreign_server(client_fd, result)\n        catch\n            # Client may already be gone (timed-out ping, broken pipe); ignore.\n        end\n        @error \"job failed\" exception=(e, catch_backtrace())\n    finally\n        MorlocRuntime.close_socket(client_fd)\n    end\nend\n\nfunction main()\n    socket_path = ARGS[1]\n    tmpdir = ARGS[2]\n    shm_basename = ARGS[3]\n\n    global_state[\"tmpdir\"] = tmpdir\n\n    daemon = MorlocRuntime.start_daemon(socket_path, tmpdir, shm_basename, 0xffff)\n\n    # Simple signal handling\n    running = Ref(true)\n\n    @async begin\n        try\n            while running[]\n                sleep(0.01)\n            end\n        catch\n        end\n    end\n\n    try\n        while running[]\n            client_fd = MorlocRuntime.wait_for_client(daemon)\n            if client_fd > 0\n                run_job(client_fd)\n            end\n        end\n    catch e\n        if !(e isa InterruptException)\n            @error \"Pool error\" exception=(e, catch_backtrace())\n        end\n    finally\n        MorlocRuntime.close_daemon(daemon)\n    end\nend\n\nmain()\n"
  },
  {
    "path": "data/lang/languages.yaml",
    "content": "# Pairwise language costs for the morloc optimizer\n#\n# Same-language function overhead (intra-language call cost)\nsame_language_costs:\n  c: 1\n  cpp: 1\n  py: 10\n  r:  20\n\n# Cost of calling INTO a language from a different language (IPC overhead)\ncross_language_costs:\n  c: 1001\n  cpp: 1000\n  py: 10000\n  r: 40000\n\n# Special optimized pairs (from -> to) that bypass normal IPC\noptimized_pairs:\n  - from: cpp\n    to: c\n    cost: 1\n\n# Defaults for unknown/plugin languages\ndefault_same_language: 10\ndefault_cross_language: 10000\n"
  },
  {
    "path": "data/lang/py/Makefile",
    "content": "all:\n\tpython3 setup.py build_ext --inplace\n\tcp -fs pymorloc.cpython* pymorloc\n"
  },
  {
    "path": "data/lang/py/init.sh",
    "content": "#!/bin/bash\nset -e\n\nexport MORLOC_HOME=\"$1\"\nBUILD_DIR=\"$2\"\nSANITIZE_FLAGS=\"$3\"\nOPT_DIR=\"$MORLOC_HOME/opt\"\n\n# Clean stale build artifacts\nrm -f \"$OPT_DIR\"/pymorloc.cpython* \"$OPT_DIR/pymorloc\"\nrm -rf \"$OPT_DIR/build\"\n\n# Copy files to opt dir\ncp \"$BUILD_DIR/pymorloc.c\" \"$OPT_DIR/\"\ncp \"$BUILD_DIR/setup.py\" \"$OPT_DIR/\"\ncp \"$BUILD_DIR/Makefile\" \"$OPT_DIR/\"\n\n# Build pymorloc extension\nexport CFLAGS=\"$SANITIZE_FLAGS\"\nmake -C \"$OPT_DIR\" -f Makefile\n"
  },
  {
    "path": "data/lang/py/lang.yaml",
    "content": "# Python language descriptor for morloc compiler\n# Metadata fields (read by LangRegistry) + descriptor fields (read by generic translator)\n\n# Identity and metadata\nname: py\nextension: py\naliases: [\"python\", \"python3\"]\nis_compiled: false\nrun_command: [\"python3\"]\nserial_type: \"str\"\ncost: 3\npreamble:\n  - 'sys.path = [os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), \"..\", \"..\")), os.path.expanduser(\".\"), os.path.expanduser(\"{{opt}}\"), os.path.expanduser(\"{{lib}}\")] + sys.path'\n  - \"import importlib\"\n  - \"import pymorloc as morloc\"\n\n# Literals\nldBoolTrue: \"True\"\nldBoolFalse: \"False\"\nldNullLiteral: \"None\"\n\n# Constructors\nldListStyle: bracket\nldTupleConstructor: \"\"\nldRecordConstructor: \"OrderedDict\"\nldRecordSeparator: \"=\"\n\n# Access styles\nldIndexStyle: zero_bracket\nldKeyAccess: \"bracket\"\nldFieldAccess: dot\n\n# Serialize/deserialize\nldSerializeFn: \"morloc.put_value\"\nldDeserializeFn: \"morloc.get_value\"\nldIntrinsicPrefix: \"morloc.\"\n\n# Foreign call\nldForeignCallFn: \"morloc.foreign_call\"\nldForeignCallIntSuffix: \"\"\n\n# Remote call\nldRemoteCallFn: \"morloc.remote_call\"\n\n# Record handling\nldDictStyleRecords: true\nldQuoteRecordKeys: false\n\n# Import syntax\nldQualifiedImports: true\nldIncludeRelToFile: false\n\n# Template fields\nldAssignOp: \"=\"\nldLambdaTemplate: \"lambda {{args}}: {{body}}\"\nldDoBlockExpr: \"(lambda: {{expr}})\"\nldDoBlockBlock: \"\"\nldPartialTemplate: \"functools.partial({{fn_with_context}})\"\nldImportTemplate: \"{{namespace}} = importlib.import_module(\\\"{{module_path}}\\\")\"\nldSocketPathTemplate: \"os.path.join(global_state[\\\"tmpdir\\\"], {{socket}})\"\nldResourcePackTemplate: \"struct.pack('iiii', {{mem}}, {{time}}, {{cpus}}, {{gpus}})\"\nldReturnTemplate: \"return({{expr}})\"\nldFuncDefHeader: \"def {{name}}({{args}}):\"\nldBlockStyle: indent\nldBlockEnd: \"\"\nldErrorWrapOpen: \"try:\"\nldErrorWrapClose:\n  - \"except Exception as e:\"\n  - \"    raise RuntimeError(f\\\"Error (pool daemon in {{name}}):\\\\n{e!s}\\\")\"\nldPatternStyle: fstring\nldQuoteTerminator: '\"\"\"'\nldQuoteTerminatorEsc: '\\\"\\\"\\\"'\nldMapStyle: loop_append\nldDispatchLocalHeader: \"dispatch = {\"\nldDispatchLocalEntry: \"    {{mid}}: {{name}},\"\nldDispatchLocalFooter: \"}\"\nldDispatchRemoteHeader: \"remote_dispatch = {\"\nldDispatchRemoteEntry: \"    {{mid}}: {{name}}_remote,\"\nldDispatchRemoteFooter: \"}\"\n\n# Pool template (loaded from pool.py, left empty here)\nldPoolTemplate: \"\"\nldBreakMarker: \"# <<<BREAK>>>\"\nldCommentMarker: \"#\"\n"
  },
  {
    "path": "data/lang/py/pool.py",
    "content": "import signal\nimport sys\nimport select\nimport os # required for setting path to morloc dependencies\nimport time\nimport copy\nimport array\nimport struct\nimport socket as _socket\nfrom collections import OrderedDict\nfrom multiprocessing import Process, Value, RawValue\nimport ctypes\nimport functools\n\n\n# Global variables for clean signal handling\ndaemon = None\nworkers = []\nglobal_state = dict()\n_shutdown_wakeup_fd = -1\n\n# AUTO include sources start\n# <<<BREAK>>>\n# AUTO include sources end\n\n# Dynamic worker spawning: monkey-patch foreign_call to track busy workers.\n# Workers atomically increment busy_count before a foreign_call and decrement\n# after. When busy_count reaches total_workers, a byte is written to a wake-up\n# pipe to tell the main process to spawn a new worker.\n_original_foreign_call = morloc.foreign_call\n_busy_ref = None\n_total_ref = None\n_wakeup_fd = -1\n\ndef _init_worker_tracking(busy, total, wakeup_fd):\n    global _busy_ref, _total_ref, _wakeup_fd\n    _busy_ref = busy\n    _total_ref = total\n    _wakeup_fd = wakeup_fd\n    morloc.foreign_call = _tracked_foreign_call\n\ndef _tracked_foreign_call(*args):\n    prev = _busy_ref.value\n    _busy_ref.value = prev + 1\n    if prev + 1 >= _total_ref.value and _wakeup_fd >= 0:\n        try:\n            os.write(_wakeup_fd, b'!')\n        except OSError:\n            pass\n    try:\n        return _original_foreign_call(*args)\n    finally:\n        _busy_ref.value -= 1\n\n# AUTO include manifolds start\n# <<<BREAK>>>\n# AUTO include manifolds end\n\n\n# AUTO include dispatch start\n# <<<BREAK>>>\n# AUTO include dispatch end\n\ndef run_job(client_fd: int) -> None:\n    try:\n        # Free SHM from previous dispatch result (consumed by caller)\n        morloc.flush_shm_tracker()\n        client_data = morloc.stream_from_client(client_fd)\n\n        if(morloc.is_local_call(client_data)):\n            (mid, args) = morloc.read_morloc_call_packet(client_data)\n\n            try:\n                result = dispatch[mid](*args)\n            except Exception as e:\n                result = morloc.make_fail_packet(str(e))\n\n        elif(morloc.is_remote_call(client_data)):\n            (mid, args) = morloc.read_morloc_call_packet(client_data)\n\n            try:\n                result = remote_dispatch[mid](*args)\n            except Exception as e:\n                result = morloc.make_fail_packet(str(e))\n\n        elif(morloc.is_ping(client_data)):\n            result = morloc.pong(client_data)\n\n        else:\n            raise ValueError(\"Expected a ping or call type packet\")\n\n        # Flush stdout BEFORE sending the result back. The nexus prints its\n        # own output (the return value) right after receiving this response.\n        # Both processes share the same stdout fd, so if we flush after sending,\n        # the nexus can print first, causing out-of-order output.\n        sys.stdout.flush()\n\n        morloc.send_packet_to_foreign_server(client_fd, result)\n\n    except Exception as e:\n        # Try to send a fail packet back to the caller before giving up.\n        # This may fail (e.g., broken pipe from a timed-out ping), which is OK.\n        try:\n            result = morloc.make_fail_packet(str(e))\n            morloc.send_packet_to_foreign_server(client_fd, result)\n        except Exception:\n            pass\n        print(f\"job failed: {e!s}\", file=sys.stderr)\n    finally:\n        # Safety-net flush for any output from error handling paths\n        sys.stdout.flush()\n        # close child copy\n        morloc.close_socket(client_fd)\n\n\ndef _send_fd(sock, fd):\n    \"\"\"Send a file descriptor over a Unix domain socket.\"\"\"\n    sock.sendmsg([b'\\x00'],\n                 [(_socket.SOL_SOCKET, _socket.SCM_RIGHTS,\n                   array.array('i', [fd]))])\n\ndef _recv_fd(sock):\n    \"\"\"Receive a file descriptor from a Unix domain socket.\"\"\"\n    msg, ancdata, flags, addr = sock.recvmsg(1, _socket.CMSG_SPACE(4))\n    if not msg and not ancdata:\n        raise EOFError(\"Connection closed\")\n    for cmsg_level, cmsg_type, cmsg_data in ancdata:\n        if (cmsg_level == _socket.SOL_SOCKET and\n                cmsg_type == _socket.SCM_RIGHTS):\n            a = array.array('i')\n            a.frombytes(cmsg_data[:4])\n            return a[0]\n    raise RuntimeError(\"No fd received in ancillary data\")\n\n\nWORKER_IDLE_TIMEOUT = 5.0  # seconds before an idle worker exits\n\ndef worker_process(job_fd, tmpdir, shm_basename, shutdown_flag, busy_count, total_workers, wakeup_w):\n    # Reset signal handlers inherited from main. If user code inside run_job\n    # calls multiprocessing.Pool (or anything else that forks and later\n    # SIGTERMs its own children), those grandchildren would otherwise inherit\n    # main's signal_handler and flip the shared shutdown_flag, causing main\n    # to SIGKILL this worker mid-response. See the multiprocessing-py-1 bug.\n    signal.signal(signal.SIGTERM, signal.SIG_DFL)\n    signal.signal(signal.SIGINT, signal.SIG_DFL)\n    morloc.set_fallback_dir(tmpdir)\n    morloc.shinit(shm_basename, 0, 0xffff)\n    _init_worker_tracking(busy_count, total_workers, wakeup_w)\n    sock = _socket.fromfd(job_fd, _socket.AF_UNIX, _socket.SOCK_STREAM)\n    os.close(job_fd)  # sock owns a dup'd copy\n    last_activity = time.monotonic()\n    try:\n        while not shutdown_flag.value:\n            rlist, _, _ = select.select([sock.fileno()], [], [], 0.01)\n            if shutdown_flag.value:\n                break\n            if rlist:\n                try:\n                    client_fd = _recv_fd(sock)\n                    run_job(client_fd)\n                    last_activity = time.monotonic()\n                except (EOFError, OSError):\n                    break\n            elif total_workers.value > 1 and time.monotonic() - last_activity > WORKER_IDLE_TIMEOUT:\n                break\n    except BaseException as e:\n        # Catch-all for errors that escape run_job's own exception handling:\n        # MemoryError, KeyboardInterrupt, SystemExit, or bugs in the worker\n        # loop itself. Without this, the worker dies silently and the nexus\n        # only sees \"failed to read response header\" with no indication of\n        # what went wrong in the pool.\n        #\n        # Race condition: the nexus detects the broken socket and may start\n        # its clean_exit tear-down (SIGTERM -> SIGKILL) while this print is\n        # still buffered. We flush immediately to maximize the chance the\n        # message reaches the terminal before we are killed. stderr is\n        # line-buffered (set in __main__), but the flush is a safety net for\n        # edge cases (redirected stderr, forked-process buffer state).\n        import traceback\n        print(f\"morloc pool worker fatal error: {e!s}\", file=sys.stderr)\n        traceback.print_exc(file=sys.stderr)\n        sys.stderr.flush()\n    finally:\n        sock.close()\n\n\ndef signal_handler(sig, frame):\n    global daemon\n    # Ignore further SIGTERM/SIGINT during cleanup. Python processes pending\n    # signals between bytecodes, including while another signal handler is\n    # running, so a second SIGTERM arriving mid-cleanup would otherwise\n    # re-enter this handler and double-free the daemon pointer.\n    try:\n        signal.signal(signal.SIGTERM, signal.SIG_IGN)\n        signal.signal(signal.SIGINT, signal.SIG_IGN)\n    except Exception:\n        pass\n    shutdown_flag.value = True\n    if _shutdown_wakeup_fd >= 0:\n        try:\n            os.write(_shutdown_wakeup_fd, b'!')\n        except OSError:\n            pass\n    # Capture the daemon pointer into a local and clear the global BEFORE\n    # invoking close_daemon. If a pending signal still slips through and\n    # re-enters this handler, it will see daemon=None and skip the free.\n    d = daemon\n    daemon = None\n    if d is not None:\n        morloc.close_daemon(d)\n\n\ndef client_listener(job_fd, socket_path, tmpdir, shm_basename, shutdown_flag):\n    global daemon\n    daemon = morloc.start_daemon(socket_path, tmpdir, shm_basename, 0xffff)\n    sock = _socket.fromfd(job_fd, _socket.AF_UNIX, _socket.SOCK_STREAM)\n    os.close(job_fd)  # sock owns a dup'd copy\n\n    while not shutdown_flag.value:\n        try:\n            client_fd = morloc.wait_for_client(daemon)\n        except Exception as e:\n            print(f\"In python daemon, failed to connect to client: {e!s}\", file=sys.stderr)\n            continue\n\n        if client_fd > 0:\n            try:\n                _send_fd(sock, client_fd)\n            except Exception as e:\n                print(f\"In python daemon, failed to start worker: {e!s}\", file=sys.stderr)\n            finally:\n                morloc.close_socket(client_fd)\n    sock.close()\n\n\n\nif __name__ == \"__main__\":\n    # Line-buffer stderr so diagnostic output is not lost when pool is killed.\n    # stdout is left fully buffered for performance (genome-scale piping) and\n    # flushed explicitly after each job and during shutdown.\n    sys.stderr.reconfigure(line_buffering=True)\n\n    shutdown_flag = Value('b', False)  # Shared flag\n\n    signal.signal(signal.SIGINT, signal_handler)\n    signal.signal(signal.SIGTERM, signal_handler)\n\n    # Health check: confirm imports loaded and print version\n    if len(sys.argv) > 1 and sys.argv[1] == \"--health\":\n        sys.stdout.write('{\"status\":\"ok\",\"version\":\"__MORLOC_VERSION__\"}\\n')\n        sys.exit(0)\n\n    # Process arguments passed from the nexus\n    try:\n        socket_path = sys.argv[1]\n        tmpdir = sys.argv[2]\n        shm_basename = sys.argv[3]\n    except IndexError:\n        print(\"Usage: script.py <socket_path> <tmpdir> <shm_basename>\")\n        sys.exit(1)\n\n    global_state[\"tmpdir\"] = tmpdir\n\n    # Shared job queue: listener writes fds to write_sock, workers read from read_sock.\n    # Only idle workers (blocked in recvmsg) pick up jobs, preventing the round-robin\n    # deadlock where a callback gets dispatched to a busy worker.\n    read_sock, write_sock = _socket.socketpair(_socket.AF_UNIX, _socket.SOCK_STREAM)\n\n    num_workers = 1\n    workers = []\n\n    # Shared counters for dynamic worker spawning.\n    # Workers increment busy_count before foreign_call and decrement after.\n    # When all workers are busy, main process spawns a new one.\n    busy_count = RawValue(ctypes.c_int, 0)\n    total_workers = RawValue(ctypes.c_int, num_workers)\n    wakeup_r, wakeup_w = os.pipe()\n    os.set_blocking(wakeup_r, False)\n    _shutdown_wakeup_fd = wakeup_w\n\n    # Keep a dup of the read end so we can spawn new workers later\n    spare_read_fd = os.dup(read_sock.fileno())\n\n    for i in range(num_workers):\n        worker = Process(target=worker_process,\n                         args=(read_sock.fileno(), tmpdir, shm_basename, shutdown_flag,\n                               busy_count, total_workers, wakeup_w))\n        worker.start()\n        workers.append(worker)\n    read_sock.close()  # main/listener don't need the read end (spare_read_fd kept)\n\n    # Start client listener process\n    listener_process = Process(\n        target=client_listener,\n        args=(write_sock.fileno(), socket_path, tmpdir, shm_basename, shutdown_flag)\n    )\n    listener_process.start()\n    write_sock.close()  # main doesn't need the write end\n\n    # Main loop: monitor wake-up pipe, spawn new workers when all are busy,\n    # and reap idle workers that have exited.\n    while not shutdown_flag.value:\n        rlist, _, _ = select.select([wakeup_r], [], [], 0.01)\n        if rlist:\n            try:\n                os.read(wakeup_r, 4096)  # drain pipe\n            except OSError:\n                pass\n\n        # Reap dead workers (idle timeout or error exit)\n        alive = []\n        for w in workers:\n            if w.is_alive():\n                alive.append(w)\n            else:\n                w.join(timeout=0)\n                w.close()\n        workers = alive\n        total_workers.value = max(1, len(workers))\n\n        # Spawn a new worker if all are busy (or all have exited)\n        if len(workers) == 0 or busy_count.value >= total_workers.value:\n            w = Process(target=worker_process,\n                        args=(spare_read_fd, tmpdir, shm_basename, shutdown_flag,\n                              busy_count, total_workers, wakeup_w))\n            w.start()\n            workers.append(w)\n            total_workers.value = len(workers)\n\n    # Shutdown sequence\n    os.close(wakeup_r)\n    os.close(wakeup_w)\n    os.close(spare_read_fd)\n\n    # 1. Stop listener first\n    listener_process.terminate()\n    listener_process.join(timeout=0.001)\n    listener_process.kill()\n    listener_process.join()  # Final blocking reap\n    listener_process.close()\n\n    # 2. Terminate workers with escalating force\n    for p in workers:\n        if p.is_alive():\n            p.kill()\n        p.join()  # Final blocking reap\n        p.close()\n\n    sys.exit(0)\n"
  },
  {
    "path": "data/lang/py/pymorloc.c",
    "content": "#define PY_SSIZE_T_CLEAN\n#include \"morloc.h\"\n#include \"Python.h\"\n#include <errno.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <sys/stat.h>\n\n// boilerplate for numpy support\n#define PY_ARRAY_UNIQUE_SYMBOL MORLOC_ARRAY_API\n#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION\n#include <numpy/arrayobject.h>\n\n// SHM tracker for _put_value allocations (deferred cleanup)\n#define SHM_TRACKER_INIT_CAP 16\ntypedef struct {\n    absptr_t ptr;\n    Schema* schema;\n} shm_entry_t;\nstatic shm_entry_t* shm_tracker = NULL;\nstatic size_t shm_tracker_count = 0;\nstatic size_t shm_tracker_cap = 0;\n\nstatic void shm_tracker_push(absptr_t ptr, Schema* schema) {\n    if (shm_tracker_count >= shm_tracker_cap) {\n        size_t new_cap = shm_tracker_cap ? shm_tracker_cap * 2 : SHM_TRACKER_INIT_CAP;\n        shm_entry_t* new_buf = (shm_entry_t*)realloc(shm_tracker, new_cap * sizeof(shm_entry_t));\n        if (!new_buf) return;\n        shm_tracker = new_buf;\n        shm_tracker_cap = new_cap;\n    }\n    shm_tracker[shm_tracker_count].ptr = ptr;\n    shm_tracker[shm_tracker_count].schema = schema;\n    shm_tracker_count++;\n}\n\nstatic void flush_shm_tracker(void) {\n    for (size_t i = 0; i < shm_tracker_count; i++) {\n        char* err = NULL;\n        block_header_t* blk = (block_header_t*)((char*)shm_tracker[i].ptr - sizeof(block_header_t));\n        if (shm_tracker[i].schema && blk->reference_count <= 1) {\n            shfree_by_schema(shm_tracker[i].ptr, shm_tracker[i].schema, &err);\n            if (err) { free(err); err = NULL; }\n        }\n        shfree(shm_tracker[i].ptr, &err);\n        if (err) { free(err); }\n        if (shm_tracker[i].schema) {\n            free_schema(shm_tracker[i].schema);\n        }\n    }\n    shm_tracker_count = 0;\n}\n\n#define NOTHING\n\n#define MAYFAIL \\\n    char* child_errmsg_ = NULL; \\\n\n// Returns a strdup'd string that the caller must free, or NULL.\nchar* get_prior_err(){\n    char* prior_err = NULL;\n    if (PyErr_Occurred()) {\n        // Fetch existing exception\n        PyObject *type, *value, *traceback;\n        PyErr_Fetch(&type, &value, &traceback);\n\n        // Extract error message\n        PyObject* str = PyObject_Str(value);  // Convert exception to string\n        if (str) {\n            const char* raw = PyUnicode_AsUTF8(str);\n            if (raw) {\n                prior_err = strdup(raw);\n            }\n            Py_DECREF(str);\n        }\n        Py_XDECREF(type);\n        Py_XDECREF(value);\n        Py_XDECREF(traceback);\n    }\n    return prior_err;\n}\n\n\n#define PyTRY(fun, ...) \\\n    fun(__VA_ARGS__ __VA_OPT__(,) &child_errmsg_); \\\n    if(child_errmsg_ != NULL){ \\\n        char* prior_err = get_prior_err(); \\\n        if(prior_err == NULL){ \\\n            PyErr_Format(PyExc_RuntimeError, \"Error (%s:%d in %s):\\n%s\", __FILE__, __LINE__, __func__, child_errmsg_); \\\n        } else { \\\n            PyErr_Format(PyExc_RuntimeError, \"%s\\nError (%s:%d in %s):\\n%s\", prior_err, __FILE__, __LINE__, __func__, child_errmsg_); \\\n            free(prior_err); \\\n        } \\\n        goto error; \\\n    }\n\n#define PyRAISE(msg, ...) { \\\n    char* prior_err_ = get_prior_err(); \\\n    if(prior_err_ == NULL){ \\\n        PyErr_Format(PyExc_RuntimeError, \"Error (%s:%d in %s):\\n\" msg \"\\n\", __FILE__, __LINE__, __func__, ##__VA_ARGS__); \\\n    } else { \\\n        PyErr_Format(PyExc_RuntimeError, \"%s\\nError (%s:%d in %s):\\n\" msg \"\\n\", prior_err_, __FILE__, __LINE__, __func__, ##__VA_ARGS__); \\\n        free(prior_err_); \\\n    } \\\n    goto error; \\\n    }\n\n#define PyTRACE(cond) \\\n    if(cond){ \\\n        char* prior_err = get_prior_err(); \\\n        if(prior_err != NULL){ \\\n            PyErr_Format(PyExc_TypeError, \"Error (%s:%d in %s):\\n%s\", __FILE__, __LINE__, __func__, prior_err); \\\n            free(prior_err); \\\n            goto error; \\\n        } \\\n    }\n\nPyObject* numpy_module = NULL;\n\n\n// This function will be called to import numpy if, and only if, a numpy feature\n// is used. This avoids the agonizingly long numpy import time.\nvoid* import_numpy() {\n    numpy_module = PyImport_ImportModule(\"numpy\");\n    if(numpy_module == NULL){\n        PyRAISE(\"NumPy is not available\");\n    }\n\n    import_array();\n\nerror:\n    return NULL;\n}\n\n\n\n// Map morloc schema element type to numpy type number\nstatic int schema_to_npy_type(morloc_serial_type type) {\n    switch (type) {\n        case MORLOC_BOOL:    return NPY_BOOL;\n        case MORLOC_SINT8:   return NPY_INT8;\n        case MORLOC_SINT16:  return NPY_INT16;\n        case MORLOC_SINT32:  return NPY_INT32;\n        case MORLOC_SINT64:  return NPY_INT64;\n        case MORLOC_UINT8:   return NPY_UINT8;\n        case MORLOC_UINT16:  return NPY_UINT16;\n        case MORLOC_UINT32:  return NPY_UINT32;\n        case MORLOC_UINT64:  return NPY_UINT64;\n        case MORLOC_FLOAT32: return NPY_FLOAT32;\n        case MORLOC_FLOAT64: return NPY_FLOAT64;\n        default:             return -1;\n    }\n}\n\nPyObject* fromAnything(const Schema* schema, const void* data, const void* base_ptr){ MAYFAIL\n\n    PyObject* obj = NULL;\n    switch (schema->type) {\n        case MORLOC_NIL:\n            Py_RETURN_NONE;\n        case MORLOC_BOOL:\n            obj = PyBool_FromLong(*(bool*)data);\n            break;\n        case MORLOC_SINT8:\n            obj = PyLong_FromLong(*(int8_t*)data);\n            break;\n        case MORLOC_SINT16:\n            obj = PyLong_FromLong(*(int16_t*)data);\n            break;\n        case MORLOC_SINT32:\n            obj = PyLong_FromLong(*(int32_t*)data);\n            break;\n        case MORLOC_SINT64:\n            obj = PyLong_FromLongLong(*(int64_t*)data);\n            break;\n        case MORLOC_UINT8:\n            obj = PyLong_FromUnsignedLong(*(uint8_t*)data);\n            break;\n        case MORLOC_UINT16:\n            obj = PyLong_FromUnsignedLong(*(uint16_t*)data);\n            break;\n        case MORLOC_UINT32:\n            obj = PyLong_FromUnsignedLong(*(uint32_t*)data);\n            break;\n        case MORLOC_UINT64:\n            obj = PyLong_FromUnsignedLongLong(*(uint64_t*)data);\n            break;\n        case MORLOC_FLOAT32:\n            obj = PyFloat_FromDouble(*(float*)data);\n            break;\n        case MORLOC_FLOAT64:\n            obj = PyFloat_FromDouble(*(double*)data);\n            break;\n        case MORLOC_STRING: {\n            Array* str_array = (Array*)data;\n            void* tmp_ptr = NULL;\n\n            if (str_array->size != 0) {\n                tmp_ptr = PyTRY(resolve_relptr, str_array->data, base_ptr);\n            }\n\n            if (schema->hint != NULL && strcmp(schema->hint, \"bytes\") == 0) {\n                // load binary data as a python bytes object\n                if (str_array->size == 0) {\n                    obj = PyBytes_FromStringAndSize(\"\", 0);  // empty bytes object\n                } else {\n                    obj = PyBytes_FromStringAndSize(tmp_ptr, str_array->size);\n                }\n                if (!obj) {\n                    PyRAISE(\"Failed to parse data as bytes\");\n                }\n            } else if (schema->hint != NULL && strcmp(schema->hint, \"bytearray\") == 0) {\n                // load binary data as a python bytearray object\n                if (str_array->size == 0) {\n                    obj = PyByteArray_FromStringAndSize(\"\", 0);  // empty bytearray object\n                } else {\n                    obj = PyByteArray_FromStringAndSize(tmp_ptr, str_array->size);\n                }\n                if (!obj) {\n                    PyRAISE(\"Failed to parse data as bytearray\");\n                }\n            } else {\n                // otherwise, load this as a str type\n                if (str_array->size == 0) {\n                    obj = PyUnicode_New(0, 127);  // empty string object\n                } else {\n                    obj = PyUnicode_FromStringAndSize(tmp_ptr, str_array->size);\n                }\n                if (!obj) {\n                    PyRAISE(\"Failed to parse data as string\");\n                }\n            }\n            break;\n        }\n        case MORLOC_ARRAY: {\n            Array* array = (Array*)data;\n            if (schema->hint != NULL && strcmp(schema->hint, \"numpy.ndarray\") == 0) {\n                import_numpy();\n                Schema* element_schema = schema->parameters[0];\n                npy_intp dims[] = {array->size};\n                void* absptr = NULL;\n                int nd = 1; // number of dimensions\n                int type_num;\n                // Determine the NumPy type number based on the element schema\n                switch (element_schema->type) {\n                    case MORLOC_BOOL:    type_num = NPY_BOOL; break;\n                    case MORLOC_SINT8:   type_num = NPY_INT8; break;\n                    case MORLOC_SINT16:  type_num = NPY_INT16; break;\n                    case MORLOC_SINT32:  type_num = NPY_INT32; break;\n                    case MORLOC_SINT64:  type_num = NPY_INT64; break;\n                    case MORLOC_UINT8:   type_num = NPY_UINT8; break;\n                    case MORLOC_UINT16:  type_num = NPY_UINT16; break;\n                    case MORLOC_UINT32:  type_num = NPY_UINT32; break;\n                    case MORLOC_UINT64:  type_num = NPY_UINT64; break;\n                    case MORLOC_FLOAT32: type_num = NPY_FLOAT32; break;\n                    case MORLOC_FLOAT64: type_num = NPY_FLOAT64; break;\n                    default:\n                        PyRAISE(\"Unsupported element type for NumPy array\");\n                }\n\n                absptr = PyTRY(resolve_relptr, array->data, base_ptr);\n\n                // Create the NumPy array\n                obj = PyArray_SimpleNewFromData(nd, dims, type_num, absptr);\n\n                if(obj == NULL) {\n                    PyRAISE(\"Failed to parse data\");\n                }\n\n                // Note that we do not want to give ownership to Python\n                // This is shared memory, which means, python should not mutate\n                // it.\n\n            } else if (schema->hint != NULL && strcmp(schema->hint, \"bytearray\") == 0) {\n                // Create a Python bytearray object\n                void* absptr = PyTRY(resolve_relptr, array->data, base_ptr);\n                obj = PyByteArray_FromStringAndSize((const char*)absptr, array->size);\n                if (!obj) {\n                    PyErr_SetString(PyExc_TypeError, \"Failed to create bytearray\");\n                    goto error;\n                }\n                // Note: Similar to the numpy case, we don't want to give ownership to Python.\n                // The bytearray is created from a copy of the data, so no additional handling is needed.\n            } else if (schema->parameters[0]->type == MORLOC_UINT8) {\n                // Create a Python bytes object for UINT8 arrays\n                void* tmp_ptr = PyTRY(resolve_relptr, array->data, base_ptr);\n                obj = PyBytes_FromStringAndSize((const char*)tmp_ptr, array->size);\n                if (obj == NULL) {\n                    PyRAISE(\"Failed to one bytes\")\n                }\n            } else if (schema->hint == NULL || (schema->hint != NULL && strcmp(schema->hint, \"list\") == 0)) {\n                // For other types, create a standard list\n                obj = PyList_New(array->size);\n                if(obj == NULL){\n                    PyRAISE(\"Failed to one string\");\n                }\n                if(array->size > 0){\n                    char* start = (char*) PyTRY(resolve_relptr, array->data, base_ptr);\n                    size_t width = schema->parameters[0]->width;\n                    Schema* element_schema = schema->parameters[0];\n                    for (size_t i = 0; i < array->size; i++) {\n                        PyObject* item = fromAnything(element_schema, start + width * i, base_ptr);\n                        if (!item || PyList_SetItem(obj, i, item) < 0) {\n                            Py_XDECREF(item);\n                            PyRAISE(\"Failed to access element in list\")\n                        }\n                    }\n                }\n            } else {\n                PyRAISE(\"Unexpected array hint\");\n            }\n            break;\n        }\n        case MORLOC_TUPLE: {\n            obj = PyTuple_New(schema->size);\n            if(obj == NULL){\n                PyRAISE(\"Failed in tuple\");\n            }\n            for (size_t i = 0; i < schema->size; i++) {\n                void* item_ptr = (char*)data + schema->offsets[i];\n                PyObject* item = fromAnything(schema->parameters[i], item_ptr, base_ptr);\n                if (!item || PyTuple_SetItem(obj, i, item) < 0) {\n                    Py_XDECREF(item);\n                    PyRAISE(\"Failed to access tuple element\");\n                }\n            }\n            break;\n        }\n        case MORLOC_MAP: {\n            obj = PyDict_New();\n            if(obj == NULL){\n                PyRAISE(\"Failed in map\");\n            }\n            for (size_t i = 0; i < schema->size; i++) {\n                void* item_ptr = (char*)data + schema->offsets[i];\n                PyObject* value = fromAnything(schema->parameters[i], item_ptr, base_ptr);\n                PyObject* key = PyUnicode_FromString(schema->keys[i]);\n                if (!value || !key || PyDict_SetItem(obj, key, value) < 0) {\n                    Py_XDECREF(value);\n                    Py_XDECREF(key);\n                    PyRAISE(\"Failed to access map element\");\n                }\n                Py_DECREF(key);\n                Py_DECREF(value);\n            }\n            break;\n        }\n        case MORLOC_OPTIONAL: {\n            uint8_t tag = *(const uint8_t*)data;\n            if (tag == 0) {\n                Py_RETURN_NONE;\n            }\n            obj = fromAnything(schema->parameters[0], (const char*)data + schema->offsets[0], base_ptr);\n            if (!obj) {\n                PyRAISE(\"Failed to deserialize optional inner value\");\n            }\n            break;\n        }\n        case MORLOC_TENSOR: {\n            import_numpy();\n            const Tensor* tensor = (const Tensor*)data;\n            size_t ndim = schema_tensor_ndim(schema);\n\n            int type_num = schema_to_npy_type(schema->parameters[0]->type);\n            if (type_num < 0) { PyRAISE(\"Unsupported tensor element type\"); }\n\n            if (tensor->total_elements == 0) {\n                npy_intp zero_dims[1] = {0};\n                obj = PyArray_SimpleNew(1, zero_dims, type_num);\n                break;\n            }\n\n            const int64_t* shape = (const int64_t*)resolve_relptr(tensor->shape, base_ptr, NULL);\n            const void* tdata = resolve_relptr(tensor->data, base_ptr, NULL);\n\n            npy_intp np_dims[5];\n            for (size_t i = 0; i < ndim; i++) np_dims[i] = (npy_intp)shape[i];\n\n            // Create numpy array as a copy (R/W) from the data\n            obj = PyArray_SimpleNewFromData((int)ndim, np_dims, type_num, (void*)tdata);\n            if (!obj) { PyRAISE(\"Failed to create numpy array from tensor\"); }\n\n            // Make a copy so the array owns its data (SHM may be freed)\n            PyObject* owned = PyArray_NewCopy((PyArrayObject*)obj, NPY_CORDER);\n            Py_DECREF(obj);\n            obj = owned;\n            if (!obj) { PyRAISE(\"Failed to copy tensor data\"); }\n            break;\n        }\n        default:\n            PyRAISE(\"Unsupported schema type\");\n    }\n\n    return obj;\n\nerror:\n    Py_XDECREF(obj);\n    return NULL;\n}\n\n\n#define HANDLE_SINT_TYPE(CTYPE, PYLONG_FUNC, MIN, MAX) \\\n    do { \\\n        if (!PyLong_Check(obj)) { \\\n            PyErr_Format(PyExc_TypeError, \"Expected int for %s, but got %s\", #CTYPE, Py_TYPE(obj)->tp_name); \\\n            goto error; \\\n        } \\\n        long long value = PYLONG_FUNC(obj); \\\n        if (value < MIN || value > MAX || PyErr_Occurred()) { \\\n            PyErr_Format(PyExc_OverflowError, \"Integer overflow for %s\", #CTYPE); \\\n            goto error; \\\n        } \\\n        *(CTYPE*)dest = (CTYPE)value; \\\n    } while(0)\n\n#define HANDLE_UINT_TYPE(CTYPE, PYLONG_FUNC, MAX) \\\n    do { \\\n        if (!PyLong_Check(obj)) { \\\n            PyErr_Format(PyExc_TypeError, \"Expected int for %s, but got %s\", #CTYPE, Py_TYPE(obj)->tp_name); \\\n            goto error; \\\n        } \\\n        unsigned long long value = PYLONG_FUNC(obj); \\\n        if (value > MAX || PyErr_Occurred()) { \\\n            PyErr_Format(PyExc_OverflowError, \"Integer overflow for %s\", #CTYPE); \\\n            goto error; \\\n        } \\\n        *(CTYPE*)dest = (CTYPE)value; \\\n    } while(0)\n\n\n\nssize_t get_shm_size(const Schema* schema, PyObject* obj) {\n    switch (schema->type) {\n        case MORLOC_NIL:\n        case MORLOC_BOOL:\n        case MORLOC_SINT8:\n        case MORLOC_SINT16:\n        case MORLOC_SINT32:\n        case MORLOC_SINT64:\n        case MORLOC_UINT8:\n        case MORLOC_UINT16:\n        case MORLOC_UINT32:\n        case MORLOC_UINT64:\n        case MORLOC_FLOAT32:\n        case MORLOC_FLOAT64:\n            return schema->width;\n        case MORLOC_STRING:\n        case MORLOC_ARRAY:\n            if (schema->type == MORLOC_STRING && !(PyUnicode_Check(obj) || PyBytes_Check(obj) || PyByteArray_Check(obj) )) {\n                PyRAISE(\"Expected str or bytes for MORLOC_STRING, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n            if (schema->type == MORLOC_ARRAY && !(PyList_Check(obj) || PyBytes_Check(obj) || PyByteArray_Check(obj) || PyObject_HasAttrString(obj, \"__array_interface__\"))) {\n                PyRAISE(\"Expected list, bytes, bytearray, or numpy array for MORLOC_ARRAY, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n        \n            {\n                ssize_t required_size = 0;\n                // worst-case cursor alignment padding for element data\n                required_size += (ssize_t)(schema_alignment(schema->parameters[0]) - 1);\n\n                if (PyList_Check(obj)) {\n                    Py_ssize_t list_size = PyList_Size(obj);\n                    size_t element_width = schema->parameters[0]->width;\n                    switch(schema->parameters[0]->type){\n                        case MORLOC_NIL:\n                        case MORLOC_BOOL:\n                        case MORLOC_SINT8:\n                        case MORLOC_SINT16:\n                        case MORLOC_SINT32:\n                        case MORLOC_SINT64:\n                        case MORLOC_UINT8:\n                        case MORLOC_UINT16:\n                        case MORLOC_UINT32:\n                        case MORLOC_UINT64:\n                        case MORLOC_FLOAT32:\n                        case MORLOC_FLOAT64:\n                            required_size += list_size * element_width;\n                            break;\n                        case MORLOC_STRING:\n                        case MORLOC_ARRAY:\n                        case MORLOC_TUPLE:\n                        case MORLOC_MAP:\n                        case MORLOC_OPTIONAL:\n                            for(size_t i = 0; i < (size_t)list_size; i++){\n                               required_size += get_shm_size(schema->parameters[0], PyList_GetItem(obj, i));\n                            }\n                            break;\n                    }\n                } else if (PyObject_HasAttrString(obj, \"__array_interface__\")) {\n                    import_numpy();\n                    PyArrayObject *arr = (PyArrayObject *)obj;\n                    npy_intp *dims = PyArray_DIMS(arr);\n                    int ndim = PyArray_NDIM(arr);\n                    size_t total_elements = 1;\n                    for (int i = 0; i < ndim; i++) {\n                        total_elements *= dims[i];\n                    }\n                    required_size += total_elements * PyArray_ITEMSIZE(arr);\n                } else if (PyBytes_Check(obj)) {\n                    required_size += (ssize_t)PyBytes_GET_SIZE(obj);\n                } else if (PyByteArray_Check(obj)) {\n                    required_size += (ssize_t)PyByteArray_GET_SIZE(obj);\n                } else if (PyUnicode_Check(obj)) {\n                    PyUnicode_AsUTF8AndSize(obj, &required_size);\n                } else {\n                    PyRAISE(\"Unsupported data type\");\n                }\n\n                required_size += sizeof(Array);\n                return required_size;\n            }\n\n        case MORLOC_TUPLE:\n            if (!PyTuple_Check(obj) && !PyList_Check(obj)) {\n                PyRAISE(\"Expected tuple or list for MORLOC_TUPLE, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n\n            {\n                Py_ssize_t size = PyTuple_Check(obj) ? PyTuple_Size(obj) : PyList_Size(obj);\n                if ((size_t)size != schema->size) {\n                    PyRAISE(\"Tuple/List size mismatch\");\n                }\n\n                size_t required_size = schema->width;\n\n                for (Py_ssize_t i = 0; i < size; ++i) {\n                    PyObject* item = PyTuple_Check(obj) ? PyTuple_GetItem(obj, i) : PyList_GetItem(obj, i);\n                    ssize_t element_size = get_shm_size(schema->parameters[i], item);\n                    if(element_size != -1){\n                        if ((size_t)element_size > schema->parameters[i]->width) {\n                            required_size += (size_t)element_size - schema->parameters[i]->width;\n                        }\n                    } else {\n                        return -1;\n                    }\n                }\n                return (ssize_t)required_size;\n            }\n\n        case MORLOC_MAP:\n            if (!PyDict_Check(obj)) {\n                PyRAISE(\"Expected dict for MORLOC_MAP, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n\n            {\n                size_t required_size = schema->width;\n                for (size_t i = 0; i < schema->size; ++i) {\n                    PyObject* key = PyUnicode_FromString(schema->keys[i]);\n                    PyObject* value = PyDict_GetItem(obj, key);\n                    Py_DECREF(key);\n                    if (value) {\n                        ssize_t element_size = get_shm_size(schema->parameters[i], value);\n                        if(element_size != -1){\n                            if ((size_t)element_size > schema->parameters[i]->width) {\n                                required_size += (size_t)element_size - schema->parameters[i]->width;\n                            }\n                        } else {\n                            return -1;\n                        }\n                    }\n                }\n                return (ssize_t)required_size;\n            }\n\n        case MORLOC_OPTIONAL:\n            if (obj == Py_None) {\n                return (ssize_t)schema->width;\n            }\n            {\n                ssize_t inner_size = get_shm_size(schema->parameters[0], obj);\n                if (inner_size == -1) return -1;\n                ssize_t extra = (inner_size > (ssize_t)schema->parameters[0]->width) ? inner_size - (ssize_t)schema->parameters[0]->width : 0;\n                return (ssize_t)schema->width + extra;\n            }\n\n        case MORLOC_TENSOR:\n            {\n                import_numpy();\n                int type_num = schema_to_npy_type(schema->parameters[0]->type);\n                if (type_num < 0) { PyRAISE(\"Unsupported tensor element type\"); }\n                PyArrayObject* arr = (PyArrayObject*)PyArray_FROM_OTF(obj, type_num, NPY_ARRAY_C_CONTIGUOUS);\n                if (!arr) { PyRAISE(\"Expected numpy array for MORLOC_TENSOR\"); }\n                size_t total = (size_t)PyArray_SIZE(arr);\n                size_t elem_width = schema->parameters[0]->width;\n                ssize_t required = (ssize_t)sizeof(Tensor);\n                required += (ssize_t)(_Alignof(int64_t) - 1);\n                required += (ssize_t)(schema_tensor_ndim(schema) * sizeof(int64_t));\n                required += (ssize_t)(schema_alignment(schema->parameters[0]) - 1);\n                required += (ssize_t)(total * elem_width);\n                Py_DECREF(arr);\n                return required;\n            }\n\n        default:\n            PyRAISE(\"Unsupported schema type\");\n    }\n\n    PyRAISE(\"Reached the unreachable\");\n\nerror:\n    return -1;\n}\n\n\n\nint to_voidstar_r(void* dest, void** cursor, const Schema* schema, PyObject* obj) { MAYFAIL\n    switch (schema->type) {\n        case MORLOC_NIL:\n            if (obj != Py_None) {\n                PyRAISE(\"Expected None for MORLOC_NIL, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n            *((int8_t*)dest) = (int8_t)0;\n            break;\n\n        case MORLOC_BOOL:\n            if (!PyBool_Check(obj)) {\n                PyRAISE(\"Expected bool for MORLOC_BOOL, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n            *((bool*)dest) = (obj == Py_True);\n            break;\n\n        case MORLOC_SINT8:\n            HANDLE_SINT_TYPE(int8_t, PyLong_AsLongLong, INT8_MIN, INT8_MAX);\n            break;\n        case MORLOC_SINT16:\n            HANDLE_SINT_TYPE(int16_t, PyLong_AsLongLong, INT16_MIN, INT16_MAX);\n            break;\n        case MORLOC_SINT32:\n            HANDLE_SINT_TYPE(int32_t, PyLong_AsLongLong, INT32_MIN, INT32_MAX);\n            break;\n        case MORLOC_SINT64:\n            HANDLE_SINT_TYPE(int64_t, PyLong_AsLongLong, INT64_MIN, INT64_MAX);\n            break;\n        case MORLOC_UINT8:\n            HANDLE_UINT_TYPE(uint8_t, PyLong_AsUnsignedLongLong, UINT8_MAX);\n            break;\n        case MORLOC_UINT16:\n            HANDLE_UINT_TYPE(uint16_t, PyLong_AsUnsignedLongLong, UINT16_MAX);\n            break;\n        case MORLOC_UINT32:\n            HANDLE_UINT_TYPE(uint32_t, PyLong_AsUnsignedLongLong, UINT32_MAX);\n            break;\n        case MORLOC_UINT64:\n            HANDLE_UINT_TYPE(uint64_t, PyLong_AsUnsignedLongLong, UINT64_MAX);\n            break;\n\n        case MORLOC_FLOAT32:\n            if (!PyFloat_Check(obj)) {\n                PyRAISE(\"Expected float for MORLOC_FLOAT32, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n            *((float*)dest) = (float)PyFloat_AsDouble(obj);\n            break;\n\n        case MORLOC_FLOAT64:\n            if(PyFloat_Check(obj))\n            {\n                *((double*)dest) = PyFloat_AsDouble(obj);\n            } else if(PyLong_Check(obj)){\n                *((double*)dest) = (double)PyLong_AsLongLong(obj);\n            } else {\n                PyRAISE(\"Expected float or int for MORLOC_FLOAT64, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n            break;\n\n        case MORLOC_STRING:\n        case MORLOC_ARRAY:\n            if (schema->type == MORLOC_STRING && !(PyUnicode_Check(obj) || PyBytes_Check(obj)  || PyByteArray_Check(obj))) {\n                PyRAISE(\"Expected str or bytes for MORLOC_STRING, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n    \n            if (schema->type == MORLOC_ARRAY && !(PyList_Check(obj) || PyBytes_Check(obj) || PyByteArray_Check(obj) || PyObject_HasAttrString(obj, \"__array_interface__\"))) { \n                PyRAISE(\"Expected list, bytes, bytearray, or numpy array for MORLOC_ARRAY, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n    \n            {\n                Py_ssize_t size;\n\n                // \"bytes\" type is mutable, so it exposes a non-const pointer \n                char* mutable_data = NULL;\n\n                // strings type are immutable, so const\n                const char* immutable_data = NULL; \n\n                if (PyList_Check(obj)) {\n                    size = PyList_Size(obj);\n                } else if (PyBytes_Check(obj)) {\n                    // This needs non-const data\n                    PyBytes_AsStringAndSize(obj, &mutable_data, &size);\n                } else if (PyByteArray_Check(obj)) {\n                    mutable_data = PyByteArray_AS_STRING(obj);\n                    size = PyByteArray_GET_SIZE(obj);\n                } else if (schema->type == MORLOC_ARRAY && PyObject_HasAttrString(obj, \"__array_interface__\")) { // check if it is a numpy array\n                    import_numpy();\n                    PyArrayObject* arr = (PyArrayObject*)obj;\n                    size = PyArray_SIZE(arr);\n                    // This needs const data\n                    immutable_data = PyArray_DATA(arr); // Get the data pointer\n\n                    // Verify that the array is contiguous\n                    if (!PyArray_ISCONTIGUOUS(arr)) {\n                        PyRAISE(\"NumPy array must be contiguous\");\n                    }\n                } else {\n                    immutable_data = PyUnicode_AsUTF8AndSize(obj, &size);\n                }\n    \n                Array* result = (Array*)dest;\n                result->size = (size_t)size;\n\n                if(result->size == 0){\n                    result->data = RELNULL;\n                    break;\n                }\n\n                // align cursor for element data placement\n                *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, schema_alignment(schema->parameters[0]));\n\n                result->data = PyTRY(abs2rel, *cursor);\n\n                if (PyList_Check(obj)) {\n                    // Fixed size width of each element (variable size data will\n                    // be written to the cursor location)\n                    size_t width = schema->parameters[0]->width;\n    \n                    // Move the cursor to the location immediately after the\n                    // fixed sized elements\n                    *cursor = (void*)(*(char**)cursor + size * width);\n\n                    char* start = (char*) PyTRY(rel2abs, result->data);\n                    Schema* element_schema = schema->parameters[0];\n                    for (Py_ssize_t i = 0; i < size; i++) {\n                        PyObject* item = PyList_GetItem(obj, i);\n                        if (to_voidstar_r(start + width * i, cursor, element_schema, item) != 0) {\n                            goto error;\n                        }\n                    }\n\n                } else if (PyBytes_Check(obj) || PyByteArray_Check(obj)){\n                    absptr_t tmp_ptr = PyTRY(rel2abs, result->data);\n                    memcpy(tmp_ptr, mutable_data, size);\n                    // move cursor to the location after the copied data\n                    *cursor = (void*)(*(char**)cursor + size);\n                }\n                else{\n                    size_t width = schema->parameters[0]->width;\n\n                    absptr_t tmp_ptr = PyTRY(rel2abs, result->data);\n                    memcpy(tmp_ptr, immutable_data, size * width);\n\n                    // Move the cursor to the location immediately after the\n                    // fixed sized elements\n                    *cursor = (void*)(*(char**)cursor + size * width);\n                }\n            }\n            break;\n\n\n        case MORLOC_TUPLE:\n            if (!PyTuple_Check(obj) && !PyList_Check(obj)) {\n                PyRAISE(\"Expected tuple or list for MORLOC_TUPLE, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n\n            {\n                Py_ssize_t size = PyTuple_Check(obj) ? PyTuple_Size(obj) : PyList_Size(obj);\n                if ((size_t)size != schema->size) {\n                    PyRAISE(\"Tuple/List size mismatch\");\n                }\n                for (Py_ssize_t i = 0; i < size; ++i) {\n                    PyObject* item = PyTuple_Check(obj) ? PyTuple_GetItem(obj, i) : PyList_GetItem(obj, i);\n                    if (to_voidstar_r((char*)dest + schema->offsets[i], cursor, schema->parameters[i], item) != 0) {\n                        goto error;\n                    }\n                }\n            }\n            break;\n\n        case MORLOC_MAP:\n            if (!PyDict_Check(obj)) {\n                PyRAISE(\"Expected dict for MORLOC_MAP, but got %s\", Py_TYPE(obj)->tp_name);\n            }\n\n            {\n                for (size_t i = 0; i < schema->size; ++i) {\n                    PyObject* key = PyUnicode_FromString(schema->keys[i]);\n                    PyObject* value = PyDict_GetItem(obj, key);\n                    Py_DECREF(key);\n                    if (value) {\n                        if (to_voidstar_r((char*)dest + schema->offsets[i], cursor, schema->parameters[i], value) != 0) {\n                            goto error;\n                        }\n                    }\n                }\n            }\n            break;\n\n        case MORLOC_OPTIONAL:\n            if (obj == Py_None) {\n                *((uint8_t*)dest) = 0;\n                memset((char*)dest + schema->offsets[0], 0, schema->parameters[0]->width);\n            } else {\n                *((uint8_t*)dest) = 1;\n                if (to_voidstar_r((char*)dest + schema->offsets[0], cursor, schema->parameters[0], obj) != 0) {\n                    goto error;\n                }\n            }\n            break;\n\n        case MORLOC_TENSOR:\n            {\n                import_numpy();\n                int type_num = schema_to_npy_type(schema->parameters[0]->type);\n                if (type_num < 0) { PyRAISE(\"Unsupported tensor element type\"); }\n                PyArrayObject* arr = (PyArrayObject*)PyArray_FROM_OTF(obj, type_num, NPY_ARRAY_C_CONTIGUOUS);\n                if (!arr) { PyRAISE(\"Expected numpy array for MORLOC_TENSOR\"); }\n\n                int ndim = PyArray_NDIM(arr);\n                npy_intp* np_shape = PyArray_DIMS(arr);\n                size_t total = (size_t)PyArray_SIZE(arr);\n                size_t elem_width = schema->parameters[0]->width;\n\n                Tensor* tensor = (Tensor*)dest;\n                tensor->total_elements = total;\n                tensor->device_type = 0;\n                tensor->device_id = 0;\n\n                if (total == 0) {\n                    tensor->shape = RELNULL;\n                    tensor->data = RELNULL;\n                    Py_DECREF(arr);\n                    break;\n                }\n\n                // Write shape array\n                *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, _Alignof(int64_t));\n                {\n                    char* rel_err = NULL;\n                    tensor->shape = abs2rel((absptr_t)*cursor, &rel_err);\n                    if (rel_err) { free(rel_err); Py_DECREF(arr); PyRAISE(\"abs2rel failed for tensor shape\"); }\n                }\n                int64_t* shape_dst = (int64_t*)*cursor;\n                for (int i = 0; i < ndim; i++) shape_dst[i] = (int64_t)np_shape[i];\n                *cursor = (char*)*cursor + ndim * sizeof(int64_t);\n\n                // Write data buffer\n                size_t elem_align = schema_alignment(schema->parameters[0]);\n                *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, elem_align);\n                {\n                    char* rel_err = NULL;\n                    tensor->data = abs2rel((absptr_t)*cursor, &rel_err);\n                    if (rel_err) { free(rel_err); Py_DECREF(arr); PyRAISE(\"abs2rel failed for tensor data\"); }\n                }\n                memcpy(*cursor, PyArray_DATA(arr), total * elem_width);\n                *cursor = (char*)*cursor + total * elem_width;\n\n                Py_DECREF(arr);\n            }\n            break;\n\n        default:\n            PyRAISE(\"Unsupported schema type\");\n    }\n\n    return 0;\n\nerror:\n    return -1;\n}\n\nvoid* to_voidstar(const Schema* schema, PyObject* obj){ MAYFAIL\n  void* dest = NULL;\n\n  // calculate the required size of the shared memory object\n  ssize_t shm_size = get_shm_size(schema, obj);\n  if(shm_size == -1){\n      PyRAISE(\"Schema does not match object\");\n  }\n\n  // allocate the required memory as a single block\n  dest = PyTRY(shmalloc, (size_t)shm_size);\n\n  // set the write location of variable size chunks\n  void* cursor = (void*)((char*)dest + schema->width);\n\n  // write the data to the block\n  int result = to_voidstar_r(dest, &cursor, schema, obj);\n  if (result != 0) {\n      goto error;\n  }\n\n  return dest;\n\nerror:\n  if (dest != NULL) {\n      char* free_errmsg = NULL;\n      shfree(dest, &free_errmsg);\n      free(free_errmsg);\n  }\n  return NULL;\n}\n\n\nstatic PyObject* pybinding__wait_for_client(PyObject* self, PyObject* args) { MAYFAIL\n    PyObject* daemon_capsule;\n\n    if (!PyArg_ParseTuple(args, \"O\", &daemon_capsule)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    language_daemon_t* daemon = (language_daemon_t*)PyCapsule_GetPointer(daemon_capsule, \"language_daemon_t\");\n\n    int client_fd = PyTRY(wait_for_client, daemon);\n\n    return PyLong_FromLong((long)client_fd);\n\nerror:\n    return NULL;\n}\n\nstatic PyObject* pybinding__start_daemon(PyObject* self, PyObject* args) { MAYFAIL\n    const char* socket_path;\n    const char* tmpdir;\n    const char* shm_basename;\n    size_t shm_default_size;\n    language_daemon_t* daemon = NULL;\n\n    if (!PyArg_ParseTuple(args, \"sssk\", &socket_path, &tmpdir, &shm_basename, &shm_default_size)) {\n      goto error;\n    }\n\n    daemon = PyTRY(\n        start_daemon,\n        socket_path,\n        tmpdir,\n        shm_basename,\n        shm_default_size\n    );\n\n    return PyCapsule_New(daemon, \"language_daemon_t\", NULL);\n\nerror:\n    FREE(daemon)\n    return NULL;\n}\n\n\nstatic PyObject* pybinding__close_daemon(PyObject* self, PyObject* args) {\n    PyObject* daemon_capsule;\n\n    if (!PyArg_ParseTuple(args, \"O\", &daemon_capsule)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    language_daemon_t* daemon = (language_daemon_t*)PyCapsule_GetPointer(daemon_capsule, \"language_daemon_t\");\n\n    if(daemon != NULL){\n        close_daemon(&daemon);\n    }\n\n    Py_RETURN_NONE;\n\nerror:\n    return NULL;\n}\n\n\nstatic PyObject*  pybinding__read_morloc_call_packet(PyObject* self, PyObject* args){ MAYFAIL\n    char* packet;\n    size_t packet_size;\n    morloc_call_t* call_packet = NULL;\n    PyObject* py_tuple = NULL;\n    PyObject* py_args = NULL;\n    PyObject* py_mid = NULL;\n\n    if (!PyArg_ParseTuple(args, \"y#\", &packet, &packet_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n    call_packet = PyTRY(read_morloc_call_packet, (const uint8_t*)packet);\n\n    py_tuple = PyTuple_New(2);\n    if (!py_tuple) { PyRAISE(\"Allocation failed\"); }\n    py_args = PyList_New(call_packet->nargs);\n    if (!py_args) { PyRAISE(\"Allocation failed\"); }\n    py_mid = PyLong_FromLong((long)call_packet->midx);\n    if (!py_mid) { PyRAISE(\"Allocation failed\"); }\n    for(size_t i = 0; i < call_packet->nargs; i++){\n        size_t arg_packet_size = PyTRY(morloc_packet_size, call_packet->args[i]);\n        PyObject* py_arg = PyBytes_FromStringAndSize(\n            (char*)call_packet->args[i],\n            arg_packet_size\n        );\n        PyList_SetItem(py_args, i, py_arg);\n    }\n\n    PyTuple_SetItem(py_tuple, 0, py_mid);\n    PyTuple_SetItem(py_tuple, 1, py_args);\n    py_mid = NULL;  // stolen by PyTuple_SetItem\n    py_args = NULL;  // stolen by PyTuple_SetItem\n\n    free_morloc_call(call_packet);\n\n    return py_tuple;\n\nerror:\n    if (call_packet) free_morloc_call(call_packet);\n    Py_XDECREF(py_mid);\n    Py_XDECREF(py_args);\n    Py_XDECREF(py_tuple);\n    return NULL;\n}\n\nstatic PyObject*  pybinding__send_packet_to_foreign_server(PyObject* self, PyObject* args){ MAYFAIL\n    int client_fd = 0;\n    uint8_t* packet = NULL;\n    size_t packet_size = 0;\n\n    if (!PyArg_ParseTuple(args, \"iy#\", &client_fd, &packet, &packet_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    size_t bytes_sent = PyTRY(send_packet_to_foreign_server, client_fd, packet);\n\n    return PyLong_FromSize_t(bytes_sent);\n\nerror:\n    return NULL;\n}\n\n\nstatic PyObject*  pybinding__stream_from_client(PyObject* self, PyObject* args){ MAYFAIL\n    int client_fd = 0;\n    uint8_t* packet = NULL;\n\n    if (!PyArg_ParseTuple(args, \"i\", &client_fd)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    packet = PyTRY(stream_from_client, client_fd);\n\n    size_t packet_size = PyTRY(morloc_packet_size, packet);\n\n    PyObject* retval = PyBytes_FromStringAndSize((char*)packet, packet_size);\n\n    free(packet);\n\n    return retval;\n\nerror:\n    FREE(packet)\n    return NULL;\n}\n\n\n\nstatic PyObject*  pybinding__close_socket(PyObject* self, PyObject* args){\n    int socket_id = 0;\n\n    if (!PyArg_ParseTuple(args, \"i\", &socket_id)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    close_socket(socket_id);\n\n    Py_RETURN_NONE;\n\nerror:\n    return NULL;\n}\n\n// Transforms a value into a message ready for the socket\nstatic PyObject* pybinding__put_value(PyObject* self, PyObject* args){ MAYFAIL\n    uint8_t* packet = NULL;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n    size_t packet_size = 0;\n    bool tracked = false;\n\n    PyObject* obj;\n    const char* schema_str;\n\n    if (!PyArg_ParseTuple(args, \"Os\", &obj, &schema_str)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    // Arrow dispatch: if schema hint is \"arrow\", use Arrow C Data Interface\n    if (schema->hint && strcmp(schema->hint, \"arrow\") == 0) {\n        // Export pyarrow object via C Data Interface -> copy to shm -> packet\n        struct ArrowSchema arrow_schema;\n        struct ArrowArray arrow_array;\n\n        // Call obj._export_to_c(arrow_array_ptr, arrow_schema_ptr)\n        PyObject* export_result = PyObject_CallMethod(\n            obj, \"_export_to_c\",\n            \"nn\", (Py_ssize_t)&arrow_array, (Py_ssize_t)&arrow_schema);\n        if (!export_result) {\n            free_schema(schema);\n            PyRAISE(\"Failed to export pyarrow object via C Data Interface\");\n        }\n        Py_DECREF(export_result);\n\n        char* errmsg = NULL;\n        relptr_t relptr = arrow_to_shm(&arrow_array, &arrow_schema, &errmsg);\n\n        // Release the exported C Data Interface structs\n        if (arrow_schema.release) arrow_schema.release(&arrow_schema);\n        if (arrow_array.release) arrow_array.release(&arrow_array);\n\n        if (errmsg) {\n            free_schema(schema);\n            PyErr_SetString(PyExc_RuntimeError, errmsg);\n            free(errmsg);\n            return NULL;\n        }\n\n        packet = make_arrow_data_packet(relptr, schema);\n        if (!packet) {\n            free_schema(schema);\n            PyRAISE(\"Failed to create arrow data packet\");\n        }\n\n        // Track shm for cleanup\n        char* resolve_err = NULL;\n        void* shm_ptr = rel2abs(relptr, &resolve_err);\n        if (resolve_err) { free(resolve_err); }\n        if (shm_ptr) {\n            shm_tracker_push((absptr_t)shm_ptr, NULL);\n            tracked = true;\n        }\n\n        packet_size = PyTRY(morloc_packet_size, packet);\n        PyObject* retval = PyBytes_FromStringAndSize((char*)packet, packet_size);\n        free(packet);\n        free_schema(schema);\n        return retval;\n    }\n\n    voidstar = to_voidstar(schema, obj);\n    PyTRACE(voidstar == NULL)\n\n    // convert to a relative pointer conserved between language servers\n    relptr_t relptr = PyTRY(abs2rel, voidstar);\n\n    packet = PyTRY(make_data_packet_auto, voidstar, relptr, schema);\n\n    {\n        const morloc_packet_header_t* hdr = (const morloc_packet_header_t*)packet;\n        if (hdr->command.data.source == PACKET_SOURCE_RPTR) {\n            // SHM referenced by packet -- track for deferred cleanup\n            shm_tracker_push((absptr_t)voidstar, schema);\n            tracked = true;\n        } else {\n            // Data inlined in packet -- free SHM immediately\n            char* free_err = NULL;\n            shfree_by_schema((absptr_t)voidstar, schema, &free_err);\n            if (free_err) { free(free_err); free_err = NULL; }\n            shfree((absptr_t)voidstar, &free_err);\n            if (free_err) { free(free_err); }\n            voidstar = NULL;\n        }\n    }\n\n    packet_size = PyTRY(morloc_packet_size, packet);\n\n    {\n        PyObject* retval = PyBytes_FromStringAndSize((char*)packet, packet_size);\n        free(packet);\n        if (!tracked) {\n            free_schema(schema);\n        }\n        return retval;\n    }\n\nerror:\n    FREE(packet)\n    if (!tracked) {\n        if (voidstar && schema) {\n            char* free_err = NULL;\n            shfree_by_schema((absptr_t)voidstar, schema, &free_err);\n            if (free_err) { free(free_err); free_err = NULL; }\n            shfree((absptr_t)voidstar, &free_err);\n            if (free_err) { free(free_err); }\n        }\n        free_schema(schema);\n    }\n    return NULL;\n}\n\n\n// Use a key to retrieve a value\nstatic PyObject* pybinding__get_value(PyObject* self, PyObject* args){ MAYFAIL\n    uint8_t* voidstar = NULL;\n    Schema* schema = NULL;\n    PyObject* obj = NULL;\n    bool tracked = false;\n\n    const char* packet;\n    size_t packet_size;\n    const char* schema_str;\n\n    if (!PyArg_ParseTuple(args, \"y#s\", &packet, &packet_size, &schema_str)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    const morloc_packet_header_t* header = (const morloc_packet_header_t*)packet;\n    uint8_t source = header->command.data.source;\n    uint8_t format = header->command.data.format;\n\n    schema = PyTRY(parse_schema, schema_str)\n\n    // Arrow dispatch: if packet format is Arrow, import via C Data Interface\n    if (format == PACKET_FORMAT_ARROW) {\n        voidstar = PyTRY(get_morloc_data_packet_value, (uint8_t*)packet, schema);\n\n        const arrow_shm_header_t* arrow_hdr = (const arrow_shm_header_t*)voidstar;\n\n        struct ArrowSchema arrow_schema;\n        struct ArrowArray arrow_array;\n        char* arrow_err = NULL;\n        arrow_from_shm(arrow_hdr, &arrow_schema, &arrow_array, &arrow_err);\n        if (arrow_err) {\n            free_schema(schema);\n            PyErr_SetString(PyExc_RuntimeError, arrow_err);\n            free(arrow_err);\n            return NULL;\n        }\n\n        // Import via pyarrow RecordBatch.from_buffers or _import_from_c\n        PyObject* pyarrow_mod = PyImport_ImportModule(\"pyarrow\");\n        if (!pyarrow_mod) {\n            if (arrow_schema.release) arrow_schema.release(&arrow_schema);\n            if (arrow_array.release) arrow_array.release(&arrow_array);\n            free_schema(schema);\n            PyRAISE(\"pyarrow is required for arrow-typed data\");\n        }\n\n        PyObject* rb_class = PyObject_GetAttrString(pyarrow_mod, \"RecordBatch\");\n        Py_DECREF(pyarrow_mod);\n        if (!rb_class) {\n            if (arrow_schema.release) arrow_schema.release(&arrow_schema);\n            if (arrow_array.release) arrow_array.release(&arrow_array);\n            free_schema(schema);\n            PyRAISE(\"Failed to get pyarrow.RecordBatch\");\n        }\n\n        // Use RecordBatch._import_from_c(array_ptr, schema_ptr)\n        obj = PyObject_CallMethod(rb_class, \"_import_from_c\",\n            \"nn\", (Py_ssize_t)&arrow_array, (Py_ssize_t)&arrow_schema);\n        Py_DECREF(rb_class);\n\n        // Incref shm so it stays alive while pyarrow references the buffers\n        char* incref_err = NULL;\n        shincref((absptr_t)voidstar, &incref_err);\n        if (incref_err) { free(incref_err); }\n        shm_tracker_push((absptr_t)voidstar, NULL);\n\n        free_schema(schema);\n        if (!obj) return NULL;\n        return obj;\n    }\n\n    // Fast path: inline voidstar -- read directly from packet, no SHM needed\n    if (source == PACKET_SOURCE_MESG && format == PACKET_FORMAT_VOIDSTAR) {\n        const uint8_t* payload = (const uint8_t*)packet + sizeof(morloc_packet_header_t) + header->offset;\n        obj = fromAnything(schema, (const void*)payload, (const void*)payload);\n        PyTRACE(obj == NULL)\n        free_schema(schema);\n        return obj;\n    }\n\n    // SHM paths (RPTR or MESG+MSGPACK)\n    bool is_rptr = (source == PACKET_SOURCE_RPTR);\n\n    voidstar = PyTRY(get_morloc_data_packet_value, (uint8_t*)packet, schema);\n\n    // For RPTR data, increment refcount so the owner's tracker flush\n    // won't destroy data we may still need (e.g. forwarded packets).\n    if (is_rptr) {\n        char* incref_err = NULL;\n        shincref((absptr_t)voidstar, &incref_err);\n        if (incref_err) { free(incref_err); }\n        // Track for deferred decref (tracker takes schema ownership)\n        shm_tracker_push((absptr_t)voidstar, schema);\n        tracked = true;\n    }\n\n    obj = fromAnything(schema, voidstar, NULL);\n    PyTRACE(obj == NULL)\n\n    if (!tracked) {\n        free_schema(schema);\n    }\n\n    return obj;\n\nerror:\n    if (!tracked) {\n        free_schema(schema);\n    }\n    return NULL;\n}\n\n\n// Free tracked SHM allocations from put_value calls.\n// Called at dispatch start to free result SHM from previous dispatch.\nstatic PyObject* pybinding__flush_shm_tracker(PyObject* self, PyObject* args) {\n    (void)self; (void)args;\n    flush_shm_tracker();\n    Py_RETURN_NONE;\n}\n\n\n// Make a foreign call\n//\n// Arguments:\n//   1. socket path\n//   2. midx\n//   3. list of arguments, each is bytestring packet\nstatic PyObject* pybinding__foreign_call(PyObject* self, PyObject* args) { MAYFAIL\n    char* socket_path;\n    int mid;\n    PyObject* py_args;\n    const uint8_t** arg_packets = NULL;\n    Py_ssize_t nargs;\n    Py_ssize_t i;\n    uint8_t* packet = NULL;\n    uint8_t* result = NULL;\n    size_t result_length = 0;\n\n    // Parse arguments: string, integer, and sequence\n    if (!PyArg_ParseTuple(args, \"siO\", &socket_path, &mid, &py_args)) {\n        PyRAISE(\"Failed to parse argument\")\n    }\n\n    // Verify third argument is a sequence\n    if (!PySequence_Check(py_args)) {\n        PyRAISE(\"Third argument must be a sequence\");\n    }\n\n    // Get sequence size and allocate C arrays\n    nargs = PySequence_Size(py_args);\n    arg_packets = (const uint8_t**)calloc(nargs, sizeof(uint8_t*));\n    if (!arg_packets) {\n        PyErr_NoMemory();\n        goto error;\n    }\n\n    // Convert Python bytes to C buffers\n    for (i = 0; i < nargs; i++) {\n        PyObject* item = PySequence_GetItem(py_args, i);\n        if (!PyBytes_Check(item)) {\n            Py_DECREF(item);\n            free(arg_packets);\n            arg_packets = NULL;\n            PyRAISE(\"All arguments must be bytes objects\");\n        }\n        arg_packets[i] = (const uint8_t*)PyBytes_AsString(item);\n        Py_DECREF(item);\n    }\n\n    packet = PyTRY(make_morloc_local_call_packet, (uint32_t)mid, arg_packets, (size_t)nargs);\n\n    free(arg_packets);\n    arg_packets = NULL;\n\n    result = PyTRY(send_and_receive_over_socket, socket_path, packet);\n    free(packet);\n    packet = NULL;\n\n    // Incref the result's SHM so the callee's tracker flush won't destroy\n    // data we may still need (e.g. forwarded result packets).\n    {\n        const morloc_packet_header_t* res_header = (const morloc_packet_header_t*)result;\n        if (res_header->command.data.source == PACKET_SOURCE_RPTR) {\n            size_t relptr = *(size_t*)((uint8_t*)result + res_header->offset + sizeof(morloc_packet_header_t));\n            char* resolve_err = NULL;\n            void* res_voidstar = rel2abs(relptr, &resolve_err);\n            if (resolve_err) { free(resolve_err); resolve_err = NULL; }\n            if (res_voidstar) {\n                char* incref_err = NULL;\n                shincref((absptr_t)res_voidstar, &incref_err);\n                if (incref_err) { free(incref_err); }\n                shm_tracker_push((absptr_t)res_voidstar, NULL);\n            }\n        }\n    }\n\n    result_length = PyTRY(morloc_packet_size, result);\n\n    PyObject* retval = PyBytes_FromStringAndSize((char*)result, result_length);\n\n    free(result);\n\n    return retval;\n\nerror:\n    FREE(arg_packets)\n    FREE(packet)\n    return NULL;\n}\n\n\nstatic PyObject* pybinding__remote_call(PyObject* self, PyObject* args) { MAYFAIL\n    int midx;\n    char* socket_base;\n    char* cache_path;\n    PyObject* res_struct; // python struct that is converted to a resource_t struct\n    PyObject* arg_packets_obj; // python list of bytes types\n    const uint8_t** arg_packets = NULL;\n    uint8_t* result = NULL;\n\n    if (!PyArg_ParseTuple(args, \"issOO\", &midx, &socket_base, &cache_path, &res_struct, &arg_packets_obj)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    if (!PyBytes_Check(res_struct)) {\n        PyRAISE(\"res_struct must be a bytes object from struct.pack()\");\n    }\n\n    // Ensure the resources struct is the right size\n    if (PyBytes_Size(res_struct) != sizeof(resources_t)) {\n        PyRAISE(\"Struct size mismatch\");\n    }\n\n    resources_t* res = (resources_t*)PyBytes_AsString(res_struct);\n    PyTRACE(res == NULL)\n\n    Py_ssize_t nargs = PyList_Size(arg_packets_obj);\n\n    arg_packets = calloc(nargs, sizeof(uint8_t*));\n    if (arg_packets == NULL) {\n        PyRAISE(\"Memory allocation failed\");\n    }\n\n    for (Py_ssize_t i = 0; i < nargs; i++) {\n        PyObject* packet_obj = PyList_GetItem(arg_packets_obj, i);\n        if (!PyBytes_Check(packet_obj)) {\n            PyRAISE(\"Packets must be bytes\");\n        }\n        arg_packets[i] = (uint8_t*)PyBytes_AsString(packet_obj);\n    }\n\n    result = PyTRY(\n        remote_call,\n        midx,\n        socket_base,\n        cache_path,\n        res,\n        arg_packets,\n        (size_t)nargs\n    );\n\n    free(arg_packets);\n\n    if (result == NULL) Py_RETURN_NONE;\n    size_t result_length = PyTRY(morloc_packet_size, result);\n    PyObject* py_result = PyBytes_FromStringAndSize((char*)result, result_length);\n    free(result);\n    return py_result;\n\nerror:\n    if (result != NULL){\n        free(result);\n    }\n    if (arg_packets != NULL){\n        // The elements are handled by Python and should not be freed\n        free(arg_packets);\n    }\n    return NULL;\n}\n\n\nstatic PyObject* pybinding__is_ping(PyObject* self, PyObject* args) { MAYFAIL\n    char* packet;\n    size_t packet_size;\n\n    if (!PyArg_ParseTuple(args, \"y#\", &packet, &packet_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    bool is_ping = PyTRY(packet_is_ping, (uint8_t*)packet);\n\n    PyObject* obj = PyBool_FromLong((long)is_ping);\n\n    return obj;\n\nerror:\n    return NULL;\n}\n\n\nstatic PyObject* pybinding__is_local_call(PyObject* self, PyObject* args) { MAYFAIL\n    char* packet;\n    size_t packet_size;\n\n    if (!PyArg_ParseTuple(args, \"y#\", &packet, &packet_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    bool is_local_call = PyTRY(packet_is_local_call, (uint8_t*)packet);\n\n    PyObject* obj = PyBool_FromLong((long)is_local_call);\n\n    return obj;\n\nerror:\n    return NULL;\n}\n\nstatic PyObject* pybinding__is_remote_call(PyObject* self, PyObject* args) { MAYFAIL\n    char* packet;\n    size_t packet_size;\n\n    if (!PyArg_ParseTuple(args, \"y#\", &packet, &packet_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    bool is_remote_call = PyTRY(packet_is_remote_call, (uint8_t*)packet);\n\n    PyObject* obj = PyBool_FromLong((long)is_remote_call);\n\n    return obj;\n\nerror:\n    return NULL;\n}\n\n\nstatic PyObject* pybinding__pong(PyObject* self, PyObject* args) { MAYFAIL\n    char* packet;\n    size_t packet_size;\n    uint8_t* pong = NULL;\n\n    if (!PyArg_ParseTuple(args, \"y#\", &packet, &packet_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    pong = PyTRY(return_ping, (uint8_t*)packet);\n\n    size_t pong_size = PyTRY(morloc_packet_size, pong);\n\n    {\n        PyObject* retval = PyBytes_FromStringAndSize((char*)pong, pong_size);\n        free(pong);\n        return retval;\n    }\n\nerror:\n    FREE(pong)\n    return NULL;\n}\n\nstatic PyObject* pybinding__set_fallback_dir(PyObject* self, PyObject* args) {\n    const char* dir;\n    if (!PyArg_ParseTuple(args, \"s\", &dir)) {\n        return NULL;\n    }\n    shm_set_fallback_dir(dir);\n    Py_RETURN_NONE;\n}\n\nstatic PyObject* pybinding__shinit(PyObject* self, PyObject* args) { MAYFAIL\n    shm_t* shm = NULL;\n    \n    const char* shm_basename;\n    size_t volume_index;\n    size_t shm_default_size;\n\n    if (!PyArg_ParseTuple(args, \"skk\", &shm_basename, &volume_index, &shm_default_size)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    shm = PyTRY(\n        shinit,\n        shm_basename,\n        volume_index,\n        shm_default_size\n    );\n\n    return PyCapsule_New(shm, \"shm_t\", NULL);\n\nerror:\n    FREE(shm)\n    return NULL;\n}\n\n\nstatic PyObject* pybinding__make_fail_packetg(PyObject* self, PyObject* args) { MAYFAIL\n    const char* packet_errmsg;\n    uint8_t* packet = NULL;\n\n    if (!PyArg_ParseTuple(args, \"s\", &packet_errmsg)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    packet = make_fail_packet(packet_errmsg);\n\n    size_t packet_size = PyTRY(morloc_packet_size, packet);\n\n    {\n        PyObject* retval = PyBytes_FromStringAndSize((char*)packet, packet_size);\n        free(packet);\n        return retval;\n    }\n\nerror:\n    FREE(packet)\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_hash(PyObject* self, PyObject* args) { MAYFAIL\n    PyObject* obj;\n    const char* schema_str;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n    char* hex = NULL;\n\n    if (!PyArg_ParseTuple(args, \"Os\", &obj, &schema_str)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    voidstar = to_voidstar(schema, obj);\n    PyTRACE(voidstar == NULL)\n\n    hex = PyTRY(mlc_hash, voidstar, schema);\n\n    {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n\n    {\n        PyObject* retval = PyUnicode_FromString(hex);\n        free(hex);\n        return retval;\n    }\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    FREE(hex)\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_save(PyObject* self, PyObject* args) { MAYFAIL\n    PyObject* obj;\n    const char* schema_str;\n    const char* path;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n\n    if (!PyArg_ParseTuple(args, \"Oss\", &obj, &schema_str, &path)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    voidstar = to_voidstar(schema, obj);\n    PyTRACE(voidstar == NULL)\n\n    PyTRY(mlc_save, voidstar, schema, path);\n\n    {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    Py_RETURN_NONE;\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_save_voidstar(PyObject* self, PyObject* args) { MAYFAIL\n    PyObject* obj;\n    const char* schema_str;\n    const char* path;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n\n    if (!PyArg_ParseTuple(args, \"Oss\", &obj, &schema_str, &path)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    voidstar = to_voidstar(schema, obj);\n    PyTRACE(voidstar == NULL)\n\n    PyTRY(mlc_save_voidstar, voidstar, schema, path);\n\n    {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    Py_RETURN_NONE;\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_save_json(PyObject* self, PyObject* args) { MAYFAIL\n    PyObject* obj;\n    const char* schema_str;\n    const char* path;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n\n    if (!PyArg_ParseTuple(args, \"Oss\", &obj, &schema_str, &path)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    voidstar = to_voidstar(schema, obj);\n    PyTRACE(voidstar == NULL)\n\n    PyTRY(mlc_save_json, voidstar, schema, path);\n\n    {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    Py_RETURN_NONE;\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_show(PyObject* self, PyObject* args) { MAYFAIL\n    PyObject* obj;\n    const char* schema_str;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n    char* json = NULL;\n\n    if (!PyArg_ParseTuple(args, \"Os\", &obj, &schema_str)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    voidstar = to_voidstar(schema, obj);\n    PyTRACE(voidstar == NULL)\n\n    json = PyTRY(mlc_show, voidstar, schema);\n\n    {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n\n    {\n        PyObject* retval = PyUnicode_FromString(json);\n        free(json);\n        return retval;\n    }\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    FREE(json)\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_read(PyObject* self, PyObject* args) { MAYFAIL\n    const char* schema_str;\n    const char* json_str;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n\n    if (!PyArg_ParseTuple(args, \"ss\", &schema_str, &json_str)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    {\n        char* errmsg = NULL;\n        voidstar = mlc_read(json_str, schema, &errmsg);\n        if (errmsg != NULL) {\n            free(errmsg);\n        }\n    }\n\n    if (voidstar == NULL) {\n        free_schema(schema);\n        Py_RETURN_NONE;\n    }\n\n    {\n        PyObject* obj = fromAnything(schema, voidstar, NULL);\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n        free_schema(schema);\n        PyTRACE(obj == NULL)\n        return obj;\n    }\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    return NULL;\n}\n\nstatic PyObject* pybinding__mlc_load(PyObject* self, PyObject* args) { MAYFAIL\n    const char* schema_str;\n    const char* path;\n    Schema* schema = NULL;\n    void* voidstar = NULL;\n\n    if (!PyArg_ParseTuple(args, \"ss\", &schema_str, &path)) {\n        PyRAISE(\"Failed to parse arguments\");\n    }\n\n    schema = PyTRY(parse_schema, schema_str);\n\n    voidstar = PyTRY(mlc_load, path, schema);\n\n    if (voidstar == NULL) {\n        free_schema(schema);\n        Py_RETURN_NONE;\n    }\n\n    {\n        PyObject* obj = fromAnything(schema, voidstar, NULL);\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n        free_schema(schema);\n        PyTRACE(obj == NULL)\n        return obj;\n    }\n\nerror:\n    if (voidstar) {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n    return NULL;\n}\n\nstatic PyMethodDef Methods[] = {\n    {\"set_fallback_dir\", pybinding__set_fallback_dir, METH_VARARGS, \"Set fallback directory for file-backed shared memory\"},\n    {\"shinit\", pybinding__shinit, METH_VARARGS, \"Open the shared memory pool\"},\n    {\"start_daemon\", pybinding__start_daemon, METH_VARARGS, \"Initialize the shared memory and socket for the python daemon\"},\n    {\"close_daemon\", pybinding__close_daemon, METH_VARARGS, \"Banish the daemon back to the abyss from whence it came\"},\n    {\"wait_for_client\", pybinding__wait_for_client, METH_VARARGS, \"Listen over a pipe until a client packet arrives\"},\n    {\"read_morloc_call_packet\", pybinding__read_morloc_call_packet, METH_VARARGS, \"Parse a morloc call packet\"},\n    {\"send_packet_to_foreign_server\", pybinding__send_packet_to_foreign_server, METH_VARARGS, \"Send data to a foreign server\"},\n    {\"stream_from_client\", pybinding__stream_from_client, METH_VARARGS, \"Stream data from the client\"},\n    {\"close_socket\", pybinding__close_socket, METH_VARARGS, \"Close the socket\"},\n    {\"flush_shm_tracker\", pybinding__flush_shm_tracker, METH_NOARGS, \"Free tracked SHM allocations from put_value calls\"},\n    {\"foreign_call\", pybinding__foreign_call, METH_VARARGS, \"Send a call packet to a foreign pool\"},\n    {\"get_value\", pybinding__get_value, METH_VARARGS, \"Convert a packet to a Python value\"},\n    {\"put_value\", pybinding__put_value, METH_VARARGS, \"Convert a Python value to a packet\"},\n    {\"is_ping\", pybinding__is_ping, METH_VARARGS, \"Packet is a ping\"},\n    {\"is_local_call\", pybinding__is_local_call, METH_VARARGS, \"Packet is a local call\"},\n    {\"is_remote_call\", pybinding__is_remote_call, METH_VARARGS, \"Packet is a remote call\"},\n    {\"pong\", pybinding__pong, METH_VARARGS, \"Return a ping\"},\n    {\"make_fail_packet\", pybinding__make_fail_packetg, METH_VARARGS, \"Create a fail packet from an error message\"},\n    {\"remote_call\", pybinding__remote_call, METH_VARARGS, \"Make a call to a remote cluster\"},\n    {\"mlc_hash\", pybinding__mlc_hash, METH_VARARGS, \"Hash a value using xxhash\"},\n    {\"mlc_save\", pybinding__mlc_save, METH_VARARGS, \"Save a value to file in msgpack format\"},\n    {\"mlc_save_voidstar\", pybinding__mlc_save_voidstar, METH_VARARGS, \"Save a value to file in flat voidstar binary format\"},\n    {\"mlc_save_json\", pybinding__mlc_save_json, METH_VARARGS, \"Save a value to file in JSON format\"},\n    {\"mlc_load\", pybinding__mlc_load, METH_VARARGS, \"Load a value from file\"},\n    {\"mlc_show\", pybinding__mlc_show, METH_VARARGS, \"Serialize a value to JSON string\"},\n    {\"mlc_read\", pybinding__mlc_read, METH_VARARGS, \"Deserialize a JSON string to a value\"},\n    {NULL, NULL, 0, NULL} // this is a sentinel value\n};\n\nstatic struct PyModuleDef pymorloc = {\n    PyModuleDef_HEAD_INIT,\n    \"pymorloc\",\n    \"Python interface to Morloc binary and MessagePack data\",\n    -1,\n    Methods\n};\n\nPyMODINIT_FUNC PyInit_pymorloc(void) {\n    return PyModule_Create(&pymorloc);\n}\n"
  },
  {
    "path": "data/lang/py/setup.py",
    "content": "import os\nfrom setuptools import setup, Extension\nimport numpy as np\n\ntry:\n    np_include_path = np.get_include()\nexcept AttributeError:\n    raise RuntimeError(\"Numpy is required to build this extension\")\n\nmorloc_home = os.environ.get(\n    'MORLOC_HOME',\n    os.path.expanduser('~/.local/share/morloc')\n)\n\nmodule = Extension(\n    'pymorloc',\n    sources=['pymorloc.c'],\n    include_dirs=[\n        os.path.join(morloc_home, 'include'),\n        np_include_path\n    ],\n    library_dirs=[os.path.join(morloc_home, 'lib')],\n    runtime_library_dirs=[os.path.join(morloc_home, 'lib')],\n    libraries=['morloc']\n)\n\nsetup(\n    name='pymorloc',\n    version='0.1',\n    ext_modules=[module],\n    extras_require={\n        'numpy': ['numpy']\n    }\n)\n"
  },
  {
    "path": "data/lang/r/init.sh",
    "content": "#!/bin/bash\nset -e\n\nMORLOC_HOME=\"$1\"\nBUILD_DIR=\"$2\"\nSANITIZE_FLAGS=\"$3\"\nINCLUDE_DIR=\"$MORLOC_HOME/include\"\nLIB_DIR=\"$MORLOC_HOME/lib\"\n\n# Write source to include dir (R CMD SHLIB expects it there)\ncp \"$BUILD_DIR/rmorloc.c\" \"$INCLUDE_DIR/\"\n\n# Compile directly with gcc so SANITIZE_FLAGS can be passed at both compile\n# and link time. R CMD SHLIB drops sanitizer flags from the link step, which\n# would leave librmorloc.so with unresolved ubsan symbols (R itself is not\n# built with ubsan, so dlopen would fail with:\n#   \"undefined symbol: __ubsan_handle_type_mismatch_v1_abort\").\n#\n# We only link against R's core shared lib (libR). R's full --ldflags include\n# libs the R interpreter uses (pcre2, tirpc, icu...) that may not be present\n# as separate dev packages on the build machine; R-loadable shared objects\n# resolve those symbols through the loaded R interpreter at dlopen time, not\n# at link time.\nR_CPPFLAGS=$(R CMD config --cppflags)\nR_HOME=$(R RHOME)\ngcc $R_CPPFLAGS -I\"$INCLUDE_DIR\" $SANITIZE_FLAGS -fpic -O2 \\\n    -c \"$INCLUDE_DIR/rmorloc.c\" -o \"$INCLUDE_DIR/rmorloc.o\"\ngcc -shared $SANITIZE_FLAGS \\\n    -Wl,-Bsymbolic-functions -Wl,-z,relro \\\n    -o \"$LIB_DIR/librmorloc.so\" \"$INCLUDE_DIR/rmorloc.o\" \\\n    -L\"$LIB_DIR\" -Wl,-rpath,\"$LIB_DIR\" -lmorloc -lpthread \\\n    -L\"$R_HOME/lib\" -lR\n\n# Clean up\nrm -f \"$INCLUDE_DIR/rmorloc.c\" \"$INCLUDE_DIR/rmorloc.o\"\n"
  },
  {
    "path": "data/lang/r/lang.yaml",
    "content": "# R language descriptor for morloc compiler\n# Metadata fields (read by LangRegistry) + descriptor fields (read by generic translator)\n\n# Identity and metadata\nname: r\nextension: R\naliases: []\nis_compiled: false\nrun_command: [\"Rscript\"]\nserial_type: \"character\"\ncost: 4\npreamble:\n  - 'dyn.load(\"{{home}}/lib/librmorloc.so\")'\n  - '.morloc.srcdir <- normalizePath(file.path(dirname(sub(\"^--file=\", \"\", grep(\"^--file=\", commandArgs(FALSE), value=TRUE)[1])), \"..\", \"..\"), mustWork=FALSE)'\n  - '.morloc.source <- function(p) source(ifelse(startsWith(p, \"/\"), p, file.path(.morloc.srcdir, p)), chdir=TRUE)'\n\n# Literals\nldBoolTrue: \"TRUE\"\nldBoolFalse: \"FALSE\"\nldNullLiteral: \"NULL\"\n\n# Constructors\nldListStyle: type_dependent\nldTupleConstructor: \"list\"\nldRecordConstructor: \"list\"\nldRecordSeparator: \"=\"\n\n# Access styles\nldIndexStyle: one_double_bracket\nldKeyAccess: \"double_bracket\"\nldFieldAccess: dollar\n\n# Serialize/deserialize\nldSerializeFn: \"morloc_put_value\"\nldDeserializeFn: \"morloc_get_value\"\nldIntrinsicPrefix: \"morloc_\"\n\n# Foreign call\nldForeignCallFn: \"morloc_foreign_call\"\nldForeignCallIntSuffix: \"L\"\nldIntLiteralSuffix: \"L\"\n\n# Remote call\nldRemoteCallFn: \"morloc_remote_call\"\n\n# Record handling\nldDictStyleRecords: false\nldQuoteRecordKeys: false\n\n# Import syntax\nldQualifiedImports: false\nldIncludeRelToFile: false\n\n# Template fields\nldAssignOp: \"<-\"\nldLambdaTemplate: \"function({{args}}) { {{body}} }\"\nldDoBlockExpr: \"(function() {{expr}})\"\nldDoBlockBlock: \"(function(){\\n{{body}}\\n})\"\nldPartialTemplate: \"function({{bound_args}}) { {{fn}}({{all_args}}) }\"\nldImportTemplate: \".morloc.source(\\\"{{path}}\\\")\"\nldSocketPathTemplate: \"paste0(global_state$tmpdir, \\\"/\\\", {{socket}})\"\nldResourcePackTemplate: \"list(memory={{mem}}L, time={{time}}L, cpus={{cpus}}L, gpus={{gpus}}L)\"\nldReturnTemplate: \"return({{expr}})\"\nldFuncDefHeader: \"{{name}} <- function({{args}})\"\nldBlockStyle: braces\nldBlockEnd: \"}\"\nldErrorWrapOpen: \"\"\nldErrorWrapClose: []\nldPatternStyle: concat_call\nldConcatFn: \"paste0\"\nldQuoteTerminator: '\"'\nldQuoteTerminatorEsc: '\\\"'\nldAtomicTypes: [\"integer\", \"numeric\", \"double\", \"logical\", \"character\"]\nldAtomicListFn: \"c\"\nldGenericListFn: \"list\"\nldMapStyle: apply_callback\nldDispatchLocalHeader: \".dispatch <- list()\"\nldDispatchLocalEntry: \".dispatch[[{{mid}}L]] <- {{name}}\"\nldDispatchLocalFooter: \"\"\nldDispatchRemoteHeader: \".remote_dispatch <- list()\"\nldDispatchRemoteEntry: \".remote_dispatch[[{{mid}}L]] <- {{name}}_remote\"\nldDispatchRemoteFooter: \"\"\n\n# Pool template (loaded from pool.R, left empty here)\nldPoolTemplate: \"\"\nldBreakMarker: \"# <<<BREAK>>>\"\nldCommentMarker: \"#\"\n"
  },
  {
    "path": "data/lang/r/pool.R",
    "content": "# AUTO include sources start\n# <<<BREAK>>>\n# AUTO include sources end\n\nmorloc_is_ping                       <- function(...){ .Call(\"morloc_is_ping\",                       ...) }\nmorloc_pong                          <- function(...){ .Call(\"morloc_pong\",                          ...) }\nmorloc_is_local_call                 <- function(...){ .Call(\"morloc_is_local_call\",                 ...) }\nmorloc_is_remote_call                <- function(...){ .Call(\"morloc_is_remote_call\",                ...) }\nmorloc_make_fail_packet              <- function(...){ .Call(\"morloc_make_fail_packet\",              ...) }\nmorloc_wait_for_client               <- function(...){ .Call(\"morloc_wait_for_client\",               ...) }\nmorloc_stream_from_client            <- function(...){ .Call(\"morloc_stream_from_client\",            ...) }\nmorloc_read_morloc_call_packet       <- function(...){ .Call(\"morloc_read_morloc_call_packet\",       ...) }\nmorloc_send_packet_to_foreign_server <- function(...){ .Call(\"morloc_send_packet_to_foreign_server\", ...) }\nmorloc_close_socket                  <- function(...){ .Call(\"morloc_close_socket\",                  ...) }\nmorloc_start_daemon                  <- function(...){ .Call(\"morloc_start_daemon\",                  ...) }\nmorloc_shinit                        <- function(...){ .Call(\"morloc_shinit\",                        ...) }\nmorloc_foreign_call                  <- function(...){ .Call(\"morloc_foreign_call\",                  ...) }\nmorloc_get_value                     <- function(...){ .Call(\"morloc_get_value\",                     ...) }\nmorloc_put_value                     <- function(...){ .Call(\"morloc_put_value\",                     ...) }\nmorloc_mlc_show                      <- function(...){ .Call(\"morloc_mlc_show\",                      ...) }\nmorloc_socketpair                    <- function(...){ .Call(\"morloc_socketpair\",                    ...) }\nmorloc_fork                          <- function(...){ .Call(\"morloc_fork\",                          ...) }\nmorloc_send_fd                       <- function(...){ .Call(\"morloc_send_fd\",                       ...) }\nmorloc_recv_fd                       <- function(...){ .Call(\"morloc_recv_fd\",                       ...) }\nmorloc_kill                          <- function(...){ .Call(\"morloc_kill\",                          ...) }\nmorloc_waitpid                       <- function(...){ .Call(\"morloc_waitpid\",                       ...) }\nmorloc_install_sigterm_handler       <- function(...){ .Call(\"morloc_install_sigterm_handler\",       ...) }\nmorloc_is_shutting_down              <- function(...){ .Call(\"morloc_is_shutting_down\",              ...) }\nmorloc_waitpid_blocking              <- function(...){ .Call(\"morloc_waitpid_blocking\",              ...) }\nmorloc_detach_daemon                 <- function(...){ .Call(\"morloc_detach_daemon\",                 ...) }\nmorloc_shared_counter_create         <- function(...){ .Call(\"morloc_shared_counter_create\",         ...) }\nmorloc_shared_counter_inc            <- function(...){ .Call(\"morloc_shared_counter_inc\",            ...) }\nmorloc_shared_counter_dec            <- function(...){ .Call(\"morloc_shared_counter_dec\",            ...) }\nmorloc_shared_counter_read           <- function(...){ .Call(\"morloc_shared_counter_read\",           ...) }\nmorloc_pipe                          <- function(...){ .Call(\"morloc_pipe\",                          ...) }\nmorloc_write_byte                    <- function(...){ .Call(\"morloc_write_byte\",                    ...) }\nmorloc_close_fd                      <- function(...){ .Call(\"morloc_close_fd\",                      ...) }\nmorloc_worker_loop_c                 <- function(...){ .Call(\"morloc_worker_loop_c\",                 ...) }\nmorloc_set_line_buffered             <- function(...){ .Call(\"morloc_set_line_buffered\",             ...) }\nmorloc_exit                          <- function(...){ .Call(\"morloc_exit\",                          ...) }\n\nglobal_state <- list()\n\n# Dynamic worker spawning: monkey-patch morloc_foreign_call to track busy workers.\n# Workers atomically increment a shared counter before a foreign_call and\n# decrement after. When all workers are busy, a byte is written to a wake-up\n# pipe to tell the dispatcher to spawn a new worker.\n.orig_foreign_call <- morloc_foreign_call\n.busy_counter <- NULL\n.wakeup_fd <- NULL\n.n_workers_total <- 0L\n\nmorloc_foreign_call <- function(...) {\n  val <- morloc_shared_counter_inc(.busy_counter)\n  if (val >= .n_workers_total && !is.null(.wakeup_fd)) {\n    tryCatch(morloc_write_byte(.wakeup_fd, as.raw(0x21)), error = function(e) NULL)\n  }\n  on.exit(morloc_shared_counter_dec(.busy_counter))\n  .orig_foreign_call(...)\n}\n\n# AUTO include manifolds start\n# <<<BREAK>>>\n# AUTO include manifolds end\n\n# AUTO include dispatch start\n# <<<BREAK>>>\n# AUTO include dispatch end\n\nworker_loop <- function(pipe_fd) {\n  morloc_worker_loop_c(pipe_fd, .dispatch, .remote_dispatch)\n}\n\nmain <- function(socket_path, tmpdir, shm_basename) {\n  # Force line-buffered stdout/stderr so output from user functions is not lost\n  # when the nexus kills the pool process group.\n  morloc_set_line_buffered()\n  morloc_install_sigterm_handler()\n\n  daemon <- morloc_start_daemon(socket_path, tmpdir, shm_basename, 0xffff)\n  n_workers <- 1L\n\n  # Shared job queue: dispatcher writes fds to fd[1], workers read from fd[2].\n  # Only idle workers (blocked in recvmsg) pick up jobs, preventing the\n  # round-robin deadlock where a callback gets dispatched to a busy worker.\n  job_queue <- morloc_socketpair()\n\n  # Shared counter for dynamic worker spawning\n  busy_counter <- morloc_shared_counter_create()\n  wakeup <- morloc_pipe()  # c(read_fd, write_fd)\n\n  # Set globals so the monkey-patched morloc_foreign_call can use them.\n  # Forked children inherit these values.\n  .busy_counter <<- busy_counter\n  .wakeup_fd <<- wakeup[2L]\n  .n_workers_total <<- n_workers\n\n  pids <- integer(n_workers)\n  for (i in seq_len(n_workers)) {\n    pid <- morloc_fork()\n    if (pid == 0L) {\n      morloc_detach_daemon(daemon)\n      morloc_close_socket(job_queue[1L])  # child doesn't write\n      morloc_close_fd(wakeup[1L])         # child doesn't read wakeup pipe\n      worker_loop(job_queue[2L])\n      morloc_exit(0L)\n    }\n    pids[i] <- pid\n  }\n  # Keep job_queue[2L] open so dynamically spawned children can use it\n\n  on.exit({\n    tryCatch(morloc_close_socket(job_queue[1L]), error = function(e) NULL)\n    tryCatch(morloc_close_socket(job_queue[2L]), error = function(e) NULL)\n    tryCatch(morloc_close_fd(wakeup[1L]), error = function(e) NULL)\n    tryCatch(morloc_close_fd(wakeup[2L]), error = function(e) NULL)\n    for (pid in pids) {\n      if (pid > 0L) {\n        tryCatch(morloc_kill(pid, 9L), error = function(e) NULL)\n        tryCatch(morloc_waitpid_blocking(pid), error = function(e) NULL)\n      }\n    }\n  })\n\n  # Dispatch loop - idle workers pull from shared queue.\n  # After each dispatch cycle, check if all workers are busy and spawn more.\n  while (!morloc_is_shutting_down()) {\n    client_fd <- morloc_wait_for_client(daemon)\n    if (client_fd > 0L) {\n      tryCatch({\n        morloc_send_fd(job_queue[1L], client_fd)\n      }, error = function(e) {\n        cat(paste(\"Failed to dispatch job:\", e$message, \"\\n\"), file = stderr())\n      }, finally = {\n        morloc_close_socket(client_fd)\n      })\n    }\n\n    # Dynamic worker spawning: if all workers are blocked in foreign_call,\n    # spawn a new one so incoming callbacks can still be served.\n    current_busy <- morloc_shared_counter_read(busy_counter)\n    if (current_busy >= n_workers) {\n      pid <- morloc_fork()\n      if (pid == 0L) {\n        morloc_detach_daemon(daemon)\n        morloc_close_socket(job_queue[1L])\n        morloc_close_fd(wakeup[1L])\n        worker_loop(job_queue[2L])\n        morloc_exit(0L)\n      }\n      pids <- c(pids, pid)\n      n_workers <- n_workers + 1L\n      .n_workers_total <<- n_workers\n    }\n  }\n}\n\nargs <- commandArgs(trailingOnly = TRUE)\n\n# Health check: confirm sources loaded and print version\nif (length(args) == 1 && args[1] == \"--health\") {\n  cat('{\"status\":\"ok\",\"version\":\"__MORLOC_VERSION__\"}\\n')\n  quit(status = 0)\n}\n\nif (length(args) != 3) {\n  cat(\"Usage: Rscript pool.R <socket_path> <tmpdir> <shm_basename>\\n\", file=stderr())\n  quit(status = 1)\n}\n\nsocket_path <- args[1]\ntmpdir <- args[2]\nshm_basename <- args[3]\n\nglobal_state$tmpdir <- tmpdir\n\ntryCatch(\n  {\n    main(socket_path, tmpdir, shm_basename)\n  },  error = function(e) {\n      stop(paste(\"Pool failed:\", e$message))\n  })\n\n# Use _exit to avoid R cleanup which triggers heap corruption on glibc >= 2.39\n# (R's finalizers attempt to free objects in SHM-related C extensions)\nmorloc_exit(0L)\n"
  },
  {
    "path": "data/lang/r/rmorloc.c",
    "content": "#include <R.h>\n#include <Rinternals.h>\n#include <Rdefines.h>\n#include <R_ext/Arith.h>\n\n#include <stdint.h>\n#include <stdbool.h>\n#include <limits.h>\n#include <string.h>\n#include <errno.h>\n#include <fcntl.h>\n#include <sys/mman.h>\n#include <sys/select.h>\n#include <sys/socket.h>\n#include <sys/wait.h>\n#include <signal.h>\n#include <unistd.h>\n\n#include \"morloc.h\"\n\n// {{{ macros\n\n#define MAYFAIL char* child_errmsg_ = NULL;\n\n#define R_TRY(fun, ...) \\\n    fun(__VA_ARGS__ __VA_OPT__(,) &child_errmsg_); \\\n    if(child_errmsg_ != NULL){ \\\n        error(\"Error in R pool (%s:%d in %s):\\n%s\", __FILE__, __LINE__, __func__, child_errmsg_); \\\n    }\n\n#define R_TRY_WITH(clean, fun, ...) \\\n    fun(__VA_ARGS__ __VA_OPT__(,) &child_errmsg_); \\\n    if(child_errmsg_ != NULL){ \\\n        clean; \\\n        error(\"Error in R pool (%s:%d in %s):\\n%s\", __FILE__, __LINE__, __func__, child_errmsg_); \\\n    }\n\n#define MORLOC_ERROR(msg, ...) error(\"Error in R pool (%s:%d in %s):\" msg, __FILE__, __LINE__, __func__, ##__VA_ARGS__);\n\n/// }}}\n\n// {{{ to_voidstar\n\nstatic size_t get_shm_size(const Schema* schema, SEXP obj) {\n    size_t size = 0;\n    switch (schema->type) {\n        case MORLOC_NIL:\n        case MORLOC_BOOL:\n        case MORLOC_SINT8:\n        case MORLOC_SINT16:\n        case MORLOC_SINT32:\n        case MORLOC_SINT64:\n        case MORLOC_UINT8:\n        case MORLOC_UINT16:\n        case MORLOC_UINT32:\n        case MORLOC_UINT64:\n        case MORLOC_FLOAT32:\n        case MORLOC_FLOAT64:\n            return schema->width;\n        case MORLOC_STRING:\n        case MORLOC_ARRAY:\n            {\n                size_t length = (size_t)LENGTH(obj);\n                size = sizeof(Array);\n                // worst-case cursor alignment padding for element data\n                size += schema_alignment(schema->parameters[0]) - 1;\n                const char* str;\n\n                switch (TYPEOF(obj)) {\n                    case CHARSXP:\n                        str = CHAR(obj);\n                        size += (size_t)strlen(str);  // Do not include null terminator\n                        break;\n                    case STRSXP:\n                        if (LENGTH(obj) == 1) {\n                            str = CHAR(STRING_ELT(obj, 0));\n                            size += (size_t)strlen(str);  // Do not include null terminator\n                        } else {\n                            if(schema->parameters[0]->type == MORLOC_STRING){\n                                for(size_t i = 0; i < length; i++){\n                                    size += get_shm_size(schema->parameters[0], STRING_ELT(obj, i));\n                                }\n                            } else {\n                                MORLOC_ERROR(\"Expected character vector of length 1, but got length %zu\", length);\n                            }\n                        }\n                        break;\n                    case VECSXP:  // This handles lists\n                        for (int i = 0; i < length; i++) {\n                            size += get_shm_size(schema->parameters[0], VECTOR_ELT(obj, i));\n                        }\n                        break;\n                    case LGLSXP:\n                    case INTSXP:\n                    case REALSXP:\n                    case RAWSXP:\n                        size += length * schema->parameters[0]->width;\n                        break;\n                    default:\n                        MORLOC_ERROR(\"Unsupported type in get_shm_size array: %s\", type2char(TYPEOF(obj)));\n                }\n                return size;\n            }\n\n        case MORLOC_TUPLE:\n            if (!isVectorList(obj)) {\n                MORLOC_ERROR(\"Expected list for MORLOC_TUPLE, but got %s\", type2char(TYPEOF(obj)));\n            }\n\n            {\n                size_t array_size = (size_t)xlength(obj);\n                if (array_size != schema->size) {\n                    MORLOC_ERROR(\"Expected tuple of length %zu, but found list of length %zu\", schema->size, size);\n                }\n                size = schema->width;\n                for (R_xlen_t i = 0; i < (R_xlen_t)array_size; ++i) {\n                    SEXP item = VECTOR_ELT(obj, i);\n                    size_t elem = get_shm_size(schema->parameters[i], item);\n                    if (elem > schema->parameters[i]->width) {\n                        size += elem - schema->parameters[i]->width;\n                    }\n                }\n                return size;\n            }\n\n        case MORLOC_MAP:\n            {\n                if (isNewList(obj)) {\n                    // Handle named list\n                    size = schema->width;\n                    SEXP names = getAttrib(obj, R_NamesSymbol);\n                    if (names == R_NilValue) {\n                        error(\"List must have names for MORLOC_MAP\");\n                    }\n                    for (size_t i = 0; i < schema->size; ++i) {\n                        SEXP key = PROTECT(mkChar(schema->keys[i]));\n                        int index = -1;\n                        for (int j = 0; j < length(obj); j++) {\n                            if (strcmp(CHAR(STRING_ELT(names, j)), CHAR(key)) == 0) {\n                                index = j;\n                                break;\n                            }\n                        }\n                        if (index != -1) {\n                            SEXP value = VECTOR_ELT(obj, index);\n                            size_t elem = get_shm_size(schema->parameters[i], value);\n                            if (elem > schema->parameters[i]->width) {\n                                size += elem - schema->parameters[i]->width;\n                            }\n                        }\n                        UNPROTECT(1);\n                    }\n                    return size;\n                } else {\n                    error(\"Expected a named list for MORLOC_MAP\");\n                }\n            }\n\n        case MORLOC_OPTIONAL:\n            if (obj == R_NilValue) {\n                return schema->width;\n            }\n            {\n                size_t inner_size = get_shm_size(schema->parameters[0], obj);\n                size = schema->width;\n                if (inner_size > schema->parameters[0]->width) {\n                    size += inner_size - schema->parameters[0]->width;\n                }\n                return size;\n            }\n\n        case MORLOC_TENSOR:\n            {\n                size_t ndim = schema_tensor_ndim(schema);\n                size_t elem_width = schema->parameters[0]->width;\n                SEXP dim = getAttrib(obj, R_DimSymbol);\n                size_t total = 1;\n                if (dim != R_NilValue) {\n                    for (int i = 0; i < length(dim); i++)\n                        total *= (size_t)INTEGER(dim)[i];\n                } else {\n                    total = (size_t)XLENGTH(obj);\n                }\n                size = sizeof(Tensor);\n                size += _Alignof(int64_t) - 1;\n                size += ndim * sizeof(int64_t);\n                size += schema_alignment(schema->parameters[0]) - 1;\n                size += total * elem_width;\n                return size;\n            }\n\n        default:\n            MORLOC_ERROR(\"Unhandled schema type\");\n            break;\n    }\n\n    return size;\n}\n\n\n#define HANDLE_SINT_TYPE(CTYPE, MIN, MAX) \\\n    do { \\\n        if (!(isInteger(obj) || isReal(obj))) { \\\n            MORLOC_ERROR(\"Expected integer for %s, but got %s\", #CTYPE, type2char(TYPEOF(obj))); \\\n        } \\\n        double value = asReal(obj); \\\n        if (value < MIN || value > MAX) { \\\n            MORLOC_ERROR(\"Integer overflow for %s\", #CTYPE); \\\n        } \\\n        *(CTYPE*)dest = (CTYPE)value; \\\n    } while(0)\n\n#define HANDLE_UINT_TYPE(CTYPE, MAX) \\\n    do { \\\n        if (!(isInteger(obj) || isReal(obj))) { \\\n            MORLOC_ERROR(\"Expected integer for %s, but got %s\", #CTYPE, type2char(TYPEOF(obj))); \\\n        } \\\n        double value = asReal(obj); \\\n        if (value < 0 || value > MAX) { \\\n            MORLOC_ERROR(\"Integer overflow for %s\", #CTYPE); \\\n        } \\\n        *(CTYPE*)dest = (CTYPE)value; \\\n    } while(0)\n\nstatic void* to_voidstar_r(void* dest, void** cursor, SEXP obj, const Schema* schema){\n    MAYFAIL\n\n    switch (schema->type) {\n        case MORLOC_NIL:\n            if (obj != R_NilValue) {\n                MORLOC_ERROR(\"Expected NULL for MORLOC_NIL, but got %s\", type2char(TYPEOF(obj)));\n            }\n            *((int8_t*)dest) = (int8_t)0;\n            break;\n        case MORLOC_BOOL:\n            if (!isLogical(obj)) {\n                MORLOC_ERROR(\"Expected logical for MORLOC_BOOL, but got %s\", type2char(TYPEOF(obj)));\n            }\n            *((uint8_t*)dest) = (uint8_t)((LOGICAL(obj)[0] == TRUE) ? 1 : 0);\n            break;\n        case MORLOC_SINT8:\n            HANDLE_SINT_TYPE(int8_t, INT8_MIN, INT8_MAX);\n            break;\n        case MORLOC_SINT16:\n            HANDLE_SINT_TYPE(int16_t, INT16_MIN, INT16_MAX);\n            break;\n        case MORLOC_SINT32:\n            HANDLE_SINT_TYPE(int32_t, INT32_MIN, INT32_MAX);\n            break;\n        case MORLOC_SINT64:\n            HANDLE_SINT_TYPE(int64_t, INT64_MIN, INT64_MAX);\n            break;\n        case MORLOC_UINT8:\n            HANDLE_UINT_TYPE(uint8_t, UINT8_MAX);\n            break;\n        case MORLOC_UINT16:\n            HANDLE_UINT_TYPE(uint16_t, UINT16_MAX);\n            break;\n        case MORLOC_UINT32:\n            HANDLE_UINT_TYPE(uint32_t, UINT32_MAX);\n            break;\n        case MORLOC_UINT64:\n            HANDLE_UINT_TYPE(uint64_t, UINT64_MAX);\n            break;\n        case MORLOC_FLOAT32:\n            if (!(isReal(obj) || isInteger(obj))) {\n                MORLOC_ERROR(\"Expected numeric for MORLOC_FLOAT32, but got %s\", type2char(TYPEOF(obj)));\n            }\n            *((float*)dest) = (float)asReal(obj);\n            break;\n\n        case MORLOC_FLOAT64:\n            if (!(isReal(obj) || isInteger(obj))) {\n                MORLOC_ERROR(\"Expected numeric for MORLOC_FLOAT64, but got %s\", type2char(TYPEOF(obj)));\n            }\n            *((double*)dest) = asReal(obj);\n            break;\n        case MORLOC_STRING:\n            {\n                const char* str = NULL;\n                size_t length = 0;\n                switch(TYPEOF(obj)){\n                    case CHARSXP:\n                        str = CHAR(obj);\n                        length = (size_t)strlen(str);\n                        break;\n                    case STRSXP:\n                        if (LENGTH(obj) == 1) {\n                            str = CHAR(STRING_ELT(obj, 0));\n                            length = (size_t)strlen(str);\n                        } else {\n                            MORLOC_ERROR(\"Expected character of length 1\");\n                        }\n                        break;\n                    case RAWSXP:\n                        str = RAW(obj);\n                        length = LENGTH(obj);\n                        break;\n                    default:\n                      MORLOC_ERROR(\"Expected a character type\");\n                      break;\n                }\n                Array* array = (Array*)dest;\n                array->size = length;  // Do not include null terminator\n                if(length > 0){\n                    // align cursor for element data placement\n                    *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, schema_alignment(schema->parameters[0]));\n                    array->data = R_TRY(abs2rel, *cursor);\n                    absptr_t tmp_ptr = R_TRY(rel2abs, array->data);\n                    memcpy(tmp_ptr, str, array->size);\n                } else {\n                    array->data = RELNULL;\n                }\n\n                // move cursor to the location after the copied data\n                *cursor = (void*)(*(char**)cursor + array->size);\n            }\n            break;\n        case MORLOC_ARRAY:\n            Array* array = (Array*)dest;\n            array->size = (size_t)length(obj);\n            if(array->size == 0){\n                array->data = RELNULL;\n                break;\n            }\n\n            // align cursor for element data placement\n            *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, schema_alignment(schema->parameters[0]));\n            array->data = R_TRY(abs2rel, *cursor);\n            Schema* element_schema = schema->parameters[0];\n            char* start;\n\n            switch (TYPEOF(obj)) {\n                case STRSXP:\n                    {\n                        if(element_schema->type == MORLOC_STRING){\n                            // set the cursor the the location after the array headers\n                            *cursor = (void*)(*(char**)cursor + array->size * element_schema->width);\n                            start = R_TRY(rel2abs, array->data);\n                            for(size_t i = 0; i < array->size; i++){\n                                SEXP elem = STRING_ELT(obj, i);\n                                to_voidstar_r(start + i * element_schema->width, cursor, elem, element_schema);\n                            }\n                        } else {\n                            MORLOC_ERROR(\"Expected character vector of length 1, but got length %ld\", array->size);\n                        }\n                    }\n                    break;\n                case RAWSXP:  // Raw vectors\n                    if (element_schema->type != MORLOC_UINT8) {\n                        MORLOC_ERROR(\"Expected MORLOC_UINT8 for raw vector\");\n                    }\n                    absptr_t tmp_ptr = R_TRY(rel2abs, array->data);\n                    memcpy(tmp_ptr, RAW(obj), array->size * sizeof(uint8_t));\n                    *cursor = (void*)(*(char**)cursor + array->size * sizeof(uint8_t));\n                    break;\n                case VECSXP:  // This handles lists\n                    *cursor = (void*)(*(char**)cursor + array->size * element_schema->width);\n                    start = R_TRY(rel2abs, array->data);\n                    for (int i = 0; i < array->size; i++) {\n                        SEXP elem = VECTOR_ELT(obj, i);\n                        to_voidstar_r(start + i * element_schema->width, cursor, elem, element_schema);\n                    }\n                    break;\n                case LGLSXP:\n                    *cursor = (void*)(*(char**)cursor + array->size * element_schema->width);\n                    start = R_TRY(rel2abs, array->data);\n                    for (int i = 0; i < array->size; i++) {\n                        SEXP elem = PROTECT(ScalarLogical(LOGICAL(obj)[i]));\n                        to_voidstar_r(start + i * element_schema->width, cursor, elem, element_schema);\n                        UNPROTECT(1);\n                    }\n                    break;\n                case INTSXP:\n                    *cursor = (void*)(*(char**)cursor + array->size * element_schema->width);\n                    start = R_TRY(rel2abs, array->data);\n                    for (int i = 0; i < array->size; i++) {\n                        SEXP elem = PROTECT(ScalarInteger(INTEGER(obj)[i]));\n                        to_voidstar_r(start + i * element_schema->width, cursor, elem, element_schema);\n                        UNPROTECT(1);\n                    }\n                    break;\n                case REALSXP:\n                    *cursor = (void*)(*(char**)cursor + array->size * element_schema->width);\n                    start = R_TRY(rel2abs, array->data);\n                    for (int i = 0; i < array->size; i++) {\n                        SEXP elem = PROTECT(ScalarReal(REAL(obj)[i]));\n                        to_voidstar_r(start + i * element_schema->width, cursor, elem, element_schema);\n                        UNPROTECT(1);\n                    }\n                    break;\n                default:\n                    MORLOC_ERROR(\"Unsupported type in to_voidstar array: %s\", type2char(TYPEOF(obj)));\n            }\n            break;\n\n\n\n        case MORLOC_TUPLE:\n            if (!isVectorList(obj)) {\n                MORLOC_ERROR(\"Expected list for MORLOC_TUPLE, but got %s\", type2char(TYPEOF(obj)));\n            }\n\n            {\n                R_xlen_t size = xlength(obj);\n                if ((size_t)size != schema->size) {\n                    MORLOC_ERROR(\"Expected tuple of length %zu, but found list of length %zu\", schema->size, size);\n                }\n                for (R_xlen_t i = 0; i < size; ++i) {\n                    SEXP item = VECTOR_ELT(obj, i);\n                    to_voidstar_r(dest + schema->offsets[i], cursor, item, schema->parameters[i]);\n                }\n            }\n            break;\n\n        case MORLOC_MAP:\n            {\n                if (isNewList(obj)) {\n                    // Handle named list\n                    SEXP names = getAttrib(obj, R_NamesSymbol);\n                    if (names == R_NilValue) {\n                        MORLOC_ERROR(\"List must have names for MORLOC_MAP\");\n                    }\n                    for (size_t i = 0; i < schema->size; ++i) {\n                        SEXP key = PROTECT(mkChar(schema->keys[i]));\n                        int index = -1;\n                        for (int j = 0; j < length(obj); j++) {\n                            if (strcmp(CHAR(STRING_ELT(names, j)), CHAR(key)) == 0) {\n                                index = j;\n                                break;\n                            }\n                        }\n                        if (index != -1) {\n                            SEXP value = VECTOR_ELT(obj, index);\n                            to_voidstar_r(dest + schema->offsets[i], cursor, value, schema->parameters[i]);\n                        }\n                        UNPROTECT(1);\n                    }\n                } else {\n                    MORLOC_ERROR(\"Expected a named list for MORLOC_MAP\");\n                }\n            }\n            break;\n\n        case MORLOC_OPTIONAL:\n            if (obj == R_NilValue) {\n                *((uint8_t*)dest) = 0;\n                memset((char*)dest + schema->offsets[0], 0, schema->parameters[0]->width);\n            } else {\n                *((uint8_t*)dest) = 1;\n                to_voidstar_r((char*)dest + schema->offsets[0], cursor, obj, schema->parameters[0]);\n            }\n            break;\n\n        case MORLOC_TENSOR:\n            {\n                size_t ndim = schema_tensor_ndim(schema);\n                size_t elem_width = schema->parameters[0]->width;\n\n                // Get shape from dim attribute (or length for 1D)\n                SEXP dim = getAttrib(obj, R_DimSymbol);\n                int64_t shape[5];\n                size_t total = 1;\n                if (dim != R_NilValue) {\n                    for (size_t i = 0; i < ndim; i++) {\n                        shape[i] = (int64_t)INTEGER(dim)[i];\n                        total *= (size_t)shape[i];\n                    }\n                } else {\n                    shape[0] = (int64_t)XLENGTH(obj);\n                    total = (size_t)shape[0];\n                }\n\n                Tensor* tensor = (Tensor*)dest;\n                tensor->total_elements = total;\n                tensor->device_type = 0;\n                tensor->device_id = 0;\n\n                if (total == 0) {\n                    tensor->shape = RELNULL;\n                    tensor->data = RELNULL;\n                    break;\n                }\n\n                // Write shape\n                *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, _Alignof(int64_t));\n                tensor->shape = R_TRY(abs2rel, (absptr_t)*cursor);\n                int64_t* shape_dst = (int64_t*)*cursor;\n                for (size_t i = 0; i < ndim; i++) shape_dst[i] = shape[i];\n                *cursor = (char*)*cursor + ndim * sizeof(int64_t);\n\n                // Write data: transpose from column-major (R) to row-major (C)\n                size_t data_align = schema_alignment(schema->parameters[0]);\n                *cursor = (void*)ALIGN_UP((uintptr_t)*cursor, data_align);\n                tensor->data = R_TRY(abs2rel, (absptr_t)*cursor);\n\n                    // Coerce R object to match schema element type\n                SEXP coerced = obj;\n                int need_protect = 0;\n                morloc_serial_type etype = schema->parameters[0]->type;\n                if ((etype == MORLOC_FLOAT64 || etype == MORLOC_FLOAT32) && !isReal(obj)) {\n                    coerced = PROTECT(coerceVector(obj, REALSXP));\n                    need_protect = 1;\n                } else if (etype != MORLOC_FLOAT64 && etype != MORLOC_FLOAT32 && etype != MORLOC_BOOL && !isInteger(obj)) {\n                    coerced = PROTECT(coerceVector(obj, INTSXP));\n                    need_protect = 1;\n                }\n\n                if (ndim == 1) {\n                    // 1D: no transpose needed\n                    if (isReal(coerced)) {\n                        memcpy(*cursor, REAL(coerced), total * elem_width);\n                    } else if (isInteger(coerced)) {\n                        memcpy(*cursor, INTEGER(coerced), total * elem_width);\n                    } else if (isLogical(coerced)) {\n                        int* src = LOGICAL(coerced);\n                        uint8_t* dst = (uint8_t*)*cursor;\n                        for (size_t i = 0; i < total; i++) dst[i] = (uint8_t)(src[i] != 0);\n                    }\n                } else if (ndim == 2) {\n                    size_t nrows = (size_t)shape[0];\n                    size_t ncols = (size_t)shape[1];\n                    if (isReal(coerced)) {\n                        double* src = REAL(coerced);\n                        double* dst = (double*)*cursor;\n                        for (size_t r = 0; r < nrows; r++)\n                            for (size_t c = 0; c < ncols; c++)\n                                dst[r * ncols + c] = src[c * nrows + r];\n                    } else if (isInteger(coerced)) {\n                        int* src = INTEGER(coerced);\n                        int* dst = (int*)*cursor;\n                        for (size_t r = 0; r < nrows; r++)\n                            for (size_t c = 0; c < ncols; c++)\n                                dst[r * ncols + c] = src[c * nrows + r];\n                    }\n                } else {\n                    size_t col_strides[5];\n                    col_strides[0] = 1;\n                    for (size_t d = 1; d < ndim; d++)\n                        col_strides[d] = col_strides[d-1] * (size_t)shape[d-1];\n                    size_t row_strides[5];\n                    row_strides[ndim-1] = 1;\n                    for (size_t d = ndim-1; d > 0; d--)\n                        row_strides[d-1] = row_strides[d] * (size_t)shape[d];\n\n                    if (isReal(coerced)) {\n                        double* src = REAL(coerced);\n                        double* dst = (double*)*cursor;\n                        for (size_t i = 0; i < total; i++) {\n                            size_t rem = i;\n                            size_t col_idx = 0;\n                            for (size_t d = 0; d < ndim; d++) {\n                                size_t coord = rem / row_strides[d];\n                                rem %= row_strides[d];\n                                col_idx += coord * col_strides[d];\n                            }\n                            dst[i] = src[col_idx];\n                        }\n                    } else if (isInteger(coerced)) {\n                        int* src = INTEGER(coerced);\n                        int* dst = (int*)*cursor;\n                        for (size_t i = 0; i < total; i++) {\n                            size_t rem = i;\n                            size_t col_idx = 0;\n                            for (size_t d = 0; d < ndim; d++) {\n                                size_t coord = rem / row_strides[d];\n                                rem %= row_strides[d];\n                                col_idx += coord * col_strides[d];\n                            }\n                            dst[i] = src[col_idx];\n                        }\n                    }\n                }\n                if (need_protect) UNPROTECT(1);\n                *cursor = (char*)*cursor + total * elem_width;\n            }\n            break;\n\n        default:\n            MORLOC_ERROR(\"Unhandled schema type\");\n            break;\n    }\n\n    return dest;\n\n}\n\n\n// NOTE: If to_voidstar_r calls error() (via MORLOC_ERROR or R_TRY), the shared\n// memory at dest leaks. This only happens on type mismatches (a development-time\n// bug) and the memory is reclaimed when the pool process exits.\nstatic void* to_voidstar(SEXP obj, const Schema* schema) {\n    MAYFAIL\n\n    size_t total_size = get_shm_size(schema, obj);\n\n    void* dest = R_TRY(shmalloc, total_size);\n\n    void* cursor = (void*)((char*)dest + schema->width);\n\n    return to_voidstar_r(dest, &cursor, obj, schema);\n}\n\n// }}} to_voidstar\n\n// {{{ from_voidstar\n\nstatic SEXP from_voidstar(const void* data, const Schema* schema, const void* base_ptr) {\n    MAYFAIL\n\n    if(data == NULL){\n        MORLOC_ERROR(\"NULL data (%s:%d in %s)\", __FILE__, __LINE__, __func__);\n    }\n\n    if(schema == NULL){\n        MORLOC_ERROR(\"NULL schema (%s:%d in %s)\", __FILE__, __LINE__, __func__);\n    }\n\n    SEXP obj = R_NilValue;\n    switch (schema->type) {\n        case MORLOC_NIL:\n            return R_NilValue;\n        case MORLOC_BOOL:\n            obj = ScalarLogical((bool)*(uint8_t*)data);\n            break;\n        case MORLOC_SINT8:\n            obj = ScalarInteger((int)(*(int8_t*)data));\n            break;\n        case MORLOC_SINT16:\n            obj = ScalarInteger((int)(*(int16_t*)data));\n            break;\n        case MORLOC_SINT32:\n            obj = ScalarInteger(*(int32_t*)data);\n            break;\n        case MORLOC_SINT64:\n            obj = ScalarReal((double)(*(int64_t*)data));\n            break;\n        case MORLOC_UINT8:\n            obj = ScalarInteger((int)(*(uint8_t*)data));\n            break;\n        case MORLOC_UINT16:\n            obj = ScalarInteger((int)(*(uint16_t*)data));\n            break;\n        case MORLOC_UINT32:\n            obj = ScalarReal((double)(*(uint32_t*)data));\n            break;\n        case MORLOC_UINT64:\n            obj = ScalarReal((double)(*(uint64_t*)data));\n            break;\n        case MORLOC_FLOAT32:\n            obj = ScalarReal((double)(*(float*)data));\n            break;\n        case MORLOC_FLOAT64:\n            obj = ScalarReal(*(double*)data);\n            break;\n        case MORLOC_STRING: {\n                if (schema->hint != NULL && strcmp(schema->hint, \"raw\") == 0){\n                    Array* raw_array = (Array*)data;\n                    if(raw_array->size > 0){\n                        void* tmp_ptr = R_TRY(resolve_relptr, raw_array->data, base_ptr);\n                        obj = PROTECT(allocVector(RAWSXP, raw_array->size));\n                        memcpy(RAW(obj), tmp_ptr, raw_array->size);\n                    } else {\n                        obj = PROTECT(allocVector(RAWSXP, 0));\n                    }\n                    UNPROTECT(1);\n                } else {\n                    Array* str_array = (Array*)data;\n                    if(str_array->size > 0){\n                        void* tmp_ptr = R_TRY(resolve_relptr, str_array->data, base_ptr);\n                        SEXP chr = PROTECT(mkCharLen(tmp_ptr, str_array->size));\n                        obj = PROTECT(ScalarString(chr));\n                    } else {\n                        SEXP chr = PROTECT(mkChar(\"\"));\n                        obj = PROTECT(ScalarString(chr));\n                    }\n                    UNPROTECT(2);\n                }\n            }\n            break;\n        case MORLOC_ARRAY:\n            {\n                Array* array = (Array*)data;\n                Schema* element_schema = schema->parameters[0];\n                char* start;\n\n                switch(element_schema->type){\n                    case MORLOC_BOOL:\n                        obj = PROTECT(allocVector(LGLSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            LOGICAL(obj)[i] = (bool)*(uint8_t*)(start + i) ? TRUE : FALSE;\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_SINT8:\n                        obj = PROTECT(allocVector(INTSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            INTEGER(obj)[i] = (int)(*(int8_t*)(start + i * sizeof(int8_t)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_SINT16:\n                        obj = PROTECT(allocVector(INTSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            INTEGER(obj)[i] = (int)(*(int16_t*)(start + i * sizeof(int16_t)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_SINT32:\n                        obj = PROTECT(allocVector(INTSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        {\n                            void* tmp_ptr = R_TRY(resolve_relptr, array->data, base_ptr);\n                            memcpy(INTEGER(obj), tmp_ptr, array->size * sizeof(int32_t));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_SINT64:\n                        obj = PROTECT(allocVector(REALSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            REAL(obj)[i] = (double)(*(int64_t*)(start + i * sizeof(int64_t)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    // Interpret the uint8 as a raw vector\n                    case MORLOC_UINT8:\n                        obj = PROTECT(allocVector(RAWSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        memcpy(RAW(obj), start, array->size * sizeof(uint8_t));\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_UINT16:\n                        obj = PROTECT(allocVector(INTSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            INTEGER(obj)[i] = (int)(*(uint16_t*)(start + i * sizeof(uint16_t)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_UINT32:\n                        obj = PROTECT(allocVector(REALSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            REAL(obj)[i] = (double)(*(uint32_t*)(start + i * sizeof(uint32_t)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_UINT64:\n                        // NOTE: the R integer cannot store a 64 bit int\n                        obj = PROTECT(allocVector(REALSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            REAL(obj)[i] = (double)(*(uint64_t*)(start + i * sizeof(uint64_t)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_FLOAT32:\n                        obj = PROTECT(allocVector(REALSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        for (size_t i = 0; i < array->size; i++) {\n                            REAL(obj)[i] = (double)(*(float*)(start + i * sizeof(float)));\n                        }\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_FLOAT64:\n                        obj = PROTECT(allocVector(REALSXP, array->size));\n                        if(array->size == 0) {\n                            UNPROTECT(1);\n                            break;\n                        }\n                        start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                        memcpy(REAL(obj), start, array->size * sizeof(double));\n                        UNPROTECT(1);\n                        break;\n                    case MORLOC_STRING:\n                        {\n                            obj = PROTECT(allocVector(STRSXP, array->size));\n                            if(array->size == 0) {\n                                UNPROTECT(1);\n                                break;\n                            }\n                            start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                            size_t width = schema->width;\n                            for (size_t i = 0; i < array->size; i++) {\n                                Array* str_array = (Array*)(start + i * width);\n                                SEXP item;\n                                if(str_array->size == 0){\n                                    item = PROTECT(mkCharLen(\"\", 0));\n                                } else {\n                                    void* str_ptr = R_TRY_WITH(UNPROTECT(1), resolve_relptr, str_array->data, base_ptr);\n                                    item = PROTECT(mkCharLen(str_ptr, str_array->size));\n                                }\n                                UNPROTECT(1);\n                                SET_STRING_ELT(obj, i, item);\n                            }\n                            UNPROTECT(1);\n                        }\n                        break;\n                    default:\n                        {\n                            obj = PROTECT(allocVector(VECSXP, array->size));\n                            if(array->size == 0) {\n                                UNPROTECT(1);\n                                break;\n                            }\n                            start = (char*)R_TRY(resolve_relptr, array->data, base_ptr);\n                            size_t width = element_schema->width;\n                            for (size_t i = 0; i < array->size; i++) {\n                                SEXP item = from_voidstar(start + width * i, element_schema, base_ptr);\n                                if (item == R_NilValue) {\n                                    UNPROTECT(1);\n                                    obj = R_NilValue;\n                                    goto error;\n                                }\n                                SET_VECTOR_ELT(obj, i, item);\n                            }\n                            UNPROTECT(1);\n                        }\n                        break;\n                }\n            }\n            break;\n        case MORLOC_TUPLE: {\n            obj = PROTECT(allocVector(VECSXP, schema->size));\n            for (size_t i = 0; i < schema->size; i++) {\n                void* item_ptr = (char*)data + schema->offsets[i];\n                SEXP item = from_voidstar(item_ptr, schema->parameters[i], base_ptr);\n                if (item == R_NilValue) {\n                    UNPROTECT(1);\n                    obj = R_NilValue;\n                    goto error;\n                }\n                SET_VECTOR_ELT(obj, i, item);\n            }\n            UNPROTECT(1);\n            break;\n        }\n        case MORLOC_MAP: {\n            obj = PROTECT(allocVector(VECSXP, schema->size));\n            SEXP names = PROTECT(allocVector(STRSXP, schema->size));\n            for (size_t i = 0; i < schema->size; i++) {\n                void* item_ptr = (char*)data + schema->offsets[i];\n                SEXP value = from_voidstar(item_ptr, schema->parameters[i], base_ptr);\n                if (value == R_NilValue) {\n                    UNPROTECT(2);\n                    obj = R_NilValue;\n                    goto error;\n                }\n                SET_VECTOR_ELT(obj, i, value);\n                SET_STRING_ELT(names, i, mkChar(schema->keys[i]));\n            }\n            setAttrib(obj, R_NamesSymbol, names);\n            UNPROTECT(2);\n            break;\n        }\n        case MORLOC_OPTIONAL: {\n            uint8_t tag = *(const uint8_t*)data;\n            if (tag == 0) {\n                return R_NilValue;\n            }\n            obj = from_voidstar((const char*)data + schema->offsets[0], schema->parameters[0], base_ptr);\n            break;\n        }\n        case MORLOC_TENSOR: {\n            const Tensor* tensor = (const Tensor*)data;\n            size_t ndim = schema_tensor_ndim(schema);\n            size_t total = tensor->total_elements;\n\n            if (total == 0) {\n                if (isReal(obj)) {\n                    obj = PROTECT(allocVector(REALSXP, 0));\n                } else {\n                    obj = PROTECT(allocVector(INTSXP, 0));\n                }\n                UNPROTECT(1);\n                break;\n            }\n\n            const int64_t* shape = (const int64_t*)resolve_relptr(tensor->shape, base_ptr, NULL);\n            const void* tdata = resolve_relptr(tensor->data, base_ptr, NULL);\n\n            // Allocate R vector\n            int sexptype;\n            switch (schema->parameters[0]->type) {\n                case MORLOC_FLOAT32:\n                case MORLOC_FLOAT64: sexptype = REALSXP; break;\n                case MORLOC_BOOL:    sexptype = LGLSXP; break;\n                default:             sexptype = INTSXP; break;\n            }\n\n            obj = PROTECT(allocVector(sexptype, (R_xlen_t)total));\n\n            if (ndim == 1) {\n                // 1D: no transpose\n                if (sexptype == REALSXP) {\n                    if (schema->parameters[0]->type == MORLOC_FLOAT32) {\n                        const float* src = (const float*)tdata;\n                        double* dst = REAL(obj);\n                        for (size_t i = 0; i < total; i++) dst[i] = (double)src[i];\n                    } else {\n                        memcpy(REAL(obj), tdata, total * sizeof(double));\n                    }\n                } else if (sexptype == INTSXP) {\n                    size_t elem_w = schema->parameters[0]->width;\n                    if (elem_w == sizeof(int)) {\n                        memcpy(INTEGER(obj), tdata, total * sizeof(int));\n                    } else {\n                        // Widen or narrow to int\n                        int* dst = INTEGER(obj);\n                        const char* src = (const char*)tdata;\n                        for (size_t i = 0; i < total; i++) {\n                            int64_t v = 0;\n                            memcpy(&v, src + i * elem_w, elem_w);\n                            dst[i] = (int)v;\n                        }\n                    }\n                } else if (sexptype == LGLSXP) {\n                    const uint8_t* src = (const uint8_t*)tdata;\n                    int* dst = LOGICAL(obj);\n                    for (size_t i = 0; i < total; i++) dst[i] = src[i] ? 1 : 0;\n                }\n            } else if (ndim == 2) {\n                // 2D: row-major to col-major transpose\n                size_t nrows = (size_t)shape[0];\n                size_t ncols = (size_t)shape[1];\n                if (sexptype == REALSXP) {\n                    const double* src = (const double*)tdata;\n                    double* dst = REAL(obj);\n                    for (size_t r = 0; r < nrows; r++)\n                        for (size_t c = 0; c < ncols; c++)\n                            dst[c * nrows + r] = src[r * ncols + c];\n                } else if (sexptype == INTSXP) {\n                    const int* src = (const int*)tdata;\n                    int* dst = INTEGER(obj);\n                    for (size_t r = 0; r < nrows; r++)\n                        for (size_t c = 0; c < ncols; c++)\n                            dst[c * nrows + r] = src[r * ncols + c];\n                }\n            } else {\n                // General N-D: row-major to col-major\n                size_t col_strides[5];\n                col_strides[0] = 1;\n                for (size_t d = 1; d < ndim; d++)\n                    col_strides[d] = col_strides[d-1] * (size_t)shape[d-1];\n                size_t row_strides[5];\n                row_strides[ndim-1] = 1;\n                for (size_t d = ndim-1; d > 0; d--)\n                    row_strides[d-1] = row_strides[d] * (size_t)shape[d];\n\n                if (sexptype == REALSXP) {\n                    const double* src = (const double*)tdata;\n                    double* dst = REAL(obj);\n                    for (size_t i = 0; i < total; i++) {\n                        // i is row-major index, compute col-major index\n                        size_t rem = i;\n                        size_t col_idx = 0;\n                        for (size_t d = 0; d < ndim; d++) {\n                            size_t coord = rem / row_strides[d];\n                            rem %= row_strides[d];\n                            col_idx += coord * col_strides[d];\n                        }\n                        dst[col_idx] = src[i];\n                    }\n                } else if (sexptype == INTSXP) {\n                    const int* src = (const int*)tdata;\n                    int* dst = INTEGER(obj);\n                    for (size_t i = 0; i < total; i++) {\n                        size_t rem = i;\n                        size_t col_idx = 0;\n                        for (size_t d = 0; d < ndim; d++) {\n                            size_t coord = rem / row_strides[d];\n                            rem %= row_strides[d];\n                            col_idx += coord * col_strides[d];\n                        }\n                        dst[col_idx] = src[i];\n                    }\n                }\n            }\n\n            // Set dim attribute\n            SEXP r_dim = PROTECT(allocVector(INTSXP, (R_xlen_t)ndim));\n            for (size_t i = 0; i < ndim; i++)\n                INTEGER(r_dim)[i] = (int)shape[i];\n            setAttrib(obj, R_DimSymbol, r_dim);\n            UNPROTECT(2);\n            break;\n        }\n        default:\n            MORLOC_ERROR(\"Unsupported schema type\");\n            goto error;\n    }\n\n    return obj;\n\nerror:\n    return R_NilValue;\n}\n\n// }}} from_voidstar\n\n// {{{ exported morloc API functions\n\n// PID of the process that created the daemon (set in morloc_start_daemon)\nstatic pid_t daemon_creator_pid = 0;\n\n// Close the daemon when the R object dies\nstatic void daemon_finalizer(SEXP ptr) {\n    if (!R_ExternalPtrAddr(ptr)) return;\n    // Skip cleanup in forked children -- they must not unlink the socket file\n    if (daemon_creator_pid != 0 && getpid() != daemon_creator_pid) {\n        R_ClearExternalPtr(ptr);\n        return;\n    }\n    language_daemon_t* daemon = (language_daemon_t*)R_ExternalPtrAddr(ptr);\n    if(daemon != NULL){\n        close_daemon(&daemon);\n    }\n    R_ClearExternalPtr(ptr);\n}\n\n// Release daemon resources in a forked child WITHOUT unlinking the socket file.\n// Workers call this after fork so they don't hold the server_fd or accidentally\n// destroy the socket when they exit.\nSEXP morloc_detach_daemon(SEXP daemon_r) {\n    if (!R_ExternalPtrAddr(daemon_r)) return R_NilValue;\n    language_daemon_t* daemon = (language_daemon_t*)R_ExternalPtrAddr(daemon_r);\n    if (daemon != NULL) {\n        close_socket(daemon->server_fd);\n        client_list_t *current = daemon->client_fds;\n        while (current) {\n            client_list_t *next = current->next;\n            close(current->fd);\n            free(current);\n            current = next;\n        }\n        free(daemon->socket_path);\n        free(daemon->tmpdir);\n        free(daemon->shm_basename);\n        free(daemon);\n    }\n    R_ClearExternalPtr(daemon_r);\n    return R_NilValue;\n}\n\nSEXP morloc_start_daemon(\n    SEXP socket_path_r,\n    SEXP tmpdir_r,\n    SEXP shm_basename_r,\n    SEXP shm_default_size_r\n){ MAYFAIL\n    const char* socket_path = CHAR(STRING_ELT(socket_path_r, 0));\n    const char* tmpdir = CHAR(STRING_ELT(tmpdir_r, 0));\n    const char* shm_basename = CHAR(STRING_ELT(shm_basename_r, 0));\n    size_t shm_default_size = (size_t)asInteger(shm_default_size_r);\n\n    language_daemon_t* daemon = R_TRY(\n        start_daemon,\n        socket_path,\n        tmpdir,\n        shm_basename,\n        shm_default_size\n    );\n\n    // Wrap pointer in external pointer\n    SEXP result = PROTECT(R_MakeExternalPtr(daemon, R_NilValue, R_NilValue));\n\n    // Record which process owns the daemon (for the PID guard in daemon_finalizer)\n    daemon_creator_pid = getpid();\n\n    // Register finalizer with wrapper\n    R_RegisterCFinalizerEx(result, daemon_finalizer, TRUE);\n\n    // Set class attribute\n    SEXP class_name = PROTECT(mkString(\"language_daemon\"));\n    SET_CLASS(result, class_name);\n\n    UNPROTECT(2);\n    return result;\n}\n\n\n\nSEXP morloc_shinit(SEXP shm_basename_r, SEXP volume_index_r, SEXP shm_size_r) { MAYFAIL\n    const char* shm_basename = CHAR(STRING_ELT(shm_basename_r, 0));\n    size_t volume_index = (size_t)asInteger(volume_index_r);\n    size_t shm_size = (size_t)asInteger(shm_size_r);\n\n    R_TRY(shinit, shm_basename, volume_index, shm_size);\n\n    return R_NilValue;\n}\n\n\n// {{{ signal handling for graceful shutdown\n\nstatic volatile sig_atomic_t r_shutting_down = 0;\n\nstatic void r_sigterm_handler(int sig) {\n    (void)sig;\n    r_shutting_down = 1;\n}\n\nSEXP morloc_install_sigterm_handler(void) {\n    struct sigaction sa;\n    sa.sa_handler = r_sigterm_handler;\n    sigemptyset(&sa.sa_mask);\n    sa.sa_flags = 0;\n    sigaction(SIGTERM, &sa, NULL);\n    return R_NilValue;\n}\n\nSEXP morloc_is_shutting_down(void) {\n    return ScalarLogical(r_shutting_down != 0);\n}\n\nSEXP morloc_set_line_buffered(void) {\n    // Only stderr - stdout is left fully buffered for performance\n    // and flushed explicitly after each job.\n    setvbuf(stderr, NULL, _IOLBF, 0);\n    return R_NilValue;\n}\n\n// }}} signal handling\n\nSEXP morloc_wait_for_client(SEXP daemon_r){ MAYFAIL\n    if (!R_ExternalPtrAddr(daemon_r)) {\n        MORLOC_ERROR(\"Expected a daemon pointer\");\n    }\n\n    // Return immediately if shutdown was requested\n    if (r_shutting_down) {\n        return ScalarInteger(-1);\n    }\n\n    language_daemon_t* daemon = (language_daemon_t*)R_ExternalPtrAddr(daemon_r);\n\n    // Use pselect directly (not wait_for_client_with_timeout) so we can\n    // return immediately on EINTR from SIGTERM instead of retrying via WAIT\n    fd_set read_fds;\n    FD_ZERO(&read_fds);\n    FD_SET(daemon->server_fd, &read_fds);\n    int max_fd = daemon->server_fd;\n\n    for (client_list_t* cl = daemon->client_fds; cl != NULL; cl = cl->next) {\n        FD_SET(cl->fd, &read_fds);\n        if (cl->fd > max_fd) max_fd = cl->fd;\n    }\n\n    // 100ms timeout -- short enough for responsive SIGTERM handling\n    struct timespec ts = { .tv_sec = 0, .tv_nsec = 100000000 };\n    sigset_t emptymask;\n    sigemptyset(&emptymask);\n\n    int ready = pselect(max_fd + 1, &read_fds, NULL, NULL, &ts, &emptymask);\n\n    // Check shutdown after pselect (signal may have arrived during the call)\n    if (r_shutting_down) {\n        return ScalarInteger(-1);\n    }\n\n    // Timeout or interrupted -- return 0 (no client)\n    if (ready <= 0) {\n        return ScalarInteger(0);\n    }\n\n    // Accept new connection if server_fd is ready\n    if (FD_ISSET(daemon->server_fd, &read_fds)) {\n        int fd = accept(daemon->server_fd, NULL, NULL);\n        if (fd >= 0) {\n            fcntl(fd, F_SETFL, O_NONBLOCK);\n            client_list_t* new_client = (client_list_t*)calloc(1, sizeof(client_list_t));\n            if (new_client == NULL) {\n                close(fd);\n                MORLOC_ERROR(\"calloc failed\");\n            }\n            new_client->fd = fd;\n            new_client->next = NULL;\n            if (daemon->client_fds == NULL) {\n                daemon->client_fds = new_client;\n            } else {\n                client_list_t* last = daemon->client_fds;\n                while (last->next) last = last->next;\n                last->next = new_client;\n            }\n        }\n    }\n\n    // Return first ready client fd\n    if (daemon->client_fds != NULL) {\n        client_list_t* first = daemon->client_fds;\n        int client_fd = first->fd;\n        daemon->client_fds = first->next;\n        free(first);\n        return ScalarInteger(client_fd);\n    }\n\n    return ScalarInteger(0);\n}\n\n\nSEXP morloc_read_morloc_call_packet(SEXP packet_r) { MAYFAIL\n    uint8_t* packet = RAW(packet_r);\n    morloc_call_t* call_packet = R_TRY(read_morloc_call_packet, packet);\n\n    // Create two element R list\n    //  1: manifold id\n    //  2: argument list of raw packets\n    SEXP r_list = PROTECT(allocVector(VECSXP, 2));\n\n    // Convert midx to R integer\n    SEXP r_mid = PROTECT(ScalarInteger(call_packet->midx));\n\n    // Create arguments list\n    SEXP r_args = PROTECT(allocVector(VECSXP, call_packet->nargs));\n\n    for(size_t i = 0; i < call_packet->nargs; i++) {\n        size_t arg_packet_size = R_TRY_WITH(UNPROTECT(3), morloc_packet_size, call_packet->args[i]);\n        SEXP r_arg = PROTECT(allocVector(RAWSXP, arg_packet_size));\n        memcpy(RAW(r_arg), call_packet->args[i], arg_packet_size);\n        SET_VECTOR_ELT(r_args, i, r_arg);\n        UNPROTECT(1);  // r_arg\n    }\n\n    // Assemble final list\n    SET_VECTOR_ELT(r_list, 0, r_mid);\n    SET_VECTOR_ELT(r_list, 1, r_args);\n\n    free_morloc_call(call_packet);\n\n    UNPROTECT(3);  // r_list, r_mid, r_args\n    return r_list;\n}\n\n\nSEXP morloc_send_packet_to_foreign_server(SEXP client_fd_r, SEXP packet_r) { MAYFAIL\n    if (TYPEOF(client_fd_r) != INTSXP || LENGTH(client_fd_r) != 1) {\n        MORLOC_ERROR(\"client_fd must be a single integer\");\n    }\n    if (TYPEOF(packet_r) != RAWSXP) {\n        MORLOC_ERROR(\"packet must be a raw vector\");\n    }\n\n    // Extract arguments\n    int client_fd = INTEGER(client_fd_r)[0];\n    uint8_t* packet = RAW(packet_r);\n    size_t packet_size = (size_t)LENGTH(packet_r);\n\n    // Call underlying implementation\n    size_t bytes_sent = R_TRY(send_packet_to_foreign_server, client_fd, packet);\n\n    // This could in theory be problematic, since int is smaller than size_t\n    // In practice it should not be, since packets are typically small\n    // However, if I refactor to send large packets in the future, this could be\n    // problematic. Then I would need to convert to a double return.\n    return ScalarInteger((int)bytes_sent);\n}\n\n\n// Read from socket returning raw vector of received data\nSEXP morloc_stream_from_client(SEXP client_fd_r) { MAYFAIL\n    if (TYPEOF(client_fd_r) != INTSXP || LENGTH(client_fd_r) != 1) {\n        MORLOC_ERROR(\"client_fd must be a single integer\");\n    }\n\n    int client_fd = INTEGER(client_fd_r)[0];\n\n    // Read packet from socket\n    uint8_t* packet = R_TRY(stream_from_client, client_fd);\n\n    // Read the packet size from the header (free packet before longjmp on error)\n    size_t packet_size = R_TRY_WITH(free(packet), morloc_packet_size, packet);\n\n    // Create raw vector for result\n    SEXP result = PROTECT(allocVector(RAWSXP, packet_size));\n    memcpy(RAW(result), packet, packet_size);\n    free(packet);\n\n    UNPROTECT(1);\n    return result;\n}\n\n\n// close_socket\nSEXP morloc_close_socket(SEXP socket_id_r) {\n    if (TYPEOF(socket_id_r) != INTSXP || LENGTH(socket_id_r) != 1) {\n        MORLOC_ERROR(\"socket_id must be a single integer\");\n    }\n    int socket_id = INTEGER(socket_id_r)[0];\n    close_socket(socket_id);\n    // Return invisible NULL\n    return R_NilValue;\n}\n\n\n// put_value\nSEXP morloc_put_value(SEXP obj_r, SEXP schema_str_r) { MAYFAIL\n    if (TYPEOF(schema_str_r) != STRSXP || LENGTH(schema_str_r) != 1) {\n        MORLOC_ERROR(\"schema must be a single string\");\n    }\n\n    const char* schema_cstr = CHAR(STRING_ELT(schema_str_r, 0));\n\n    char* schema_str = strdup(schema_cstr);\n    Schema* schema = R_TRY_WITH(free(schema_str), parse_schema, schema_str);\n    free(schema_str);\n\n    // Arrow dispatch: if schema hint is \"arrow\", use Arrow C Data Interface\n    if (schema->hint && strcmp(schema->hint, \"arrow\") == 0) {\n        // Export R arrow RecordBatch via C Data Interface -> copy to shm -> packet\n        // arrow::ExportRecordBatch(batch, array_ptr, schema_ptr)\n        struct ArrowSchema arrow_schema;\n        struct ArrowArray arrow_array;\n        memset(&arrow_schema, 0, sizeof(arrow_schema));\n        memset(&arrow_array, 0, sizeof(arrow_array));\n\n        SEXP arrow_ns = PROTECT(R_FindNamespace(mkString(\"arrow\")));\n        SEXP export_fn = PROTECT(findVarInFrame(arrow_ns, install(\"ExportRecordBatch\")));\n        if (export_fn == R_UnboundValue) {\n            UNPROTECT(2);\n            free_schema(schema);\n            MORLOC_ERROR(\"arrow::ExportRecordBatch not found; is the arrow package installed?\");\n        }\n\n        SEXP array_ptr_r = PROTECT(R_MakeExternalPtr(&arrow_array, R_NilValue, R_NilValue));\n        SEXP schema_ptr_r = PROTECT(R_MakeExternalPtr(&arrow_schema, R_NilValue, R_NilValue));\n        SEXP call = PROTECT(lang4(export_fn, obj_r, array_ptr_r, schema_ptr_r));\n        eval(call, arrow_ns);\n        UNPROTECT(5);\n\n        char* errmsg = NULL;\n        relptr_t relptr = arrow_to_shm(&arrow_array, &arrow_schema, &errmsg);\n\n        if (arrow_schema.release) arrow_schema.release(&arrow_schema);\n        if (arrow_array.release) arrow_array.release(&arrow_array);\n\n        if (errmsg) {\n            free_schema(schema);\n            MORLOC_ERROR(\"Arrow export failed: %s\", errmsg);\n        }\n\n        uint8_t* packet = make_arrow_data_packet(relptr, schema);\n        if (!packet) {\n            free_schema(schema);\n            MORLOC_ERROR(\"Failed to create arrow data packet\");\n        }\n\n        size_t packet_size = R_TRY_WITH({free(packet); free_schema(schema);}, morloc_packet_size, packet);\n        SEXP result = PROTECT(allocVector(RAWSXP, packet_size));\n        memcpy(RAW(result), packet, packet_size);\n        free(packet);\n        free_schema(schema);\n        UNPROTECT(1);\n        return result;\n    }\n\n    void* voidstar = to_voidstar(obj_r, schema);\n    if (!voidstar) {\n        free_schema(schema);\n        MORLOC_ERROR(\"Failed to convert R object to internal representation\");\n    }\n\n    relptr_t relptr = R_TRY_WITH(free_schema(schema), abs2rel, voidstar);\n\n    uint8_t* packet = R_TRY_WITH(free_schema(schema), make_data_packet_auto, voidstar, relptr, schema);\n\n    const morloc_packet_header_t* hdr = (const morloc_packet_header_t*)packet;\n    if (hdr->command.data.source != PACKET_SOURCE_RPTR) {\n        // Data inlined in packet -- free SHM immediately\n        char* free_err = NULL;\n        shfree_by_schema((absptr_t)voidstar, schema, &free_err);\n        if (free_err) { free(free_err); free_err = NULL; }\n        shfree((absptr_t)voidstar, &free_err);\n        if (free_err) { free(free_err); }\n    }\n\n    size_t packet_size = R_TRY_WITH({free(packet); free_schema(schema);}, morloc_packet_size, packet);\n\n    SEXP result = PROTECT(allocVector(RAWSXP, packet_size));\n    memcpy(RAW(result), packet, packet_size);\n    free(packet);\n    free_schema(schema);\n\n    UNPROTECT(1);\n    return result;\n}\n\n\n// mlc_show: serialize a value to a JSON string\nSEXP morloc_mlc_show(SEXP obj_r, SEXP schema_str_r) { MAYFAIL\n    if (TYPEOF(schema_str_r) != STRSXP || LENGTH(schema_str_r) != 1) {\n        MORLOC_ERROR(\"schema must be a single string\");\n    }\n\n    char* schema_str = strdup(CHAR(STRING_ELT(schema_str_r, 0)));\n    Schema* schema = R_TRY_WITH(free(schema_str), parse_schema, schema_str);\n    free(schema_str);\n\n    void* voidstar = to_voidstar(obj_r, schema);\n    if (!voidstar) {\n        free_schema(schema);\n        MORLOC_ERROR(\"Failed to convert R object to internal representation\");\n    }\n\n    char* json = R_TRY_WITH(free_schema(schema), mlc_show, voidstar, schema);\n\n    {\n        char* shfree_errmsg = NULL;\n        shfree(voidstar, &shfree_errmsg);\n        free(shfree_errmsg);\n    }\n    free_schema(schema);\n\n    SEXP result = PROTECT(mkString(json));\n    free(json);\n    UNPROTECT(1);\n    return result;\n}\n\n\nSEXP morloc_get_value(SEXP packet_r, SEXP schema_str_r) { MAYFAIL\n    if (TYPEOF(packet_r) != RAWSXP) {\n        MORLOC_ERROR(\"packet must be a raw vector\");\n    }\n    if (TYPEOF(schema_str_r) != STRSXP || LENGTH(schema_str_r) != 1) {\n        MORLOC_ERROR(\"schema must be a single string\");\n    }\n\n    // Extract arguments\n    uint8_t* packet = RAW(packet_r);\n    size_t packet_size = (size_t)LENGTH(packet_r);\n\n    const morloc_packet_header_t* header = (const morloc_packet_header_t*)packet;\n    uint8_t source = header->command.data.source;\n    uint8_t format = header->command.data.format;\n\n    const char* schema_cstr = CHAR(STRING_ELT(schema_str_r, 0));\n\n    char* schema_str = strdup(schema_cstr);\n    Schema* schema = R_TRY_WITH(free(schema_str), parse_schema, schema_str);\n    free(schema_str);\n\n    // Arrow dispatch: if packet format is Arrow, import via C Data Interface\n    if (format == PACKET_FORMAT_ARROW) {\n        uint8_t* arrow_ptr = R_TRY_WITH(free_schema(schema),\n            get_morloc_data_packet_value, packet, schema);\n        const arrow_shm_header_t* arrow_hdr = (const arrow_shm_header_t*)arrow_ptr;\n\n        struct ArrowSchema arrow_schema;\n        struct ArrowArray arrow_array;\n        char* arrow_err = NULL;\n        arrow_from_shm(arrow_hdr, &arrow_schema, &arrow_array, &arrow_err);\n        if (arrow_err) {\n            if (arrow_schema.release) arrow_schema.release(&arrow_schema);\n            if (arrow_array.release) arrow_array.release(&arrow_array);\n            free_schema(schema);\n            MORLOC_ERROR(\"Arrow import failed: %s\", arrow_err);\n        }\n\n        // Import via R arrow package: arrow::ImportRecordBatch(array_ptr, schema_ptr)\n        SEXP arrow_ns = PROTECT(R_FindNamespace(mkString(\"arrow\")));\n        SEXP import_fn = PROTECT(findVarInFrame(arrow_ns, install(\"ImportRecordBatch\")));\n        if (import_fn == R_UnboundValue) {\n            if (arrow_schema.release) arrow_schema.release(&arrow_schema);\n            if (arrow_array.release) arrow_array.release(&arrow_array);\n            UNPROTECT(2);\n            free_schema(schema);\n            MORLOC_ERROR(\"arrow::ImportRecordBatch not found; is the arrow package installed?\");\n        }\n\n        SEXP array_ptr_r = PROTECT(R_MakeExternalPtr(&arrow_array, R_NilValue, R_NilValue));\n        SEXP schema_ptr_r = PROTECT(R_MakeExternalPtr(&arrow_schema, R_NilValue, R_NilValue));\n        SEXP call = PROTECT(lang3(import_fn, array_ptr_r, schema_ptr_r));\n        SEXP obj_r = PROTECT(eval(call, arrow_ns));\n        UNPROTECT(6);\n\n        // Incref shm so data stays alive\n        char* incref_err = NULL;\n        shincref((absptr_t)arrow_ptr, &incref_err);\n        if (incref_err) { free(incref_err); }\n\n        free_schema(schema);\n        return obj_r;\n    }\n\n    // Fast path: inline voidstar -- read directly from packet, no SHM needed\n    if (source == PACKET_SOURCE_MESG && format == PACKET_FORMAT_VOIDSTAR) {\n        const uint8_t* payload = packet + sizeof(morloc_packet_header_t) + header->offset;\n        SEXP obj_r = from_voidstar((const void*)payload, schema, (const void*)payload);\n        free_schema(schema);\n        if (obj_r == NULL) {\n            MORLOC_ERROR(\"Failed to convert internal representation to R object\");\n        }\n        return obj_r;\n    }\n\n    // SHM paths\n    uint8_t* voidstar = R_TRY_WITH(free_schema(schema), get_morloc_data_packet_value, packet, schema);\n\n    SEXP obj_r = from_voidstar(voidstar, schema, NULL);\n    if (obj_r == NULL) {\n        free_schema(schema);\n        MORLOC_ERROR(\"Failed to convert internal representation to R object\");\n    }\n\n    free_schema(schema);\n\n    return obj_r;\n}\n\n\nSEXP morloc_foreign_call(SEXP socket_path_r, SEXP mid_r, SEXP args_r) { MAYFAIL\n    // Validate inputs\n    if (TYPEOF(socket_path_r) != STRSXP || LENGTH(socket_path_r) != 1) {\n        MORLOC_ERROR(\"socket_path must be a single string\");\n    }\n    if (TYPEOF(mid_r) != INTSXP || LENGTH(mid_r) != 1) {\n        MORLOC_ERROR(\"mid must be a single integer\");\n    }\n    if (TYPEOF(args_r) != VECSXP) {\n        MORLOC_ERROR(\"args must be a list of raw vectors\");\n    }\n\n    // Extract arguments\n    const char* socket_path = CHAR(STRING_ELT(socket_path_r, 0));\n    int mid = INTEGER(mid_r)[0];\n    size_t nargs = (size_t)LENGTH(args_r);\n\n    // Allocate temporary storage\n    const uint8_t** arg_packets = (const uint8_t**)R_alloc(nargs, sizeof(uint8_t*));\n\n    // Convert R raw vectors to C buffers\n    for (size_t i = 0; i < nargs; i++) {\n        SEXP arg = VECTOR_ELT(args_r, i);\n        if (TYPEOF(arg) != RAWSXP) {\n            MORLOC_ERROR(\"All arguments must be raw vectors (argument %zu)\", i+1);\n        }\n        arg_packets[i] = RAW(arg);\n    }\n\n    // Create call packet\n    uint8_t* packet = R_TRY(\n        make_morloc_local_call_packet,\n        (uint32_t)mid,\n        arg_packets,\n        nargs\n    );\n\n    // Send/receive over socket\n    uint8_t* result = R_TRY_WITH(free(packet),\n        send_and_receive_over_socket,\n        socket_path,\n        packet\n    );\n\n    // Get result size\n    size_t result_length = R_TRY_WITH({free(packet); free(result);}, morloc_packet_size, result);\n\n    // Create result raw vector\n    SEXP result_r = PROTECT(allocVector(RAWSXP, result_length));\n    memcpy(RAW(result_r), result, result_length);\n    free(packet);\n    free(result);\n\n    // Cleanup\n    UNPROTECT(1);\n    return result_r;\n}\n\n\nSEXP morloc_is_ping(SEXP packet_r) { MAYFAIL\n    if (TYPEOF(packet_r) != RAWSXP) {\n        MORLOC_ERROR(\"packet must be a raw vector\");\n    }\n\n    bool is_ping = R_TRY(packet_is_ping, RAW(packet_r));\n\n    return ScalarLogical(is_ping);\n}\n\n\nSEXP morloc_is_local_call(SEXP packet_r) { MAYFAIL\n    if (TYPEOF(packet_r) != RAWSXP) {\n        MORLOC_ERROR(\"packet must be a raw vector\");\n    }\n\n    bool is_local_call = R_TRY(packet_is_local_call, RAW(packet_r));\n\n    return ScalarLogical(is_local_call);\n}\n\n\nSEXP morloc_is_remote_call(SEXP packet_r) { MAYFAIL\n    if (TYPEOF(packet_r) != RAWSXP) {\n        MORLOC_ERROR(\"packet must be a raw vector\");\n    }\n\n    bool is_remote_call = R_TRY(packet_is_remote_call, RAW(packet_r));\n\n    return ScalarLogical(is_remote_call);\n}\n\n\nSEXP morloc_pong(SEXP packet_r) { MAYFAIL\n    if (TYPEOF(packet_r) != RAWSXP) {\n        MORLOC_ERROR(\"packet must be a raw vector\");\n    }\n\n    // Generate a response to ping\n    uint8_t* pong = R_TRY(return_ping, RAW(packet_r));\n\n    size_t pong_size = R_TRY_WITH(free(pong), morloc_packet_size, pong);\n\n    SEXP result_r = PROTECT(allocVector(RAWSXP, pong_size));\n    memcpy(RAW(result_r), pong, pong_size);\n    free(pong);\n\n    UNPROTECT(1);\n    return result_r;\n}\n\n\nSEXP morloc_make_fail_packet(SEXP failure_message_r) { MAYFAIL\n    const char* failure_message = CHAR(STRING_ELT(failure_message_r, 0));\n    uint8_t* fail_packet = make_fail_packet(failure_message);\n\n    size_t packet_size = R_TRY(morloc_packet_size, fail_packet);\n\n    SEXP packet_r = PROTECT(allocVector(RAWSXP, packet_size));\n    memcpy(RAW(packet_r), fail_packet, packet_size);\n    free(fail_packet);\n\n    UNPROTECT(1);\n    return packet_r;\n}\n\n\nSEXP extract_element_by_name(SEXP list, const char* key) {\n  // Ensure inputs are correct types\n  if (TYPEOF(list) != VECSXP) MORLOC_ERROR(\"Input must be a list\");\n\n  // Get list names attribute\n  SEXP names = Rf_getAttrib(list, R_NamesSymbol);\n  if (names == R_NilValue) MORLOC_ERROR(\"List must have names\");\n\n  // Iterate through list elements\n  for (int i = 0; i < Rf_length(list); i++) {\n    const char *current_name = CHAR(STRING_ELT(names, i));\n\n    if (strcmp(key, current_name) == 0) {\n      return VECTOR_ELT(list, i);  // Return matching element\n    }\n  }\n\n  return R_NilValue;  // Return NULL if name not found\n}\n\n\nSEXP morloc_remote_call(SEXP midx, SEXP socket_path, SEXP cache_path, SEXP resources, SEXP arg_packets) { MAYFAIL\n    // Protect all R inputs immediately\n    PROTECT(socket_path);\n    PROTECT(cache_path);\n    PROTECT(resources);\n    PROTECT(arg_packets = coerceVector(arg_packets, VECSXP));\n\n    // Convert basic parameters\n    int c_midx = INTEGER(midx)[0];\n    const char* c_socket_path = CHAR(STRING_ELT(socket_path, 0));\n    const char* c_cache_path = CHAR(STRING_ELT(cache_path, 0));\n\n    // Extract resources with validation\n    resources_t c_resources;\n    SEXP mem = extract_element_by_name(resources, \"memory\");\n    SEXP tim = extract_element_by_name(resources, \"time\");\n    SEXP cpu = extract_element_by_name(resources, \"cpus\");\n    SEXP gpu = extract_element_by_name(resources, \"gpus\");\n    if (mem == R_NilValue || tim == R_NilValue || cpu == R_NilValue || gpu == R_NilValue) {\n        UNPROTECT(4);\n        MORLOC_ERROR(\"Missing required resource field (memory, time, cpus, or gpus)\");\n    }\n    c_resources.memory = INTEGER(mem)[0];\n    c_resources.time = INTEGER(tim)[0];\n    c_resources.cpus = INTEGER(cpu)[0];\n    c_resources.gpus = INTEGER(gpu)[0];\n\n    // Process argument packets with type checking\n    size_t nargs = LENGTH(arg_packets);\n    const uint8_t** c_arg_packets = (const uint8_t**) R_alloc(nargs, sizeof(uint8_t*));\n\n    for(size_t i = 0; i < nargs; i++) {\n        SEXP raw_vec = VECTOR_ELT(arg_packets, i);\n        if(TYPEOF(raw_vec) != RAWSXP) {\n            UNPROTECT(4);\n            MORLOC_ERROR(\"arg_packets must contain only raw vectors\");\n        }\n        c_arg_packets[i] = (uint8_t*)RAW(raw_vec);\n    }\n\n    // Execute remote call\n    uint8_t* result_packet = R_TRY_WITH(UNPROTECT(4),\n        remote_call,\n        c_midx,\n        c_socket_path,\n        c_cache_path,\n        &c_resources,\n        c_arg_packets,\n        nargs\n    );\n\n    // Validate and copy result\n    size_t packet_size = R_TRY_WITH({free(result_packet); UNPROTECT(4);}, morloc_packet_size, result_packet);\n    if(!result_packet || packet_size == 0) {\n        if(result_packet) free(result_packet);\n        UNPROTECT(4);\n        MORLOC_ERROR(\"Invalid result packet from remote call\");\n    }\n\n    SEXP result_packet_r = PROTECT(allocVector(RAWSXP, packet_size));\n    memcpy(RAW(result_packet_r), result_packet, packet_size);\n    free(result_packet);\n\n    // Cleanup and return\n    UNPROTECT(5);  // socket_path, cache_path, resources, arg_packets, result_packet_r\n    return result_packet_r;\n}\n\n\n// {{{ fork and fd-passing functions\n\nSEXP morloc_socketpair(void) {\n    int sv[2];\n    if (socketpair(AF_UNIX, SOCK_STREAM, 0, sv) < 0) {\n        error(\"socketpair failed: %s\", strerror(errno));\n    }\n    SEXP result = PROTECT(allocVector(INTSXP, 2));\n    INTEGER(result)[0] = sv[0];\n    INTEGER(result)[1] = sv[1];\n    UNPROTECT(1);\n    return result;\n}\n\nSEXP morloc_fork(void) {\n    pid_t pid = fork();\n    if (pid < 0) {\n        error(\"fork failed: %s\", strerror(errno));\n    }\n    return ScalarInteger((int)pid);\n}\n\n// Immediately terminate the process without running any cleanup.\n// Must be used by forked worker children instead of R's quit().\n// R's quit() runs finalizers that try to free objects allocated by the\n// parent process, which corrupts the heap on glibc >= 2.39.\nSEXP morloc_exit(SEXP status_r) {\n    int status = INTEGER(status_r)[0];\n    _exit(status);\n    return R_NilValue; // unreachable\n}\n\nSEXP morloc_send_fd(SEXP pipe_fd_r, SEXP client_fd_r) {\n    int pipe_fd = INTEGER(pipe_fd_r)[0];\n    int client_fd = INTEGER(client_fd_r)[0];\n\n    struct msghdr msg = {0};\n    struct iovec iov;\n    char buf[1] = {0};\n    char cmsgbuf[CMSG_SPACE(sizeof(int))];\n\n    iov.iov_base = buf;\n    iov.iov_len = 1;\n    msg.msg_iov = &iov;\n    msg.msg_iovlen = 1;\n    msg.msg_control = cmsgbuf;\n    msg.msg_controllen = sizeof(cmsgbuf);\n\n    struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);\n    cmsg->cmsg_level = SOL_SOCKET;\n    cmsg->cmsg_type = SCM_RIGHTS;\n    cmsg->cmsg_len = CMSG_LEN(sizeof(int));\n    memcpy(CMSG_DATA(cmsg), &client_fd, sizeof(int));\n\n    ssize_t n = sendmsg(pipe_fd, &msg, 0);\n    if (n < 0) {\n        error(\"sendmsg SCM_RIGHTS failed: %s\", strerror(errno));\n    }\n    return R_NilValue;\n}\n\nSEXP morloc_recv_fd(SEXP pipe_fd_r) {\n    int pipe_fd = INTEGER(pipe_fd_r)[0];\n\n    struct msghdr msg = {0};\n    struct iovec iov;\n    char buf[1];\n    char cmsgbuf[CMSG_SPACE(sizeof(int))];\n\n    iov.iov_base = buf;\n    iov.iov_len = 1;\n    msg.msg_iov = &iov;\n    msg.msg_iovlen = 1;\n    msg.msg_control = cmsgbuf;\n    msg.msg_controllen = sizeof(cmsgbuf);\n\n    ssize_t n = recvmsg(pipe_fd, &msg, 0);\n    if (n <= 0) {\n        return ScalarInteger(-1);\n    }\n\n    struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);\n    if (cmsg == NULL || cmsg->cmsg_level != SOL_SOCKET || cmsg->cmsg_type != SCM_RIGHTS) {\n        return ScalarInteger(-1);\n    }\n\n    int fd;\n    memcpy(&fd, CMSG_DATA(cmsg), sizeof(int));\n    return ScalarInteger(fd);\n}\n\nSEXP morloc_kill(SEXP pid_r, SEXP sig_r) {\n    pid_t pid = (pid_t)INTEGER(pid_r)[0];\n    int sig = INTEGER(sig_r)[0];\n    int ret = kill(pid, sig);\n    return ScalarInteger(ret);\n}\n\nSEXP morloc_waitpid(SEXP pid_r) {\n    pid_t pid = (pid_t)INTEGER(pid_r)[0];\n    int status;\n    pid_t result = waitpid(pid, &status, WNOHANG);\n    return ScalarInteger((int)result);\n}\n\nSEXP morloc_waitpid_blocking(SEXP pid_r) {\n    pid_t pid = (pid_t)INTEGER(pid_r)[0];\n    int status;\n    pid_t result = waitpid(pid, &status, 0);\n    return ScalarInteger((int)result);\n}\n\n// }}} fork and fd-passing functions\n\n// {{{ shared counter functions (for dynamic worker spawning)\n\nstatic void shared_counter_finalizer(SEXP ptr) {\n    int* p = (int*)R_ExternalPtrAddr(ptr);\n    if (p != NULL) {\n        munmap(p, sizeof(int));\n        R_ClearExternalPtr(ptr);\n    }\n}\n\nSEXP morloc_shared_counter_create(void) {\n    int* p = (int*)mmap(NULL, sizeof(int),\n                        PROT_READ | PROT_WRITE,\n                        MAP_SHARED | MAP_ANONYMOUS, -1, 0);\n    if (p == MAP_FAILED) {\n        error(\"mmap failed for shared counter: %s\", strerror(errno));\n    }\n    *p = 0;\n    SEXP ptr = PROTECT(R_MakeExternalPtr(p, R_NilValue, R_NilValue));\n    R_RegisterCFinalizerEx(ptr, shared_counter_finalizer, TRUE);\n    UNPROTECT(1);\n    return ptr;\n}\n\nSEXP morloc_shared_counter_inc(SEXP ptr_r) {\n    int* p = (int*)R_ExternalPtrAddr(ptr_r);\n    if (p == NULL) error(\"shared counter is NULL\");\n    int val = __atomic_add_fetch(p, 1, __ATOMIC_RELAXED);\n    return ScalarInteger(val);\n}\n\nSEXP morloc_shared_counter_dec(SEXP ptr_r) {\n    int* p = (int*)R_ExternalPtrAddr(ptr_r);\n    if (p == NULL) error(\"shared counter is NULL\");\n    int val = __atomic_sub_fetch(p, 1, __ATOMIC_RELAXED);\n    return ScalarInteger(val);\n}\n\nSEXP morloc_shared_counter_read(SEXP ptr_r) {\n    int* p = (int*)R_ExternalPtrAddr(ptr_r);\n    if (p == NULL) error(\"shared counter is NULL\");\n    int val = __atomic_load_n(p, __ATOMIC_RELAXED);\n    return ScalarInteger(val);\n}\n\nSEXP morloc_pipe(void) {\n    int fds[2];\n    if (pipe(fds) != 0) {\n        error(\"pipe failed: %s\", strerror(errno));\n    }\n    SEXP result = PROTECT(allocVector(INTSXP, 2));\n    INTEGER(result)[0] = fds[0];  /* read end */\n    INTEGER(result)[1] = fds[1];  /* write end */\n    UNPROTECT(1);\n    return result;\n}\n\nSEXP morloc_write_byte(SEXP fd_r, SEXP byte_r) {\n    int fd = INTEGER(fd_r)[0];\n    unsigned char b = (unsigned char)RAW(byte_r)[0];\n    ssize_t n = write(fd, &b, 1);\n    return ScalarInteger((int)n);\n}\n\nSEXP morloc_close_fd(SEXP fd_r) {\n    int fd = INTEGER(fd_r)[0];\n    close(fd);\n    return R_NilValue;\n}\n\n// }}} shared counter functions\n\n// {{{ C-level worker loop\n\n// Receive a file descriptor over a Unix domain socket (C-level helper).\nstatic int recv_fd_c(int pipe_fd) {\n    struct msghdr msg = {0};\n    struct iovec iov;\n    char buf[1];\n    char cmsgbuf[CMSG_SPACE(sizeof(int))];\n\n    iov.iov_base = buf;\n    iov.iov_len = 1;\n    msg.msg_iov = &iov;\n    msg.msg_iovlen = 1;\n    msg.msg_control = cmsgbuf;\n    msg.msg_controllen = sizeof(cmsgbuf);\n\n    ssize_t n = recvmsg(pipe_fd, &msg, 0);\n    if (n <= 0) return -1;\n\n    struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);\n    if (!cmsg || cmsg->cmsg_level != SOL_SOCKET || cmsg->cmsg_type != SCM_RIGHTS)\n        return -1;\n\n    int fd;\n    memcpy(&fd, CMSG_DATA(cmsg), sizeof(int));\n    return fd;\n}\n\n// Send a fail packet to the client (best-effort, ignores send errors).\nstatic void send_fail_to_client(int client_fd, const char* msg) {\n    char* errmsg = NULL;\n    uint8_t* fail = make_fail_packet(msg);\n    send_packet_to_foreign_server(client_fd, fail, &errmsg);\n    free(fail);\n}\n\n// Dispatch a call to a manifold function. All packet handling is in C;\n// only the manifold evaluation crosses into R via R_tryEval.\nstatic void dispatch_manifold_c(int client_fd, const uint8_t* packet,\n                                SEXP dispatch, const char* label) {\n    char* errmsg = NULL;\n\n    morloc_call_t* call = read_morloc_call_packet(packet, &errmsg);\n    if (errmsg) {\n        send_fail_to_client(client_fd, errmsg);\n        return;\n    }\n\n    int midx = (int)call->midx;\n    SEXP fn = (midx >= 1 && midx <= LENGTH(dispatch))\n              ? VECTOR_ELT(dispatch, midx - 1) : R_NilValue;\n\n    if (fn == R_NilValue) {\n        char msg[128];\n        snprintf(msg, sizeof(msg), \"%s function not found: m%d\", label, midx);\n        send_fail_to_client(client_fd, msg);\n        free_morloc_call(call);\n        return;\n    }\n\n    // Build R pairlist of raw-vector arguments: fn(arg1, arg2, ...)\n    int nprotect = 0;\n    SEXP pairlist = R_NilValue;\n    for (int i = (int)call->nargs - 1; i >= 0; i--) {\n        size_t arg_size = morloc_packet_size(call->args[i], &errmsg);\n        if (errmsg) {\n            UNPROTECT(nprotect);\n            send_fail_to_client(client_fd, errmsg);\n            free_morloc_call(call);\n            return;\n        }\n        SEXP r_arg = PROTECT(allocVector(RAWSXP, arg_size));\n        nprotect++;\n        memcpy(RAW(r_arg), call->args[i], arg_size);\n        pairlist = PROTECT(Rf_cons(r_arg, pairlist));\n        nprotect++;\n    }\n    free_morloc_call(call);\n\n    SEXP r_call = PROTECT(Rf_lcons(fn, pairlist));\n    nprotect++;\n\n    // Single crossing into R: evaluate the manifold\n    int eval_err = 0;\n    SEXP result = R_tryEvalSilent(r_call, R_GlobalEnv, &eval_err);\n\n    if (eval_err || result == R_NilValue || TYPEOF(result) != RAWSXP) {\n        UNPROTECT(nprotect);\n        send_fail_to_client(client_fd,\n            eval_err ? R_curErrorBuf() : \"manifold returned non-raw result\");\n        return;\n    }\n\n    PROTECT(result);\n    nprotect++;\n\n    send_packet_to_foreign_server(client_fd, RAW(result), &errmsg);\n    UNPROTECT(nprotect);\n}\n\n// Process one client job entirely in C. Only crosses into R for\n// the actual manifold evaluation.\nstatic void run_job_c(int client_fd, SEXP dispatch, SEXP remote_dispatch) {\n    char* errmsg = NULL;\n\n    uint8_t* packet = stream_from_client(client_fd, &errmsg);\n    if (errmsg) {\n        send_fail_to_client(client_fd, errmsg);\n        free(errmsg);\n        close_socket(client_fd);\n        return;\n    }\n\n    bool is_local = packet_is_local_call(packet, &errmsg);\n    if (!errmsg && is_local) {\n        dispatch_manifold_c(client_fd, packet, dispatch, \"Local\");\n    } else if (!errmsg) {\n        bool is_remote = packet_is_remote_call(packet, &errmsg);\n        if (!errmsg && is_remote) {\n            dispatch_manifold_c(client_fd, packet, remote_dispatch, \"Remote\");\n        } else if (!errmsg) {\n            bool is_ping_pkt = packet_is_ping(packet, &errmsg);\n            if (!errmsg && is_ping_pkt) {\n                uint8_t* pong = return_ping(packet, &errmsg);\n                if (!errmsg) {\n                    send_packet_to_foreign_server(client_fd, pong, &errmsg);\n                    free(pong);\n                }\n            } else if (!errmsg) {\n                send_fail_to_client(client_fd, \"Unexpected packet type\");\n            }\n        }\n    }\n\n    if (errmsg) {\n        send_fail_to_client(client_fd, errmsg);\n    }\n\n    free(packet);\n    close_socket(client_fd);\n}\n\n// Tight C worker loop. Receives fds from the job queue and processes them,\n// crossing into R only for manifold evaluation.\nSEXP morloc_worker_loop_c(SEXP pipe_fd_r, SEXP dispatch_r, SEXP remote_dispatch_r) {\n    int pipe_fd = INTEGER(pipe_fd_r)[0];\n    PROTECT(dispatch_r);\n    PROTECT(remote_dispatch_r);\n\n    while (!r_shutting_down) {\n        int client_fd = recv_fd_c(pipe_fd);\n        if (client_fd < 0) break;\n        run_job_c(client_fd, dispatch_r, remote_dispatch_r);\n        fflush(stdout);\n    }\n\n    UNPROTECT(2);\n    return R_NilValue;\n}\n\n// }}} C-level worker loop\n\n// }}} exported functions\n\n\nvoid R_init_rmorloc(DllInfo *info) {\n    R_CallMethodDef callMethods[] = {\n        {\"morloc_start_daemon\", (DL_FUNC) &morloc_start_daemon, 4},\n        {\"morloc_wait_for_client\", (DL_FUNC) &morloc_wait_for_client, 1},\n        {\"morloc_read_morloc_call_packet\", (DL_FUNC) &morloc_read_morloc_call_packet, 1},\n        {\"morloc_send_packet_to_foreign_server\", (DL_FUNC) &morloc_send_packet_to_foreign_server, 2},\n        {\"morloc_stream_from_client\", (DL_FUNC) &morloc_stream_from_client, 1},\n        {\"morloc_close_socket\", (DL_FUNC) &morloc_close_socket, 1},\n        {\"morloc_foreign_call\", (DL_FUNC) &morloc_foreign_call, 3},\n        {\"morloc_get_value\", (DL_FUNC) &morloc_get_value, 2},\n        {\"morloc_put_value\", (DL_FUNC) &morloc_put_value, 2},\n        {\"morloc_mlc_show\", (DL_FUNC) &morloc_mlc_show, 2},\n        {\"morloc_is_ping\", (DL_FUNC) &morloc_is_ping, 1},\n        {\"morloc_is_local_call\", (DL_FUNC) &morloc_is_local_call, 1},\n        {\"morloc_is_remote_call\", (DL_FUNC) &morloc_is_remote_call, 1},\n        {\"morloc_remote_call\", (DL_FUNC) &morloc_remote_call, 5},\n        {\"morloc_pong\", (DL_FUNC) &morloc_pong, 1},\n        {\"morloc_make_fail_packet\", (DL_FUNC) &morloc_make_fail_packet, 1},\n        {\"morloc_shinit\", (DL_FUNC) &morloc_shinit, 3},\n        {\"morloc_socketpair\", (DL_FUNC) &morloc_socketpair, 0},\n        {\"morloc_fork\", (DL_FUNC) &morloc_fork, 0},\n        {\"morloc_exit\", (DL_FUNC) &morloc_exit, 1},\n        {\"morloc_send_fd\", (DL_FUNC) &morloc_send_fd, 2},\n        {\"morloc_recv_fd\", (DL_FUNC) &morloc_recv_fd, 1},\n        {\"morloc_kill\", (DL_FUNC) &morloc_kill, 2},\n        {\"morloc_waitpid\", (DL_FUNC) &morloc_waitpid, 1},\n        {\"morloc_waitpid_blocking\", (DL_FUNC) &morloc_waitpid_blocking, 1},\n        {\"morloc_install_sigterm_handler\", (DL_FUNC) &morloc_install_sigterm_handler, 0},\n        {\"morloc_set_line_buffered\", (DL_FUNC) &morloc_set_line_buffered, 0},\n        {\"morloc_is_shutting_down\", (DL_FUNC) &morloc_is_shutting_down, 0},\n        {\"morloc_detach_daemon\", (DL_FUNC) &morloc_detach_daemon, 1},\n        {\"morloc_shared_counter_create\", (DL_FUNC) &morloc_shared_counter_create, 0},\n        {\"morloc_shared_counter_inc\", (DL_FUNC) &morloc_shared_counter_inc, 1},\n        {\"morloc_shared_counter_dec\", (DL_FUNC) &morloc_shared_counter_dec, 1},\n        {\"morloc_shared_counter_read\", (DL_FUNC) &morloc_shared_counter_read, 1},\n        {\"morloc_pipe\", (DL_FUNC) &morloc_pipe, 0},\n        {\"morloc_write_byte\", (DL_FUNC) &morloc_write_byte, 2},\n        {\"morloc_close_fd\", (DL_FUNC) &morloc_close_fd, 1},\n        {\"morloc_worker_loop_c\", (DL_FUNC) &morloc_worker_loop_c, 3},\n        {NULL, NULL, 0}\n    };\n\n    R_registerRoutines(info, NULL, callMethods, NULL, NULL);\n    R_useDynamicSymbols(info, FALSE);\n}\n"
  },
  {
    "path": "data/misc/mlccpptypes.hpp",
    "content": ""
  },
  {
    "path": "data/morloc/morloc.h",
    "content": "// morloc.h -- C ABI contract for libmorloc.so\n//\n// This is the single public header for consumers of the morloc runtime library.\n// It defines all types and function declarations exported by libmorloc.so.\n\n#ifndef __MORLOC_H__\n#define __MORLOC_H__\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n// ========================================================================\n// Section 1: System includes and basic typedefs\n// ========================================================================\n\n#include <stdarg.h>     // va_list, va_start, va_arg, va_end (used by pool templates)\n#include <stdbool.h>\n#include <stddef.h>\n#include <stdint.h>\n#include <stdio.h>      // FILE* for read_binary_fd\n#include <sys/select.h> // fd_set\n#include <sys/socket.h>\n#include <sys/types.h>  // pid_t, ssize_t\n#include <sys/un.h>     // struct sockaddr_un\n\n// Error message output parameter: all fallible functions take this as their\n// last argument. On failure the callee sets *errmsg_ to a heap-allocated\n// string describing the error. On success *errmsg_ is set to NULL.\ntypedef char** errmsg_;\n#define ERRMSG char** errmsg_\n\n// Exit codes used by several functions.\n#define EXIT_PASS 0\n#define EXIT_FAIL 1\n\n// Convenience macros used by language extensions (pymorloc.c, rmorloc.c, cppmorloc.cpp)\n#include <stdlib.h>  // free\n#define FREE(ptr) if(ptr != NULL){ free(ptr); ptr = NULL; }\n\n// Size limits shared between library and consumers.\n#define MAX_FILENAME_SIZE 128\n#define MAX_ERRMSG_SIZE   1024\n#define MAX_PATH_SIZE     512\n#define BUFFER_SIZE       4096\n\n// Opaque JSON builder handle (Rust uses its own struct internally).\ntypedef void* json_buf_t;\n\n// ========================================================================\n// Section 2: Memory / SHM types\n// ========================================================================\n\n// Pointer types for the multi-volume shared memory pool.\n//\n// relptr_t  -- index into the logical (multi-volume) pool; shared between\n//              processes.\n// volptr_t  -- index into a single volume (0 = first block after shm header).\n// absptr_t  -- absolute virtual address in the current process.\ntypedef ssize_t relptr_t;\ntypedef ssize_t volptr_t;\ntypedef void*   absptr_t;\n\n#define VOLNULL -1\n#define RELNULL -1\n\n// Magic numbers for integrity checks.\n#define SHM_MAGIC 0xFECA0DF0\n#define BLK_MAGIC 0x0CB10DF0\n\n#define MAX_VOLUME_NUMBER 32\n\n// Shared memory volume header (lives at the start of each mmap'd region).\ntypedef struct shm_s {\n    unsigned int magic;\n    char volume_name[MAX_FILENAME_SIZE];\n    int volume_index;\n    size_t volume_size;\n    size_t relative_offset;\n    // Note: pthread_rwlock_t is opaque; consumers should not access it directly.\n    // It is included here so that sizeof(shm_t) is correct for mmap calculations.\n    // On Linux x86_64 this is typically 56 bytes.\n    char _rwlock_storage[56]; // placeholder for pthread_rwlock_t\n    volptr_t cursor;\n} shm_t;\n\n// Block header preceding every allocation inside a shared memory volume.\n// Atomic reference count for thread safety. Layout is stable (no padding).\ntypedef struct block_header_s {\n    unsigned int magic;\n    unsigned int reference_count; // actually _Atomic in the C impl\n    size_t size;\n} block_header_t;\n\n// ========================================================================\n// Section 3: Schema types\n// ========================================================================\n\ntypedef enum {\n    MORLOC_NIL,\n    MORLOC_BOOL,\n    MORLOC_SINT8,\n    MORLOC_SINT16,\n    MORLOC_SINT32,\n    MORLOC_SINT64,\n    MORLOC_UINT8,\n    MORLOC_UINT16,\n    MORLOC_UINT32,\n    MORLOC_UINT64,\n    MORLOC_FLOAT32,\n    MORLOC_FLOAT64,\n    MORLOC_TENSOR,\n    MORLOC_STRING,\n    MORLOC_ARRAY,\n    MORLOC_TUPLE,\n    MORLOC_MAP,\n    MORLOC_OPTIONAL\n} morloc_serial_type;\n\n// Single-character schema encoding tokens.\n#define SCHEMA_NIL      'z'\n#define SCHEMA_BOOL     'b'\n#define SCHEMA_SINT     'i'\n#define SCHEMA_UINT     'u'\n#define SCHEMA_FLOAT    'f'\n#define SCHEMA_STRING   's'\n#define SCHEMA_ARRAY    'a'\n#define SCHEMA_TENSOR   'T'\n#define SCHEMA_TUPLE    't'\n#define SCHEMA_MAP      'm'\n#define SCHEMA_OPTIONAL '?'\n\n// Schema: recursive type descriptor used for serialisation/deserialisation.\nstruct Schema;\ntypedef struct Schema {\n    morloc_serial_type type;\n    size_t size;       // number of parameters\n    size_t width;      // bytes per element when stored in a fixed-width array\n    size_t* offsets;   // field offsets (tuples) or ndim (tensors, in offsets[0])\n    char* hint;\n    struct Schema** parameters;\n    char** keys;       // field names (records only)\n} Schema;\n\n// Variable-length array in voidstar representation.\ntypedef struct Array {\n    size_t size;\n    relptr_t data;\n} Array;\n\n// Dense N-dimensional tensor in voidstar representation (row-major / C order).\ntypedef struct Tensor {\n    size_t total_elements;\n    uint32_t device_type;   // reserved: 0 = CPU\n    uint32_t device_id;     // reserved: 0\n    relptr_t data;          // relptr to contiguous element data\n    relptr_t shape;         // relptr to int64_t[ndim]\n} Tensor;\n\n// ========================================================================\n// Section 4: Packet types\n// ========================================================================\n\n#define MORLOC_PACKET_MAGIC 0x0707f86d\n\n// Packet type discriminator.\ntypedef uint8_t command_type_t;\n#define PACKET_TYPE_DATA  ((command_type_t)0)\n#define PACKET_TYPE_CALL  ((command_type_t)1)\n#define PACKET_TYPE_PING  ((command_type_t)2)\n\n// Packed command sub-structs (all 8 bytes wide).\ntypedef struct __attribute__((packed)) packet_command_type_s {\n    command_type_t type;\n    uint8_t padding[7];\n} packet_command_type_t;\n\n#define PACKET_ENTRYPOINT_LOCAL      0x00\n#define PACKET_ENTRYPOINT_REMOTE_SFS 0x01\n\ntypedef struct __attribute__((packed)) packet_command_call_s {\n    command_type_t type;\n    uint8_t entrypoint;\n    uint8_t padding[2];\n    uint32_t midx;\n} packet_command_call_t;\n\n// Data source, format, compression, encryption, and status constants.\n#define PACKET_SOURCE_MESG  0x00\n#define PACKET_SOURCE_FILE  0x01\n#define PACKET_SOURCE_RPTR  0x02\n\n#define PACKET_FORMAT_JSON     0x00\n#define PACKET_FORMAT_MSGPACK  0x01\n#define PACKET_FORMAT_TEXT     0x02\n#define PACKET_FORMAT_DATA     0x03\n#define PACKET_FORMAT_VOIDSTAR 0x04\n#define PACKET_FORMAT_ARROW    0x05\n\n#define PACKET_COMPRESSION_NONE 0x00\n#define PACKET_ENCRYPTION_NONE  0x00\n\n#define PACKET_STATUS_PASS 0x00\n#define PACKET_STATUS_FAIL 0x01\n\ntypedef struct __attribute__((packed)) packet_command_data_s {\n    command_type_t type;\n    uint8_t source;\n    uint8_t format;\n    uint8_t compression;\n    uint8_t encryption;\n    uint8_t status;\n    uint8_t padding[2];\n} packet_command_data_t;\n\ntypedef struct __attribute__((packed)) packet_command_ping_s {\n    command_type_t type;\n    uint8_t padding[7];\n} packet_command_ping_t;\n\ntypedef union __attribute__((packed)) packet_command_u {\n    packet_command_type_t cmd_type;\n    packet_command_call_t call;\n    packet_command_data_t data;\n    packet_command_ping_t ping;\n} packet_command_t;\n\n// 32-byte packet header (stable binary format, packed).\ntypedef struct __attribute__((packed)) morloc_packet_header_s {\n    uint32_t magic;\n    uint16_t plain;\n    uint16_t version;\n    uint16_t flavor;\n    uint16_t mode;\n    packet_command_t command;\n    uint32_t offset;\n    uint64_t length;\n} morloc_packet_header_t;\n\n// Inline threshold: voidstar data <= this size is embedded in packet payload.\n#define MORLOC_INLINE_THRESHOLD (64 * 1024)\n\n// Metadata sub-header in packet metadata sections.\n#define MORLOC_METADATA_TYPE_SCHEMA_STRING 0x01\n#define MORLOC_METADATA_TYPE_XXHASH        0x02\n#define MORLOC_METADATA_HEADER_MAGIC       \"mmh\"\n\ntypedef struct __attribute__((packed)) morloc_metadata_header_s {\n    char magic[3];\n    uint8_t type;\n    uint32_t size;\n} morloc_metadata_header_t;\n\n// ========================================================================\n// Section 5: Expression / eval types\n// ========================================================================\n\ntypedef struct argument_s {\n    char* value;\n    char** fields;\n    char** default_fields;\n    size_t size;\n} argument_t;\n\ntypedef enum {\n    MORLOC_X_DAT,\n    MORLOC_X_APP,\n    MORLOC_X_LAM,\n    MORLOC_X_BND,\n    MORLOC_X_PAT,\n    MORLOC_X_FMT,\n    MORLOC_X_SHOW,\n    MORLOC_X_READ\n} morloc_expression_type;\n\ntypedef enum { APPLY_PATTERN, APPLY_LAMBDA, APPLY_FORMAT } morloc_app_expression_type;\n\ntypedef enum { SELECT_BY_KEY, SELECT_BY_INDEX, SELECT_END } morloc_pattern_type;\n\n// Forward declarations.\ntypedef struct morloc_expression_s morloc_expression_t;\ntypedef struct morloc_app_expression_s morloc_app_expression_t;\ntypedef struct morloc_lam_expression_s morloc_lam_expression_t;\ntypedef struct morloc_data_s morloc_data_t;\ntypedef struct morloc_pattern_s morloc_pattern_t;\n\ntypedef union primitive_u {\n    char*    s;\n    uint8_t  z;\n    bool     b;\n    int8_t   i1;\n    int16_t  i2;\n    int32_t  i4;\n    int64_t  i8;\n    uint8_t  u1;\n    uint16_t u2;\n    uint32_t u4;\n    uint64_t u8;\n    float    f4;\n    double   f8;\n} primitive_t;\n\ntypedef struct morloc_data_array_s {\n    Schema* schema;\n    size_t size;\n    morloc_expression_t** values;\n} morloc_data_array_t;\n\ntypedef struct morloc_data_s {\n    bool is_voidstar;\n    union {\n        primitive_t lit_val;\n        morloc_expression_t** tuple_val;\n        morloc_data_array_t* array_val;\n        void* voidstar;\n    } data;\n} morloc_data_t;\n\ntypedef struct morloc_app_expression_s {\n    morloc_app_expression_type type;\n    union {\n        morloc_pattern_t* pattern;\n        morloc_lam_expression_t* lambda;\n        char** fmt;\n    } function;\n    morloc_expression_t** args;\n    size_t nargs;\n} morloc_app_expression_t;\n\ntypedef struct morloc_lam_expression_s {\n    size_t nargs;\n    char** args;\n    morloc_expression_t* body;\n} morloc_lam_expression_t;\n\ntypedef struct morloc_pattern_s {\n    morloc_pattern_type type;\n    size_t size;\n    union {\n        size_t* indices;\n        char** keys;\n    } fields;\n    morloc_pattern_t** selectors;\n} morloc_pattern_t;\n\ntypedef struct morloc_expression_s {\n    morloc_expression_type type;\n    Schema* schema;\n    union {\n        morloc_app_expression_t* app_expr;\n        morloc_lam_expression_t* lam_expr;\n        char* bnd_expr;\n        char** interpolation;\n        morloc_pattern_t* pattern_expr;\n        morloc_data_t* data_expr;\n        morloc_expression_t* unary_expr;\n    } expr;\n} morloc_expression_t;\n\n// ========================================================================\n// Section 6: Manifest types\n// ========================================================================\n\ntypedef struct {\n    char* lang;\n    char** exec;      // NULL-terminated array\n    char* socket;     // socket basename\n} manifest_pool_t;\n\ntypedef enum {\n    MARG_POS = 0,\n    MARG_OPT,\n    MARG_FLAG,\n    MARG_GRP\n} manifest_arg_kind_t;\n\ntypedef struct manifest_arg_s manifest_arg_t;\n\ntypedef struct {\n    char* key;\n    manifest_arg_t* arg;\n} manifest_grp_entry_t;\n\nstruct manifest_arg_s {\n    manifest_arg_kind_t kind;\n    char** desc;\n    char* metavar;\n    char* type_desc;\n    bool quoted;\n    char short_opt;\n    char* long_opt;\n    char* long_rev;\n    char* default_val;\n    char grp_short;\n    char* grp_long;\n    manifest_grp_entry_t* entries;\n    size_t n_entries;\n};\n\ntypedef struct {\n    char* name;\n    char** desc;\n} manifest_cmd_group_t;\n\ntypedef struct {\n    char* name;\n    bool is_pure;\n    uint32_t mid;\n    size_t pool_index;\n    size_t* needed_pools;\n    size_t n_needed_pools;\n    char** arg_schemas;\n    char* return_schema;\n    char** desc;\n    char* return_type;\n    char** return_desc;\n    manifest_arg_t* args;\n    size_t n_args;\n    morloc_expression_t* expr;\n    char* group;\n} manifest_command_t;\n\ntypedef struct {\n    char* type;\n    char* host;\n    int port;\n    char* socket;\n} manifest_service_t;\n\ntypedef struct {\n    int version;\n    char* name;\n    char* build_dir;\n    manifest_pool_t* pools;\n    size_t n_pools;\n    manifest_command_t* commands;\n    size_t n_commands;\n    manifest_cmd_group_t* groups;\n    size_t n_groups;\n    manifest_service_t* service;\n} manifest_t;\n\n// ========================================================================\n// Section 7: Daemon / HTTP / Router types\n// ========================================================================\n\n// -- Call types --\n\ntypedef struct morloc_call_s {\n    uint32_t midx;\n    uint8_t** args;\n    size_t nargs;\n    int owns_args;\n} morloc_call_t;\n\ntypedef struct client_list_s {\n    int fd;\n    struct client_list_s* next;\n} client_list_t;\n\ntypedef struct language_daemon_s {\n    char* socket_path;\n    char* tmpdir;\n    char* shm_basename;\n    shm_t* shm;\n    size_t shm_default_size;\n    int server_fd;\n    fd_set read_fds;\n    client_list_t* client_fds;\n} language_daemon_t;\n\ntypedef struct morloc_socket_s {\n    char* lang;\n    char** syscmd;\n    char* socket_filename;\n    int pid;\n} morloc_socket_t;\n\n// -- Binding store --\n\ntypedef struct binding_entry_s {\n    uint64_t hash;\n    char* expr;\n    char* artifact_dir;\n    char* type_sig;\n    char** names;\n    size_t n_names;\n} binding_entry_t;\n\ntypedef struct binding_store_s {\n    binding_entry_t* entries;\n    size_t capacity;\n    size_t count;\n    char* base_dir;\n    char* names_path;\n} binding_store_t;\n\n// -- Daemon config and request/response --\n\ntypedef void (*pool_check_fn_t)(morloc_socket_t* sockets, size_t n_pools);\ntypedef bool (*pool_alive_fn_t)(size_t pool_index);\n\ntypedef struct daemon_config_s {\n    const char* unix_socket_path;\n    int tcp_port;\n    int http_port;\n    pool_check_fn_t pool_check_fn;\n    pool_alive_fn_t pool_alive_fn;\n    size_t n_pools;\n    int eval_timeout;\n} daemon_config_t;\n\ntypedef enum {\n    DAEMON_CALL,\n    DAEMON_DISCOVER,\n    DAEMON_HEALTH,\n    DAEMON_EVAL,\n    DAEMON_TYPECHECK,\n    DAEMON_BIND,\n    DAEMON_BINDINGS,\n    DAEMON_UNBIND\n} daemon_method_t;\n\ntypedef struct daemon_request_s {\n    char* id;\n    daemon_method_t method;\n    char* command;\n    char* args_json;\n    char* expr;\n    char* name;\n} daemon_request_t;\n\ntypedef struct daemon_response_s {\n    char* id;\n    bool success;\n    char* result_json;\n    char* error;\n} daemon_response_t;\n\n// -- HTTP types --\n\ntypedef enum {\n    HTTP_GET,\n    HTTP_POST,\n    HTTP_DELETE,\n    HTTP_OPTIONS\n} http_method_t;\n\ntypedef struct http_request_s {\n    http_method_t method;\n    char path[256];\n    char* body;\n    size_t body_len;\n} http_request_t;\n\n// -- Router types --\n\ntypedef struct router_program_s {\n    char* name;\n    char* manifest_path;\n    manifest_t* manifest;\n    pid_t daemon_pid;\n    char daemon_socket[sizeof(((struct sockaddr_un*)0)->sun_path)];\n} router_program_t;\n\ntypedef struct router_s {\n    router_program_t* programs;\n    size_t n_programs;\n    char* fdb_path;\n} router_t;\n\n// ========================================================================\n// Section 8: Pool types\n// ========================================================================\n\ntypedef uint8_t* (*pool_dispatch_fn_t)(\n    uint32_t mid,\n    const uint8_t** args,\n    size_t nargs,\n    void* ctx\n);\n\ntypedef enum {\n    POOL_THREADS,\n    POOL_FORK,\n    POOL_SINGLE\n} pool_concurrency_t;\n\ntypedef struct {\n    pool_dispatch_fn_t local_dispatch;\n    pool_dispatch_fn_t remote_dispatch;\n    void* dispatch_ctx;\n    pool_concurrency_t concurrency;\n    int initial_workers;\n    bool dynamic_scaling;\n    void (*post_fork_child)(void* ctx);\n} pool_config_t;\n\ntypedef struct pool_state_s pool_state_t;\n\n// ========================================================================\n// Section 9: Arrow types\n// ========================================================================\n\n#ifndef ARROW_C_DATA_INTERFACE\n#define ARROW_C_DATA_INTERFACE\n\nstruct ArrowSchema {\n    const char* format;\n    const char* name;\n    const char* metadata;\n    int64_t flags;\n    int64_t n_children;\n    struct ArrowSchema** children;\n    struct ArrowSchema* dictionary;\n    void (*release)(struct ArrowSchema*);\n    void* private_data;\n};\n\nstruct ArrowArray {\n    int64_t length;\n    int64_t null_count;\n    int64_t offset;\n    int64_t n_buffers;\n    int64_t n_children;\n    const void** buffers;\n    struct ArrowArray** children;\n    struct ArrowArray* dictionary;\n    void (*release)(struct ArrowArray*);\n    void* private_data;\n};\n\n#endif // ARROW_C_DATA_INTERFACE\n\n#define ARROW_SHM_MAGIC    0xA770DA7A\n#define ARROW_BUFFER_ALIGN 64\n#define ARROW_ALIGN_UP(x)  (((x) + ARROW_BUFFER_ALIGN - 1) & ~((size_t)ARROW_BUFFER_ALIGN - 1))\n\ntypedef struct arrow_column_desc {\n    morloc_serial_type type;\n    uint64_t length;\n    uint64_t null_count;\n    uint32_t name_offset;\n    uint16_t name_length;\n    uint64_t data_offset;\n    uint64_t data_size;\n} arrow_column_desc_t;\n\ntypedef struct arrow_shm_header {\n    uint32_t magic;\n    uint32_t n_columns;\n    uint64_t n_rows;\n    uint64_t total_size;\n} arrow_shm_header_t;\n\n// ========================================================================\n// Section 10: Slurm / resource types\n// ========================================================================\n\n#define MAX_SLURM_COMMAND_LENGTH 1024\n\ntypedef struct resources_s {\n    int memory; // in GB\n    int time;   // walltime in seconds\n    int cpus;\n    int gpus;\n} resources_t;\n\n// ========================================================================\n// Section 11: Function declarations -- Memory / SHM\n// ========================================================================\n\nshm_t* shinit(const char* shm_basename, size_t volume_index, size_t shm_size, ERRMSG);\nshm_t* shopen(size_t volume_index, ERRMSG);\nbool shclose(ERRMSG);\nvoid shm_set_fallback_dir(const char* dir);\nvoid* shmalloc(size_t size, ERRMSG);\nvoid* shmemcpy(void* src, size_t size, ERRMSG);\nbool shfree(absptr_t ptr, ERRMSG);\nbool shincref(absptr_t ptr, ERRMSG);\nbool shfree_by_schema(absptr_t ptr, const Schema* schema, ERRMSG);\nvoid* shcalloc(size_t nmemb, size_t size, ERRMSG);\nvoid* shrealloc(void* ptr, size_t size, ERRMSG);\nsize_t total_shm_size(void);\nvolptr_t rel2vol(relptr_t ptr, ERRMSG);\nabsptr_t rel2abs(relptr_t ptr, ERRMSG);\n\n// Convenience: resolve a relptr, using base_ptr if available (no SHM lookup needed).\nstatic inline void* resolve_relptr(relptr_t relptr, const void* base_ptr, ERRMSG) {\n    if (base_ptr) {\n        return (char*)base_ptr + relptr;\n    }\n    return rel2abs(relptr, errmsg_);\n}\nrelptr_t vol2rel(volptr_t ptr, shm_t* shm);\nabsptr_t vol2abs(volptr_t ptr, shm_t* shm);\nrelptr_t abs2rel(absptr_t ptr, ERRMSG);\nshm_t* abs2shm(absptr_t ptr, ERRMSG);\nblock_header_t* abs2blk(void* ptr, ERRMSG);\n\n// ========================================================================\n// Section 12: Function declarations -- Schema\n// ========================================================================\n\nSchema* parse_schema(const char* schema, ERRMSG);\nchar* schema_to_string(const Schema* schema);\nvoid* get_ptr(const Schema* schema, ERRMSG);\nvoid free_schema(Schema* schema);\nbool schema_is_fixed_width(const Schema* schema);\nsize_t schema_alignment(const Schema* schema);\nsize_t calculate_voidstar_size(const void* data, const Schema* schema, ERRMSG);\n\n// Inline helpers used by language extensions (pymorloc.c, rmorloc.c)\n#define ALIGN_UP(x, align) (((x) + (align) - 1) & ~((size_t)(align) - 1))\n\nstatic inline size_t schema_tensor_ndim(const Schema* schema) {\n    if (schema == NULL || schema->size == 0) return 0;\n    // ndim is stored in offsets[0] for tensor schemas\n    return schema->offsets ? schema->offsets[0] : 0;\n}\n\n// ========================================================================\n// Section 13: Function declarations -- Serialisation (pack/unpack)\n// ========================================================================\n\nint pack(const void* mlc, const char* schema_str, char** mpkptr, size_t* mpk_size, ERRMSG);\nint pack_with_schema(const void* mlc, const Schema* schema, char** mpkptr, size_t* mpk_size, ERRMSG);\nint unpack_with_schema(const char* mpk, size_t mpk_size, const Schema* schema, void** mlcptr, ERRMSG);\n\n// ========================================================================\n// Section 14: Function declarations -- Packets\n// ========================================================================\n\nmorloc_packet_header_t* read_morloc_packet_header(const uint8_t* msg, ERRMSG);\nbool packet_is_ping(const uint8_t* packet, ERRMSG);\nbool packet_is_local_call(const uint8_t* packet, ERRMSG);\nbool packet_is_remote_call(const uint8_t* packet, ERRMSG);\nsize_t morloc_packet_size_from_header(const morloc_packet_header_t* header);\nsize_t morloc_packet_size(const uint8_t* packet, ERRMSG);\nuint8_t* return_ping(const uint8_t* packet, ERRMSG);\nuint8_t* make_ping_packet(void);\nuint8_t* make_standard_data_packet(relptr_t ptr, const Schema* schema);\nuint8_t* make_arrow_data_packet(relptr_t ptr, const Schema* schema);\nuint8_t* make_mpk_data_packet(const char* mpk_filename, const Schema* schema);\nuint8_t* make_data_packet_from_mpk(const char* mpk, size_t mpk_size, const Schema* schema);\nint get_data_packet_as_mpk(const uint8_t* packet, const Schema* schema, char** mpk_out, size_t* mpk_size_out, ERRMSG);\nchar* read_schema_from_packet_meta(const uint8_t* packet, ERRMSG);\nuint8_t* make_fail_packet(const char* failure_message);\nchar* get_morloc_data_packet_error_message(const uint8_t* data, ERRMSG);\nuint8_t* get_morloc_data_packet_value(const uint8_t* data, const Schema* schema, ERRMSG);\nuint8_t* make_morloc_local_call_packet(uint32_t midx, const uint8_t** arg_packets, size_t nargs, ERRMSG);\nuint8_t* make_morloc_remote_call_packet(uint32_t midx, const uint8_t** arg_packets, size_t nargs, ERRMSG);\nmorloc_call_t* read_morloc_call_packet(const uint8_t* packet, ERRMSG);\nvoid free_morloc_call(morloc_call_t* call);\nint print_morloc_data_packet(const uint8_t* packet, const Schema* schema, ERRMSG);\nint flatten_voidstar_to_buffer(const void* data, const Schema* schema, uint8_t** out_buf, size_t* out_size, ERRMSG);\nuint8_t* make_data_packet_auto(void* voidstar, relptr_t relptr, const Schema* schema, ERRMSG);\nint adjust_voidstar_relptrs(void* data, const Schema* schema, relptr_t base_rel, ERRMSG);\nvoid* read_voidstar_binary(const uint8_t* blob, size_t blob_size, const Schema* schema, ERRMSG);\nbool parse_morloc_call_arguments(uint8_t* packet, uint8_t** args, size_t* nargs, ERRMSG);\nbool hash_morloc_packet(const uint8_t* packet, const Schema* schema, uint64_t seed, uint64_t* hash, ERRMSG);\n\n// ========================================================================\n// Section 15: Function declarations -- Printing / output\n// ========================================================================\n\nchar* quoted(const char* input);\nbool print_voidstar(const void* voidstar, const Schema* schema, ERRMSG);\nbool pretty_print_voidstar(const void* voidstar, const Schema* schema, ERRMSG);\nbool print_arrow_as_json(const void* data, ERRMSG);\nbool print_arrow_as_table(const void* data, ERRMSG);\nbool print_hex_dump(const uint8_t* data, size_t size, ERRMSG);\nchar* voidstar_to_json_string(const void* voidstar, const Schema* schema, ERRMSG);\n\n// ========================================================================\n// Section 16: Function declarations -- Daemon / socket communication\n// ========================================================================\n\nvoid close_socket(int socket_id);\nvoid close_daemon(language_daemon_t** daemon_ptr);\nlanguage_daemon_t* start_daemon(\n    const char* socket_path, const char* tmpdir,\n    const char* shm_basename, size_t shm_default_size, ERRMSG);\nuint8_t* stream_from_client_wait(int client_fd, int pselect_timeout_us, int recv_timeout_us, ERRMSG);\nuint8_t* stream_from_client(int client_fd, ERRMSG);\nuint8_t* send_and_receive_over_socket_wait(\n    const char* socket_path, const uint8_t* packet,\n    int pselect_timeout_us, int recv_timeout_us, ERRMSG);\nuint8_t* send_and_receive_over_socket(const char* socket_path, const uint8_t* packet, ERRMSG);\nsize_t send_packet_to_foreign_server(int client_fd, uint8_t* packet, ERRMSG);\nint wait_for_client_with_timeout(language_daemon_t* daemon, int timeout_us, ERRMSG);\nint wait_for_client(language_daemon_t* daemon, ERRMSG);\n\n// Daemon event loop and dispatch.\nvoid daemon_run(daemon_config_t* config, manifest_t* manifest,\n                morloc_socket_t* sockets, size_t n_pools,\n                const char* shm_basename);\ndaemon_response_t* daemon_dispatch(manifest_t* manifest,\n                                   daemon_request_t* request,\n                                   morloc_socket_t* sockets,\n                                   const char* shm_basename);\ndaemon_request_t* daemon_parse_request(const char* json, size_t len, ERRMSG);\ndaemon_response_t* daemon_parse_response(const char* json, size_t len, ERRMSG);\nchar* daemon_serialize_response(daemon_response_t* response, size_t* out_len);\nchar* daemon_build_discovery(manifest_t* manifest);\nvoid daemon_set_eval_timeout(int timeout_sec);\nvoid daemon_free_request(daemon_request_t* req);\nvoid daemon_free_response(daemon_response_t* resp);\n\n// Binding store (public types only; internal hash table functions are Rust-side).\nbinding_store_t* binding_store_init(const char* base_dir);\nvoid binding_store_free(binding_store_t* store);\n\n// ========================================================================\n// Section 17: Function declarations -- HTTP\n// ========================================================================\n\nhttp_request_t* http_parse_request(int fd, ERRMSG);\nbool http_write_response(int fd, int status, const char* content_type,\n                         const char* body, size_t body_len);\ndaemon_request_t* http_to_daemon_request(http_request_t* req, ERRMSG);\nvoid http_free_request(http_request_t* req);\n\n// ========================================================================\n// Section 18: Function declarations -- Router\n// ========================================================================\n\nrouter_t* router_init(const char* fdb_path, ERRMSG);\nvoid router_run(daemon_config_t* config, router_t* router);\nbool router_start_program(router_program_t* prog, ERRMSG);\ndaemon_response_t* router_forward(router_t* router, const char* program,\n                                  daemon_request_t* request, ERRMSG);\nchar* router_build_discovery(router_t* router);\nvoid router_free(router_t* router);\n\n// ========================================================================\n// Section 19: Function declarations -- Pool\n// ========================================================================\n\nint pool_main(int argc, char** argv, pool_config_t* config);\nuint8_t* pool_dispatch_packet(\n    const uint8_t* packet,\n    pool_dispatch_fn_t local_dispatch,\n    pool_dispatch_fn_t remote_dispatch,\n    void* ctx);\nvoid pool_mark_busy(void);\nvoid pool_mark_idle(void);\n\n// ========================================================================\n// Section 20: Function declarations -- Arrow\n// ========================================================================\n\nsize_t arrow_element_size(morloc_serial_type type);\nconst char* arrow_format_string(morloc_serial_type type);\nmorloc_serial_type arrow_format_to_type(const char* format);\nrelptr_t arrow_to_shm(const struct ArrowArray* array, const struct ArrowSchema* schema, ERRMSG);\nint arrow_validate(const arrow_shm_header_t* header, const Schema* schema, ERRMSG);\nconst void* arrow_column_data(const arrow_shm_header_t* header, uint32_t col_index);\nconst arrow_column_desc_t* arrow_column_desc(const arrow_shm_header_t* header, uint32_t col_index);\nconst char* arrow_column_name(const arrow_shm_header_t* header, uint32_t col_index);\nint arrow_from_shm(const arrow_shm_header_t* header,\n                   struct ArrowSchema* out_schema,\n                   struct ArrowArray* out_array, ERRMSG);\n\n// ========================================================================\n// Section 21: Function declarations -- Cache\n// ========================================================================\n\nchar* put_cache_packet(const uint8_t* voidstar, const Schema* schema, uint64_t key, const char* cache_path, ERRMSG);\nuint8_t* get_cache_packet(uint64_t key, const char* cache_path, ERRMSG);\nbool del_cache_packet(uint64_t key, const char* cache_path, ERRMSG);\nchar* check_cache_packet(uint64_t key, const char* cache_path, ERRMSG);\n\n// ========================================================================\n// Section 22: Function declarations -- CLI / argument parsing\n// ========================================================================\n\nargument_t* initialize_positional(char* value);\nargument_t* initialize_unrolled(size_t size, char* default_value, char** fields, char** default_fields);\nvoid free_argument_t(argument_t* arg);\nuint8_t* parse_cli_data_argument(uint8_t* dest, const argument_t* arg, const Schema* schema, ERRMSG);\nuint8_t* make_call_packet_from_cli(\n    uint8_t* dest, uint32_t mid,\n    argument_t** args, char** arg_schema_strs, ERRMSG);\nvoid* load_morloc_data_file(const char* path, uint8_t* data, size_t data_size, const Schema* schema, ERRMSG);\n\n// ========================================================================\n// Section 23: Function declarations -- Expression evaluation\n// ========================================================================\n\nmorloc_expression_t* make_morloc_bound_var(const char* schema_str, char* varname, ERRMSG);\nmorloc_expression_t* make_morloc_literal(const char* schema_str, primitive_t lit, ERRMSG);\nmorloc_expression_t* make_morloc_pattern(const char* schema_str, morloc_pattern_t* pattern, ERRMSG);\nmorloc_pattern_t* make_morloc_pattern_end(void);\nabsptr_t morloc_eval(\n    morloc_expression_t* expr, Schema* return_schema,\n    uint8_t** arg_voidstar, Schema** arg_schemas, size_t nargs, ERRMSG);\n\n// ========================================================================\n// Section 24: Function declarations -- Manifest\n// ========================================================================\n\nmanifest_t* parse_manifest(const char* text, ERRMSG);\nmanifest_t* read_manifest(const char* path, ERRMSG);\nvoid free_manifest(manifest_t* manifest);\nmorloc_expression_t* build_manifest_expr(const char* json_str, ERRMSG);\nchar* manifest_to_discovery_json(const manifest_t* manifest);\n\n// ========================================================================\n// Section 25: Function declarations -- Intrinsics\n// ========================================================================\n\nint mlc_save(const absptr_t data, const Schema* schema, const char* path, ERRMSG);\nint mlc_save_json(const absptr_t data, const Schema* schema, const char* path, ERRMSG);\nint mlc_save_voidstar(const absptr_t data, const Schema* schema, const char* path, ERRMSG);\nvoid* mlc_load(const char* path, const Schema* schema, ERRMSG);\nchar* mlc_hash(const absptr_t data, const Schema* schema, ERRMSG);\nchar* mlc_show(const absptr_t data, const Schema* schema, ERRMSG);\nvoid* mlc_read(const char* json_str, const Schema* schema, ERRMSG);\nrelptr_t write_voidstar_binary(int fd, const void* data, const Schema* schema, ERRMSG);\n\n// ========================================================================\n// Section 26: Function declarations -- Slurm\n// ========================================================================\n\nsize_t parse_slurm_time(const char* time_str, ERRMSG);\nchar* write_slurm_time(int seconds);\nbool slurm_job_is_complete(uint32_t job_id);\nuint32_t submit_morloc_slurm_job(\n    const char* nexus_path,\n    const char* socket_basename,\n    const char* call_packet_filename,\n    const char* result_cache_filename,\n    const char* output_filename,\n    const char* error_filename,\n    const resources_t* resources,\n    ERRMSG);\nuint8_t* remote_call(\n    int midx,\n    const char* socket_basename,\n    const char* cache_path,\n    const resources_t* resources,\n    const uint8_t** arg_packets,\n    size_t nargs,\n    ERRMSG);\n\n// ========================================================================\n// Section 27: Function declarations -- Utility\n// ========================================================================\n\nvoid hex(const void* ptr, size_t size);\nbool file_exists(const char* filename);\nint mkdir_p(const char* path, ERRMSG);\nvoid delete_directory(const char* path);\nbool has_suffix(const char* x, const char* suffix);\nint write_atomic(const char* filename, const uint8_t* data, size_t size, ERRMSG);\nint write_binary_fd(int fd, const char* buf, size_t count, ERRMSG);\nint print_binary(const char* buf, size_t count, ERRMSG);\nuint8_t* read_binary_fd(FILE* file, size_t* file_size, ERRMSG);\nuint8_t* read_binary_file(const char* filename, size_t* file_size, ERRMSG);\n\n// ========================================================================\n// Section 28: Function declarations -- Hashing\n// ========================================================================\n\nuint64_t morloc_xxh64(const void* input, size_t length, uint64_t seed);\n\n// ========================================================================\n// Section 29: Function declarations -- JSON reader\n// ========================================================================\n\nuint8_t* read_json_with_schema(uint8_t* voidstar, char* json_data, const Schema* schema, ERRMSG);\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif // __MORLOC_H__\n"
  },
  {
    "path": "data/rust/.gitignore",
    "content": "target/\n"
  },
  {
    "path": "data/rust/Cargo.toml",
    "content": "[workspace]\nmembers = [\"morloc-manifest\", \"morloc-runtime\", \"morloc-nexus\", \"morloc-manager\"]\nresolver = \"2\"\n\n[workspace.dependencies]\nlibc = \"0.2\"\nserde = { version = \"1\", features = [\"derive\"] }\nserde_json = \"1\"\nrmp-serde = \"1\"\ntwox-hash = \"2\"\nnix = { version = \"0.29\", features = [\"signal\", \"socket\", \"mman\", \"process\", \"fs\"] }\nclap = { version = \"4\", features = [\"derive\"] }\nthiserror = \"2\"\n\n[profile.release]\nopt-level = 2\nlto = \"thin\"\n"
  },
  {
    "path": "data/rust/morloc-manager/Cargo.toml",
    "content": "[package]\nname = \"morloc-manager\"\nversion = \"0.23.1\"\nedition = \"2021\"\ndescription = \"Container lifecycle manager for Morloc\"\n\n[[bin]]\nname = \"morloc-manager\"\npath = \"src/main.rs\"\n\n[dependencies]\nserde = { workspace = true }\nserde_json = { workspace = true }\nnix = { version = \"0.29\", features = [\"signal\", \"socket\", \"mman\", \"process\", \"fs\", \"user\"] }\nclap = { workspace = true }\nthiserror = { workspace = true }\nsha2 = \"0.10\"\nchrono = { version = \"0.4\", features = [\"serde\"] }\ndirs = \"6\"\n\n[dev-dependencies]\ntempfile = \"3\"\n"
  },
  {
    "path": "data/rust/morloc-manager/src/config.rs",
    "content": "use std::fs;\nuse std::os::unix::fs::{OpenOptionsExt, PermissionsExt};\nuse std::path::{Path, PathBuf};\nuse std::process::Command as StdCommand;\n\nuse crate::error::{ManagerError, Result};\nuse crate::types::*;\n\n// ======================================================================\n// Path utilities\n// ======================================================================\n\npub fn config_dir(scope: Scope) -> PathBuf {\n    match scope {\n        Scope::Local => dirs::config_dir()\n            .unwrap_or_else(|| PathBuf::from(\"~/.config\"))\n            .join(\"morloc\"),\n        Scope::System => PathBuf::from(\"/etc/morloc\"),\n    }\n}\n\npub fn config_path(scope: Scope) -> PathBuf {\n    config_dir(scope).join(\"config.json\")\n}\n\npub fn data_dir(scope: Scope) -> PathBuf {\n    match scope {\n        Scope::Local => dirs::data_dir()\n            .unwrap_or_else(|| PathBuf::from(\"~/.local/share\"))\n            .join(\"morloc\"),\n        Scope::System => PathBuf::from(\"/usr/local/share/morloc\"),\n    }\n}\n\n// Environment paths\n\npub fn env_config_dir(scope: Scope, name: &str) -> PathBuf {\n    config_dir(scope).join(\"environments\").join(name)\n}\n\npub fn env_config_path(scope: Scope, name: &str) -> PathBuf {\n    env_config_dir(scope, name).join(\"env.json\")\n}\n\npub fn env_dockerfile_path(scope: Scope, name: &str) -> PathBuf {\n    env_config_dir(scope, name).join(\"Dockerfile\")\n}\n\npub fn env_flags_path(scope: Scope, name: &str) -> PathBuf {\n    env_config_dir(scope, name).join(\"env.flags\")\n}\n\npub fn env_data_dir(scope: Scope, name: &str) -> PathBuf {\n    data_dir(scope).join(\"environments\").join(name)\n}\n\n// ======================================================================\n// Reading configuration\n// ======================================================================\n\npub fn read_config<T: serde::de::DeserializeOwned>(path: &Path) -> Result<T> {\n    let bytes = fs::read(path).map_err(|e| {\n        if e.kind() == std::io::ErrorKind::PermissionDenied {\n            ManagerError::ConfigPermissionDenied(path.display().to_string())\n        } else {\n            ManagerError::ConfigNotFound(path.display().to_string())\n        }\n    })?;\n    serde_json::from_slice(&bytes).map_err(|e| ManagerError::ConfigParseError {\n        path: path.display().to_string(),\n        msg: e.to_string(),\n    })\n}\n\npub fn read_active_config() -> Option<Config> {\n    let local_path = config_path(Scope::Local);\n    if let Ok(cfg) = read_config::<Config>(&local_path) {\n        return Some(cfg);\n    }\n    let system_path = config_path(Scope::System);\n    read_config::<Config>(&system_path).ok()\n}\n\npub fn read_env_config(scope: Scope, name: &str) -> Result<EnvironmentConfig> {\n    read_config(&env_config_path(scope, name))\n}\n\n// ======================================================================\n// Writing configuration\n// ======================================================================\n\npub fn write_config<T: serde::Serialize>(path: &Path, val: &T) -> Result<()> {\n    let dir = path.parent().unwrap();\n    fs::create_dir_all(dir).map_err(|e| ManagerError::ConfigParseError {\n        path: path.display().to_string(),\n        msg: e.to_string(),\n    })?;\n    best_effort_chmod(dir, 0o755);\n\n    let lock_path = format!(\"{}.lock\", path.display());\n    with_file_lock(&lock_path, || {\n        // Atomic write: temp file then rename\n        let tmp_path = path.with_extension(\"tmp\");\n        let json = serde_json::to_vec(val).map_err(|e| ManagerError::ConfigParseError {\n            path: path.display().to_string(),\n            msg: e.to_string(),\n        })?;\n        fs::write(&tmp_path, &json).map_err(|e| ManagerError::ConfigParseError {\n            path: path.display().to_string(),\n            msg: e.to_string(),\n        })?;\n        fs::rename(&tmp_path, path).map_err(|e| ManagerError::ConfigParseError {\n            path: path.display().to_string(),\n            msg: e.to_string(),\n        })?;\n        best_effort_chmod(path, 0o644);\n        Ok(())\n    })\n}\n\npub fn write_env_config(scope: Scope, name: &str, ec: &EnvironmentConfig) -> Result<()> {\n    write_config(&env_config_path(scope, name), ec)\n}\n\n// ======================================================================\n// Scope utilities\n// ======================================================================\n\n/// Find which scope an environment lives in. Checks local first, then system.\npub fn find_env_scope(name: &str) -> Result<Scope> {\n    let local_path = env_config_path(Scope::Local, name);\n    if local_path.is_file() {\n        return Ok(Scope::Local);\n    }\n    let sys_path = env_config_path(Scope::System, name);\n    if sys_path.is_file() {\n        return Ok(Scope::System);\n    }\n    Err(ManagerError::EnvironmentNotFound(name.to_string()))\n}\n\n/// List environment names in a given scope.\npub fn list_env_names(scope: Scope) -> Vec<String> {\n    let env_dir = config_dir(scope).join(\"environments\");\n    if !env_dir.is_dir() {\n        return Vec::new();\n    }\n    let Ok(entries) = fs::read_dir(&env_dir) else {\n        return Vec::new();\n    };\n    entries\n        .filter_map(|e| e.ok())\n        .filter(|e| e.path().join(\"env.json\").is_file())\n        .filter_map(|e| e.file_name().into_string().ok())\n        .collect()\n}\n\n\n// ======================================================================\n// Flags files\n// ======================================================================\n\npub fn read_flags_file(path: &Path) -> Vec<String> {\n    let Ok(contents) = fs::read_to_string(path) else {\n        return Vec::new();\n    };\n    contents\n        .lines()\n        .map(|line| line.trim())\n        .filter(|line| !line.is_empty() && !line.starts_with('#'))\n        .flat_map(shell_expand_line)\n        .collect()\n}\n\n/// Expand a single flagfile line through the shell, getting glob expansion,\n/// environment variable expansion, tilde expansion, and quote handling.\n/// Falls back to simple whitespace splitting if the shell invocation fails.\nfn shell_expand_line(line: &str) -> Vec<String> {\n    let output = StdCommand::new(\"sh\")\n        .args([\"-c\", &format!(\"printf '%s\\\\0' {}\", line)])\n        .output();\n    match output {\n        Ok(out) if out.status.success() => {\n            let stdout = String::from_utf8_lossy(&out.stdout);\n            let tokens: Vec<String> = stdout\n                .split('\\0')\n                .filter(|s| !s.is_empty())\n                .map(|s| s.to_string())\n                .collect();\n            if tokens.is_empty() {\n                line.split_whitespace().map(|s| s.to_string()).collect()\n            } else {\n                tokens\n            }\n        }\n        _ => line.split_whitespace().map(|s| s.to_string()).collect(),\n    }\n}\n\n/// Read flags file preserving one line per entry (for display).\npub fn read_flags_file_lines(path: &Path) -> Vec<String> {\n    let Ok(contents) = fs::read_to_string(path) else {\n        return Vec::new();\n    };\n    contents\n        .lines()\n        .map(|line| line.trim())\n        .filter(|line| !line.is_empty() && !line.starts_with('#'))\n        .map(|s| s.to_string())\n        .collect()\n}\n\n// ======================================================================\n// File locking\n// ======================================================================\n\nfn with_file_lock<F, T>(lock_path: &str, action: F) -> Result<T>\nwhere\n    F: FnOnce() -> Result<T>,\n{\n    if let Some(parent) = Path::new(lock_path).parent() {\n        let _ = fs::create_dir_all(parent);\n    }\n    let file = std::fs::OpenOptions::new()\n        .write(true)\n        .create(true)\n        .truncate(false)\n        .mode(0o644)\n        .open(lock_path)\n        .map_err(|e| {\n            if e.kind() == std::io::ErrorKind::PermissionDenied {\n                ManagerError::ConfigPermissionDenied(format!(\n                    \"{}. Use sudo for system-scope operations\", lock_path\n                ))\n            } else {\n                ManagerError::ConfigParseError {\n                    path: lock_path.to_string(),\n                    msg: format!(\"Failed to open lock file: {e}\"),\n                }\n            }\n        })?;\n\n    use nix::fcntl::FlockArg;\n    let locked = nix::fcntl::Flock::lock(file, FlockArg::LockExclusive).map_err(\n        |(_file, errno)| ManagerError::ConfigParseError {\n            path: lock_path.to_string(),\n            msg: format!(\"Failed to acquire lock: {errno}\"),\n        },\n    )?;\n\n    let result = action();\n\n    // Lock is released when Flock is dropped\n    drop(locked);\n    result\n}\n\n// ======================================================================\n// Internal\n// ======================================================================\n\nfn best_effort_chmod(path: &Path, mode: u32) {\n    let _ = fs::set_permissions(path, fs::Permissions::from_mode(mode));\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/container.rs",
    "content": "use std::io;\nuse std::process::{Command, ExitStatus, Stdio};\n\nuse crate::types::ContainerEngine;\n\n// ======================================================================\n// Configuration records\n// ======================================================================\n\n#[derive(Debug, Clone)]\npub struct RunConfig {\n    pub image: String,\n    pub bind_mounts: Vec<(String, String)>,\n    pub ports: Vec<(u16, u16)>,\n    pub env: Vec<(String, String)>,\n    pub read_only: bool,\n    pub interactive: bool,\n    pub remove_after: bool,\n    pub name: Option<String>,\n    pub shm_size: Option<String>,\n    pub command: Option<Vec<String>>,\n    pub work_dir: Option<String>,\n    pub selinux_suffix: String,\n    pub extra_flags: Vec<String>,\n}\n\nimpl RunConfig {\n    pub fn new(image: &str) -> Self {\n        Self {\n            image: image.to_string(),\n            bind_mounts: Vec::new(),\n            ports: Vec::new(),\n            env: Vec::new(),\n            read_only: false,\n            interactive: false,\n            remove_after: true,\n            name: None,\n            shm_size: None,\n            command: None,\n            work_dir: None,\n            selinux_suffix: String::new(),\n            extra_flags: Vec::new(),\n        }\n    }\n}\n\n#[derive(Debug, Clone)]\npub struct BuildConfig {\n    pub dockerfile: String,\n    pub context: String,\n    pub tag: String,\n    pub build_args: Vec<(String, String)>,\n}\n\n// ======================================================================\n// Engine detection\n// ======================================================================\n\npub fn engine_executable(engine: ContainerEngine) -> &'static str {\n    match engine {\n        ContainerEngine::Docker => \"docker\",\n        ContainerEngine::Podman => \"podman\",\n    }\n}\n\n// ======================================================================\n// Operations\n// ======================================================================\n\npub fn container_run(engine: ContainerEngine, cfg: &RunConfig) -> (ExitStatus, String, String) {\n    let exe = engine_executable(engine);\n    let extra = engine_specific_run_flags_io(engine);\n    let args = build_run_args(engine, &extra, cfg);\n    run_process(exe, &args)\n}\n\n/// Like `container_run` but captures both stdout and stderr (no streaming).\npub fn container_run_quiet(engine: ContainerEngine, cfg: &RunConfig) -> (ExitStatus, String, String) {\n    let exe = engine_executable(engine);\n    let extra = engine_specific_run_flags_io(engine);\n    let args = build_run_args(engine, &extra, cfg);\n    run_process_quiet(exe, &args)\n}\n\npub fn container_run_passthrough(\n    engine: ContainerEngine,\n    verbose: bool,\n    shell: bool,\n    cfg: &RunConfig,\n) -> ExitStatus {\n    let exe = engine_executable(engine);\n    let extra = engine_specific_run_flags_io(engine);\n    let args = build_run_args(engine, &extra, cfg);\n\n    if verbose || shell {\n        let quoted: Vec<String> = args\n            .iter()\n            .map(|a| {\n                if a.contains(' ') {\n                    format!(\"'{a}'\")\n                } else {\n                    a.clone()\n                }\n            })\n            .collect();\n        eprintln!(\"[morloc-manager] {exe} {}\", quoted.join(\" \"));\n    }\n\n    Command::new(exe)\n        .args(&args)\n        .stdin(Stdio::inherit())\n        .stdout(Stdio::inherit())\n        .stderr(Stdio::inherit())\n        .status()\n        .unwrap_or_else(|_| std::process::exit(1))\n}\n\npub fn container_build(engine: ContainerEngine, cfg: &BuildConfig) -> (ExitStatus, String, String) {\n    let exe = engine_executable(engine);\n    let args = build_build_args(cfg);\n    run_process(exe, &args)\n}\n\npub fn container_pull(engine: ContainerEngine, image: &str) -> (ExitStatus, String, String) {\n    let exe = engine_executable(engine);\n    run_process(exe, &[\"pull\".to_string(), image.to_string()])\n}\n\n/// Build a container image with all output (stdout+stderr) redirected to stderr.\n/// Use for IO () commands where stdout must stay clean.\npub fn container_build_visible(engine: ContainerEngine, cfg: &BuildConfig) -> ExitStatus {\n    let exe = engine_executable(engine);\n    let args = build_build_args(cfg);\n    run_process_to_stderr(exe, &args)\n}\n\n/// Pull a container image with all output (stdout+stderr) redirected to stderr.\n/// Use for IO () commands where stdout must stay clean.\npub fn container_pull_visible(engine: ContainerEngine, image: &str) -> ExitStatus {\n    let exe = engine_executable(engine);\n    run_process_to_stderr(exe, &[\"pull\".to_string(), image.to_string()])\n}\n\npub fn image_exists_locally(engine: ContainerEngine, image: &str) -> bool {\n    let exe = engine_executable(engine);\n    Command::new(exe)\n        .args([\"image\", \"inspect\", image])\n        .stdout(Stdio::null())\n        .stderr(Stdio::null())\n        .status()\n        .map(|s| s.success())\n        .unwrap_or(false)\n}\n\n/// Run `image inspect` and return the stderr if it fails.\n/// Returns None on success, Some(stderr) on failure.\npub fn image_inspect_stderr(engine: ContainerEngine, image: &str) -> Option<String> {\n    let exe = engine_executable(engine);\n    let output = Command::new(exe)\n        .args([\"image\", \"inspect\", image])\n        .stdout(Stdio::null())\n        .output()\n        .ok()?;\n    if output.status.success() {\n        None\n    } else {\n        Some(String::from_utf8_lossy(&output.stderr).to_string())\n    }\n}\n\n/// Result of checking whether a remote image exists.\npub enum RemoteImageStatus {\n    /// The image exists on the registry.\n    Exists,\n    /// The registry was reached but the image/tag was not found.\n    NotFound,\n    /// The check failed for an unknown reason (network, auth, etc).\n    /// Contains the stderr output from the container engine.\n    Unknown(String),\n}\n\npub fn check_remote_image(engine: ContainerEngine, image: &str) -> RemoteImageStatus {\n    let exe = engine_executable(engine);\n    let output = Command::new(exe)\n        .args([\"manifest\", \"inspect\", image])\n        .stdout(Stdio::null())\n        .output();\n\n    match output {\n        Ok(o) if o.status.success() => RemoteImageStatus::Exists,\n        Ok(o) => {\n            let stderr = String::from_utf8_lossy(&o.stderr).to_string();\n            let lower = stderr.to_lowercase();\n            // \"manifest unknown\" / \"not found\" / \"name unknown\" indicate\n            // the registry was reachable but the image doesn't exist.\n            if lower.contains(\"manifest unknown\")\n                || lower.contains(\"not found\")\n                || lower.contains(\"name unknown\")\n            {\n                RemoteImageStatus::NotFound\n            } else {\n                RemoteImageStatus::Unknown(stderr)\n            }\n        }\n        Err(e) => RemoteImageStatus::Unknown(format!(\"Failed to execute {exe}: {e}\")),\n    }\n}\n\npub fn container_stop(engine: ContainerEngine, name_or_id: &str) -> (ExitStatus, String) {\n    let exe = engine_executable(engine);\n    let (code, _, err) = run_process(exe, &[\"stop\".to_string(), name_or_id.to_string()]);\n    (code, err)\n}\n\npub fn container_remove(engine: ContainerEngine, name_or_id: &str) -> ExitStatus {\n    let exe = engine_executable(engine);\n    let (code, _, _) = run_process(exe, &[\"rm\".to_string(), \"-f\".to_string(), name_or_id.to_string()]);\n    code\n}\n\n/// Quiet container removal: suppresses stderr (for pre-emptive cleanup).\npub fn container_remove_quiet(engine: ContainerEngine, name_or_id: &str) -> ExitStatus {\n    let exe = engine_executable(engine);\n    let (code, _, _) = run_process_quiet(exe, &[\"rm\".to_string(), \"-f\".to_string(), name_or_id.to_string()]);\n    code\n}\n\n/// Check whether a container with this name exists (running or stopped).\npub fn container_exists(engine: ContainerEngine, name: &str) -> bool {\n    let exe = engine_executable(engine);\n    Command::new(exe)\n        .args([\"container\", \"inspect\", name])\n        .stdout(Stdio::null())\n        .stderr(Stdio::null())\n        .status()\n        .map(|s| s.success())\n        .unwrap_or(false)\n}\n\npub fn remove_image(engine: ContainerEngine, tag: &str) -> bool {\n    let exe = engine_executable(engine);\n    let (status, _, _) = run_process(exe, &[\"rmi\".to_string(), tag.to_string()]);\n    status.success()\n}\n\n// ======================================================================\n// CLI argument construction\n// ======================================================================\n\npub fn build_run_args(\n    engine: ContainerEngine,\n    extra_engine_flags: &[String],\n    cfg: &RunConfig,\n) -> Vec<String> {\n    let mut args = vec![\"run\".to_string()];\n    args.extend(extra_engine_flags.iter().cloned());\n\n    if cfg.remove_after {\n        args.push(\"--rm\".to_string());\n    }\n    if cfg.read_only {\n        args.push(\"--read-only\".to_string());\n        // Docker does not auto-mount a tmpfs at /tmp when --read-only is used\n        // (podman does). Pool daemons need a writable /tmp for temp files.\n        if engine == ContainerEngine::Docker {\n            args.push(\"--tmpfs\".to_string());\n            args.push(\"/tmp\".to_string());\n        }\n    }\n    // Always attach stdin so piped input works; only allocate a TTY for\n    // interactive (shell) sessions.\n    args.push(\"-i\".to_string());\n    if cfg.interactive {\n        args.push(\"-t\".to_string());\n    }\n    if let Some(ref n) = cfg.name {\n        args.push(\"--name\".to_string());\n        args.push(n.clone());\n    }\n    if let Some(ref s) = cfg.shm_size {\n        args.push(\"--shm-size\".to_string());\n        args.push(s.clone());\n    }\n    if let Some(ref w) = cfg.work_dir {\n        args.push(\"-w\".to_string());\n        args.push(w.clone());\n    }\n    for (host, container) in &cfg.bind_mounts {\n        args.push(\"-v\".to_string());\n        args.push(format!(\"{host}:{container}{}\", cfg.selinux_suffix));\n    }\n    for (host_port, container_port) in &cfg.ports {\n        args.push(\"-p\".to_string());\n        args.push(format!(\"{host_port}:{container_port}\"));\n    }\n    for (key, val) in &cfg.env {\n        args.push(\"-e\".to_string());\n        args.push(format!(\"{key}={val}\"));\n    }\n    args.extend(cfg.extra_flags.iter().cloned());\n    args.push(cfg.image.clone());\n    if let Some(ref cmd) = cfg.command {\n        args.extend(cmd.iter().cloned());\n    }\n    args\n}\n\npub fn engine_specific_run_flags_io(engine: ContainerEngine) -> Vec<String> {\n    let uid = nix::unistd::getuid();\n    match engine {\n        ContainerEngine::Podman => {\n            if uid.is_root() {\n                Vec::new()\n            } else {\n                vec![\"--userns=keep-id\".to_string()]\n            }\n        }\n        ContainerEngine::Docker => {\n            if uid.is_root() {\n                Vec::new()\n            } else {\n                let gid = nix::unistd::getgid();\n                vec![\"--user\".to_string(), format!(\"{}:{}\", uid, gid)]\n            }\n        }\n    }\n}\n\n/// Pure version for testing.\n#[cfg(test)]\npub fn engine_specific_run_flags(engine: ContainerEngine) -> Vec<String> {\n    match engine {\n        ContainerEngine::Podman => vec![\"--userns=keep-id\".to_string()],\n        ContainerEngine::Docker => Vec::new(),\n    }\n}\n\npub fn build_build_args(cfg: &BuildConfig) -> Vec<String> {\n    let mut args = vec![\n        \"build\".to_string(),\n        \"-f\".to_string(),\n        cfg.dockerfile.clone(),\n        \"-t\".to_string(),\n        cfg.tag.clone(),\n    ];\n    for (key, val) in &cfg.build_args {\n        args.push(\"--build-arg\".to_string());\n        args.push(format!(\"{key}={val}\"));\n    }\n    args.push(cfg.context.clone());\n    args\n}\n\n// ======================================================================\n// Process execution\n// ======================================================================\n\n/// Run a process with both stdout and stderr redirected to our stderr.\n/// Returns only the exit status. Use for IO () commands where morloc-manager's\n/// stdout must stay clean but the user should see all container output.\nfn run_process_to_stderr(exe: &str, args: &[String]) -> ExitStatus {\n    let mut child = Command::new(exe)\n        .args(args)\n        .stdin(Stdio::null())\n        .stdout(Stdio::piped())\n        .stderr(Stdio::inherit())\n        .spawn()\n        .unwrap_or_else(|e| {\n            eprintln!(\"Failed to execute {exe}: {e}\");\n            std::process::exit(1);\n        });\n    // Pump child stdout -> our stderr\n    if let Some(mut child_stdout) = child.stdout.take() {\n        let stderr = io::stderr();\n        let _ = io::copy(&mut child_stdout, &mut stderr.lock());\n    }\n    child.wait().unwrap_or_else(|e| {\n        eprintln!(\"Failed to wait for {exe}: {e}\");\n        std::process::exit(1);\n    })\n}\n\n/// Run a process with stderr streamed live to the terminal.\n/// Returns (exit_status, captured_stdout, \"\").\nfn run_process(exe: &str, args: &[String]) -> (ExitStatus, String, String) {\n    let output = Command::new(exe)\n        .args(args)\n        .stdin(Stdio::null())\n        .stdout(Stdio::piped())\n        .stderr(Stdio::inherit())\n        .output()\n        .unwrap_or_else(|e| {\n            eprintln!(\"Failed to execute {exe}: {e}\");\n            std::process::exit(1);\n        });\n    (\n        output.status,\n        String::from_utf8_lossy(&output.stdout).to_string(),\n        String::new(),\n    )\n}\n\n/// Run a process with all output captured (no streaming).\n/// Used when stderr must be parsed (e.g., for error classification).\nfn run_process_quiet(exe: &str, args: &[String]) -> (ExitStatus, String, String) {\n    let output = Command::new(exe)\n        .args(args)\n        .stdin(Stdio::null())\n        .output()\n        .unwrap_or_else(|e| {\n            eprintln!(\"Failed to execute {exe}: {e}\");\n            std::process::exit(1);\n        });\n    (\n        output.status,\n        String::from_utf8_lossy(&output.stdout).to_string(),\n        String::from_utf8_lossy(&output.stderr).to_string(),\n    )\n}\n\n// ======================================================================\n// Helpers\n// ======================================================================\n\npub fn exit_code_to_int(status: ExitStatus) -> i32 {\n    status.code().unwrap_or(1)\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/doctor.rs",
    "content": "use std::fs;\nuse std::path::Path;\nuse std::process::Command;\n\nuse crate::config as cfg;\nuse crate::container::{container_run_quiet, engine_executable, image_exists_locally, RunConfig};\nuse crate::environment;\nuse crate::error::Result;\nuse crate::types::*;\n\nconst MANIFEST_MARKER: &str = \"### MANIFEST ###\";\n\n#[derive(serde::Serialize)]\npub struct CheckResult {\n    pub category: String,\n    pub result: String,\n    pub message: String,\n}\n\n#[derive(serde::Serialize)]\npub struct DoctorSummary {\n    pub ok: u32,\n    pub warnings: u32,\n    pub errors: u32,\n}\n\nstruct Counts {\n    ok: u32,\n    warn: u32,\n    fail: u32,\n    json_mode: bool,\n    current_category: String,\n    checks: Vec<CheckResult>,\n}\n\nimpl Counts {\n    fn new(json_mode: bool) -> Self {\n        Self { ok: 0, warn: 0, fail: 0, json_mode, current_category: String::new(), checks: Vec::new() }\n    }\n\n    fn set_category(&mut self, cat: &str) {\n        self.current_category = cat.to_string();\n    }\n\n    fn pass(&mut self, msg: &str) {\n        self.ok += 1;\n        if self.json_mode {\n            self.checks.push(CheckResult {\n                category: self.current_category.clone(),\n                result: \"ok\".to_string(),\n                message: msg.to_string(),\n            });\n        } else {\n            println!(\"  [ok] {msg}\");\n        }\n    }\n\n    fn warn(&mut self, msg: &str) {\n        self.warn += 1;\n        if self.json_mode {\n            self.checks.push(CheckResult {\n                category: self.current_category.clone(),\n                result: \"warning\".to_string(),\n                message: msg.to_string(),\n            });\n        } else {\n            println!(\"  [!!] {msg}\");\n        }\n    }\n\n    fn fail(&mut self, msg: &str) {\n        self.fail += 1;\n        if self.json_mode {\n            self.checks.push(CheckResult {\n                category: self.current_category.clone(),\n                result: \"error\".to_string(),\n                message: msg.to_string(),\n            });\n        } else {\n            println!(\"  [EE] {msg}\");\n        }\n    }\n\n    fn skip(&mut self, msg: &str) {\n        if self.json_mode {\n            self.checks.push(CheckResult {\n                category: self.current_category.clone(),\n                result: \"skipped\".to_string(),\n                message: msg.to_string(),\n            });\n        } else {\n            println!(\"  [--] {msg}\");\n        }\n    }\n}\n\npub fn doctor(\n    engine: ContainerEngine,\n    verbose: bool,\n    env_name: &str,\n    scope: Scope,\n    ec: &EnvironmentConfig,\n    deep: bool,\n    strict: bool,\n    json_mode: bool,\n) -> Result<()> {\n    let scope_str = match scope {\n        Scope::Local => \"local\",\n        Scope::System => \"system\",\n    };\n    let engine_str = match engine {\n        ContainerEngine::Docker => \"docker\",\n        ContainerEngine::Podman => \"podman\",\n    };\n\n    if !json_mode {\n        println!(\"Environment: {env_name} ({scope_str})\");\n        println!(\"Engine:      {engine_str}\");\n        println!();\n    }\n\n    let mut c = Counts::new(json_mode);\n    let data_dir = cfg::env_data_dir(scope, env_name);\n\n    // ==== Prerequisites ====\n    if !json_mode { println!(\"Prerequisites\"); }\n    c.set_category(\"prerequisites\");\n    check_engine(&mut c, engine);\n    check_base_image(&mut c, engine, &ec.base_image);\n    check_built_image(&mut c, engine, ec, scope, env_name);\n    check_data_dirs(&mut c, &data_dir);\n    check_file_readability(&mut c, &data_dir);\n\n    // ==== Manifests ====\n    if !json_mode { println!(\"\\nManifests\"); }\n    c.set_category(\"manifests\");\n    check_manifests(&mut c, &data_dir, ec.morloc_version.as_ref());\n\n    // ==== Deep checks ====\n    c.set_category(\"deep\");\n    if deep {\n        if !json_mode { println!(\"\\nDeep checks\"); }\n        check_morloc_version(&mut c, engine, ec);\n        check_programs_deep(&mut c, engine, verbose, ec, &data_dir);\n    } else {\n        if !json_mode { println!(\"\\nDeep checks\"); }\n        c.skip(\"Use --deep to run container-side checks\");\n    }\n\n    let fail_count = c.fail;\n    let warn_count = c.warn;\n\n    if json_mode {\n        #[derive(serde::Serialize)]\n        struct DoctorOutput {\n            environment: String,\n            scope: String,\n            engine: String,\n            checks: Vec<CheckResult>,\n            summary: DoctorSummary,\n        }\n        let output = DoctorOutput {\n            environment: env_name.to_string(),\n            scope: scope_str.to_string(),\n            engine: engine_str.to_string(),\n            checks: c.checks,\n            summary: DoctorSummary { ok: c.ok, warnings: warn_count, errors: fail_count },\n        };\n        println!(\"{}\", serde_json::to_string_pretty(&output).unwrap());\n    } else {\n        // ==== Summary ====\n        println!();\n        println!(\n            \"{} passed, {} warnings, {} errors\",\n            c.ok, warn_count, fail_count\n        );\n    }\n\n    if fail_count > 0 {\n        return Err(crate::error::ManagerError::DoctorFailed(fail_count));\n    }\n    if strict && warn_count > 0 {\n        return Err(crate::error::ManagerError::DoctorFailed(warn_count));\n    }\n    Ok(())\n}\n\n// ======================================================================\n// Individual checks\n// ======================================================================\n\nfn check_engine(c: &mut Counts, engine: ContainerEngine) {\n    let exe = engine_executable(engine);\n    let fmt = match engine {\n        ContainerEngine::Podman => \"{{.Version.Version}}\",\n        ContainerEngine::Docker => \"{{.ServerVersion}}\",\n    };\n    let output = Command::new(exe)\n        .args([\"info\", \"--format\", fmt])\n        .output();\n    match output {\n        Ok(o) if o.status.success() => {\n            let ver = String::from_utf8_lossy(&o.stdout).trim().to_string();\n            if ver.is_empty() {\n                c.pass(&format!(\"{exe} engine reachable\"));\n            } else {\n                c.pass(&format!(\"{exe} engine reachable ({ver})\"));\n            }\n        }\n        Ok(o) => {\n            let err = String::from_utf8_lossy(&o.stderr).trim().to_string();\n            if err.contains(\"permission denied\") || err.contains(\"Permission denied\") {\n                c.fail(&format!(\"{exe} permission denied -- add user to {exe} group?\"));\n            } else {\n                c.fail(&format!(\"{exe} not reachable: {err}\"));\n            }\n        }\n        Err(e) => {\n            c.fail(&format!(\"{exe} not found: {e}\"));\n        }\n    }\n}\n\nfn check_base_image(c: &mut Counts, engine: ContainerEngine, base_image: &str) {\n    if image_exists_locally(engine, base_image) {\n        c.pass(&format!(\"Base image {base_image}\"));\n    } else {\n        c.fail(&format!(\n            \"Base image {base_image} not found locally\\n       \\\n             Run: morloc-manager run -- morloc --version  (triggers pull)\"\n        ));\n    }\n}\n\nfn check_built_image(c: &mut Counts, engine: ContainerEngine, ec: &EnvironmentConfig, scope: Scope, env_name: &str) {\n    if ec.dockerfile.is_none() {\n        return;\n    }\n    // Check if the Dockerfile file itself still exists\n    let df_path = cfg::env_dockerfile_path(scope, env_name);\n    if !df_path.exists() {\n        c.warn(&format!(\n            \"Dockerfile configured but file is missing: {}\\n       \\\n             Remove stale config or recreate the file, then run: morloc-manager update\",\n            df_path.display()\n        ));\n        return;\n    }\n    match &ec.built_image {\n        Some(img) => {\n            if image_exists_locally(engine, img) {\n                c.pass(&format!(\"Built image {img}\"));\n            } else {\n                c.fail(&format!(\n                    \"Built image {img} not found locally\\n       \\\n                     Run: morloc-manager update\"\n                ));\n            }\n        }\n        None => {\n            c.warn(\"Dockerfile configured but no image built yet\\n       \\\n                    Run: morloc-manager update\");\n        }\n    }\n}\n\nfn check_data_dirs(c: &mut Counts, data_dir: &Path) {\n    let expected = [\"lib\", \"bin\", \"opt\", \"fdb\", \"src/morloc/plane\", \"exe\"];\n\n    let mut missing: Vec<&str> = Vec::new();\n    for dir in &expected {\n        if !data_dir.join(dir).is_dir() {\n            missing.push(dir);\n        }\n    }\n    if missing.is_empty() {\n        c.pass(\"Data directories intact\");\n    } else {\n        c.fail(&format!(\n            \"Missing directories: {}\\n       \\\n             Run: morloc-manager run -- morloc init -f\",\n            missing.join(\", \")\n        ));\n    }\n}\n\n/// Walk exe/ and other data subdirectories, warning about files unreadable\n/// by the current user (which would cause freeze to fail).\nfn check_file_readability(c: &mut Counts, data_dir: &Path) {\n    let dirs_to_check = [\"exe\", \"bin\", \"lib\"];\n    let mut unreadable: Vec<String> = Vec::new();\n    for dir in &dirs_to_check {\n        let dir_path = data_dir.join(dir);\n        if dir_path.is_dir() {\n            collect_unreadable(&dir_path, &mut unreadable);\n        }\n    }\n    if unreadable.is_empty() {\n        c.pass(\"All data files readable\");\n    } else {\n        let shown: Vec<&str> = unreadable.iter().take(5).map(|s| s.as_str()).collect();\n        let suffix = if unreadable.len() > 5 {\n            format!(\" (and {} more)\", unreadable.len() - 5)\n        } else {\n            String::new()\n        };\n        c.fail(&format!(\n            \"Unreadable files (freeze will fail): {}{suffix}\\n       \\\n             Fix with: chmod -R a+rX <data-dir>\",\n            shown.join(\", \")\n        ));\n    }\n}\n\nfn collect_unreadable(dir: &Path, out: &mut Vec<String>) {\n    let Ok(entries) = fs::read_dir(dir) else {\n        out.push(dir.display().to_string());\n        return;\n    };\n    for entry in entries.flatten() {\n        let path = entry.path();\n        if path.is_dir() {\n            collect_unreadable(&path, out);\n        } else if fs::File::open(&path).is_err() {\n            out.push(path.display().to_string());\n        }\n    }\n}\n\nfn check_manifests(\n    c: &mut Counts,\n    data_dir: &Path,\n    expected_version: Option<&Version>,\n) {\n    let fdb_dir = data_dir.join(\"fdb\");\n    if !fdb_dir.is_dir() {\n        c.warn(\"No fdb/ directory found\");\n        return;\n    }\n\n    let entries = match fs::read_dir(&fdb_dir) {\n        Ok(e) => e,\n        Err(e) => {\n            c.fail(&format!(\"Cannot read fdb/: {e}\"));\n            return;\n        }\n    };\n\n    let mut found_any = false;\n    for entry in entries.flatten() {\n        let name = entry.file_name();\n        let name_str = name.to_string_lossy();\n        if !name_str.ends_with(\".manifest\") {\n            continue;\n        }\n        found_any = true;\n        let prog_name = &name_str[..name_str.len() - \".manifest\".len()];\n        check_one_manifest(c, &entry.path(), prog_name, data_dir, expected_version);\n    }\n\n    if !found_any {\n        c.warn(\"No program manifests found in fdb/\");\n    }\n}\n\nfn check_one_manifest(\n    c: &mut Counts,\n    path: &Path,\n    prog_name: &str,\n    data_dir: &Path,\n    expected_version: Option<&Version>,\n) {\n    let content = match fs::read_to_string(path) {\n        Ok(s) => s,\n        Err(e) => {\n            c.fail(&format!(\"{prog_name} -- cannot read manifest: {e}\"));\n            return;\n        }\n    };\n\n    let json_str = if content.starts_with(\"#!\") {\n        if let Some(marker_pos) = content.find(MANIFEST_MARKER) {\n            let after_marker = &content[marker_pos..];\n            let json_start = after_marker\n                .find('\\n')\n                .map(|i| marker_pos + i + 1)\n                .unwrap_or(content.len());\n            &content[json_start..]\n        } else {\n            c.fail(&format!(\"{prog_name} -- manifest missing ### MANIFEST ### marker\"));\n            return;\n        }\n    } else {\n        content.as_str()\n    };\n\n    let manifest: serde_json::Value = match serde_json::from_str(json_str) {\n        Ok(v) => v,\n        Err(e) => {\n            c.fail(&format!(\"{prog_name} -- invalid manifest JSON: {e}\"));\n            return;\n        }\n    };\n\n    let mut issues: Vec<String> = Vec::new();\n\n    // Check morloc_version\n    let build_ver = manifest\n        .get(\"build\")\n        .and_then(|b| b.get(\"morloc_version\"))\n        .and_then(|v| v.as_str());\n\n    if let (Some(build_ver_str), Some(expected)) = (build_ver, expected_version) {\n        let expected_str = expected.show();\n        if build_ver_str != expected_str {\n            issues.push(format!(\n                \"built with {build_ver_str}, expected {expected_str}\"\n            ));\n        }\n    }\n\n    // Check build.path exists\n    let build_path = manifest\n        .get(\"build\")\n        .and_then(|b| b.get(\"path\"))\n        .and_then(|v| v.as_str());\n\n    if let Some(bp) = build_path {\n        // Build paths inside containers are /opt/morloc/exe/..., on host they're\n        // under data_dir/exe/... Try the host path first.\n        let host_path = data_dir.join(\"exe\").join(prog_name);\n        if !host_path.is_dir() && !Path::new(bp).is_dir() {\n            issues.push(\"build directory missing\".to_string());\n        }\n    } else {\n        issues.push(\"no build.path in manifest\".to_string());\n    }\n\n    // Check pool files exist\n    let pool_count = manifest\n        .get(\"pools\")\n        .and_then(|p| p.as_array())\n        .map(|a| a.len())\n        .unwrap_or(0);\n\n    if pool_count == 0 {\n        issues.push(\"no pools defined\".to_string());\n    }\n\n    if issues.is_empty() {\n        let ver_str = build_ver.unwrap_or(\"unknown\");\n        c.pass(&format!(\n            \"{prog_name} -- built with {ver_str}, {pool_count} pools\"\n        ));\n    } else {\n        for issue in &issues {\n            c.warn(&format!(\n                \"{prog_name} -- {issue}\\n       \\\n                 Recompile: morloc-manager run -- morloc make --install\"\n            ));\n        }\n    }\n}\n\nfn check_morloc_version(c: &mut Counts, engine: ContainerEngine, ec: &EnvironmentConfig) {\n    let image = ec.active_image();\n    match environment::detect_morloc_version(engine, image) {\n        Ok(detected) => {\n            if let Some(ref expected) = ec.morloc_version {\n                if detected == *expected {\n                    c.pass(&format!(\"morloc {} (matches config)\", detected.show()));\n                } else {\n                    c.warn(&format!(\n                        \"morloc {} in container, config says {}\",\n                        detected.show(),\n                        expected.show()\n                    ));\n                }\n            } else {\n                c.pass(&format!(\"morloc {} (no version in config to compare)\", detected.show()));\n            }\n        }\n        Err(e) => {\n            c.fail(&format!(\"Cannot run morloc in container: {e}\"));\n        }\n    }\n}\n\nfn check_programs_deep(\n    c: &mut Counts,\n    engine: ContainerEngine,\n    verbose: bool,\n    ec: &EnvironmentConfig,\n    data_dir: &Path,\n) {\n    let image = ec.active_image();\n    let mh = \"/opt/morloc\";\n    let bind_mounts = vec![(data_dir.to_string_lossy().to_string(), mh.to_string())];\n    let env = vec![\n        (\"MORLOC_HOME\".to_string(), mh.to_string()),\n    ];\n\n    // Scan programs from fdb/ to get program names\n    let fdb_dir = format!(\"{mh}/fdb\");\n    let cfg = RunConfig {\n        command: Some(vec![\"ls\".to_string(), fdb_dir.clone()]),\n        bind_mounts: bind_mounts.clone(),\n        env: env.clone(),\n        ..RunConfig::new(image)\n    };\n    let (status, stdout, _) = container_run_quiet(engine, &cfg);\n    if !status.success() {\n        c.fail(\"Cannot list programs in container\");\n        return;\n    }\n\n    let programs: Vec<ProgramEntry> = stdout\n        .lines()\n        .filter(|l| l.ends_with(\".manifest\"))\n        .map(|l| {\n            let name = l.strip_suffix(\".manifest\").unwrap_or(l);\n            ProgramEntry {\n                name: name.to_string(),\n                commands: Vec::new(),\n            }\n        })\n        .collect();\n\n    if programs.is_empty() {\n        c.warn(\"No programs found in container\");\n        return;\n    }\n\n    if !c.json_mode {\n        println!(\"Running smoke tests for {} programs...\", programs.len());\n    }\n    for prog in &programs {\n        let exe_path = format!(\"{mh}/bin/{}\", prog.name);\n        let cfg = RunConfig {\n            command: Some(vec![exe_path.clone(), \"--help\".to_string()]),\n            bind_mounts: bind_mounts.clone(),\n            env: env.clone(),\n            ..RunConfig::new(image)\n        };\n        if verbose {\n            let exe = engine_executable(engine);\n            eprintln!(\"[morloc-manager] {exe} run --rm {image} {exe_path} --help\");\n        }\n        let (status, _, stderr) = container_run_quiet(engine, &cfg);\n        if status.success() {\n            c.pass(&format!(\"{} -- smoke test passed\", prog.name));\n        } else {\n            let snippet: String = stderr.lines().take(3).collect::<Vec<_>>().join(\"\\n       \");\n            c.fail(&format!(\"{} -- smoke test failed: {snippet}\", prog.name));\n        }\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/environment.rs",
    "content": "use std::fs;\nuse std::path::{Path, PathBuf};\nuse std::process::Command;\n\nuse serde::Serialize;\nuse sha2::{Digest, Sha256};\n\nuse crate::config;\nuse crate::container::{\n    self, check_remote_image, container_build_visible,\n    container_pull_visible, engine_executable, exit_code_to_int,\n    image_exists_locally, BuildConfig, RemoteImageStatus,\n};\nuse crate::error::{ManagerError, Result};\nuse crate::serve;\nuse crate::types::*;\n\n// ======================================================================\n// Public types\n// ======================================================================\n\n/// Options for creating or updating an environment.\n/// For `new` (is_new=true): all Option fields that are None use defaults.\n/// For `update` (is_new=false): None means keep the existing value.\npub struct ApplyOptions {\n    pub name: String,\n    pub scope: Scope,\n    pub is_new: bool,\n    pub base_image: Option<String>,\n    pub original_image: Option<String>,\n    pub morloc_version: Option<Version>,\n    pub dockerfile: Option<String>,\n    pub includes: Vec<String>,\n    pub flagfile: Option<String>,\n    pub engine_args: Vec<String>,\n    pub engine: Option<ContainerEngine>,\n    pub shm_size: Option<String>,\n    pub skip_dockerfile_build: bool,\n    pub verbose: bool,\n}\n\n/// Info returned by list_environments.\n#[derive(Serialize)]\npub struct EnvInfo {\n    pub name: String,\n    pub morloc_version: Option<Version>,\n    pub active: bool,\n}\n\n// ======================================================================\n// Image resolution\n// ======================================================================\n\nconst MORLOC_IMAGE_PREFIX: &str = \"ghcr.io/morloc-project/morloc/morloc-full\";\n\n/// Recognize engine errors that mean \"cannot chdir into the current working\n/// directory\" and rewrite them into a clearer message. This commonly happens\n/// when running `sudo -u <other-user> morloc-manager ...` from a directory\n/// that <other-user> cannot access (e.g., /root or another user's $HOME).\n/// Without this hint, the error bubbles up as \"Failed to check registry...\"\n/// which misleads users toward debugging network/auth problems.\nfn cwd_access_hint(stderr: &str) -> Option<String> {\n    let lower = stderr.to_lowercase();\n    let looks_like_cwd_denied = (lower.contains(\"chdir\") || lower.contains(\"getwd\")\n        || lower.contains(\"current working directory\"))\n        && (lower.contains(\"permission denied\") || lower.contains(\"no such file\"));\n    if looks_like_cwd_denied {\n        Some(format!(\n            \"Cannot change into the current working directory as the target user. \\\n             Run morloc-manager from a directory the target user can access \\\n             (for example /tmp or the user's home directory).\\nOriginal error: {}\",\n            stderr.trim()\n        ))\n    } else {\n        None\n    }\n}\n\n/// Resolve a morloc version string to a registry image reference.\npub fn version_to_image(ver: &Version) -> String {\n    format!(\"{MORLOC_IMAGE_PREFIX}:{}\", ver.show())\n}\n\n/// Pull an image by tag from the morloc registry, detect its version, and\n/// return (image_ref, version). The tag can be a semver string (\"0.77.0\"),\n/// a named tag (\"edge\", \"nightly\"), or any other valid container tag.\npub fn pull_tagged_image(engine: ContainerEngine, tag: &str) -> Result<(String, Version)> {\n    let image_ref = format!(\"{MORLOC_IMAGE_PREFIX}:{tag}\");\n\n    if !image_exists_locally(engine, &image_ref) {\n        match check_remote_image(engine, &image_ref) {\n            RemoteImageStatus::Exists => {}\n            RemoteImageStatus::NotFound => {\n                return Err(ManagerError::EnvError(format!(\n                    \"No container image found for tag '{tag}'\"\n                )));\n            }\n            RemoteImageStatus::Unknown(stderr) => {\n                if let Some(hint) = cwd_access_hint(&stderr) {\n                    return Err(ManagerError::EnvError(hint));\n                }\n                return Err(ManagerError::EnvError(format!(\n                    \"Failed to check registry for tag '{tag}': {}\",\n                    stderr.trim()\n                )));\n            }\n        }\n\n        eprintln!(\"Pulling {image_ref}...\");\n        let status = container_pull_visible(engine, &image_ref);\n        if !status.success() {\n            return Err(ManagerError::EngineError {\n                engine,\n                code: exit_code_to_int(status),\n                stderr: \"Pull failed (see output above)\".to_string(),\n            });\n        }\n    } else {\n        eprintln!(\"Using local copy of {image_ref}\");\n    }\n\n    let ver = detect_morloc_version(engine, &image_ref)?;\n\n    // Also tag with the detected version so future --version lookups find it\n    let versioned_image = version_to_image(&ver);\n    if versioned_image != image_ref {\n        let exe = engine_executable(engine);\n        let _ = Command::new(exe)\n            .args([\"tag\", &image_ref, &versioned_image])\n            .output();\n    }\n\n    Ok((versioned_image, ver))\n}\n\n/// Pull the :edge image. Convenience wrapper around pull_tagged_image.\npub fn resolve_latest(engine: ContainerEngine) -> Result<(String, Version)> {\n    pull_tagged_image(engine, \"edge\")\n}\n\n/// Pull a specific version image from the morloc registry.\npub fn pull_version_image(engine: ContainerEngine, ver: &Version) -> Result<String> {\n    let (img, _) = pull_tagged_image(engine, &ver.show())?;\n    Ok(img)\n}\n\n/// Detect the morloc version by running `morloc --version` inside the image.\npub fn detect_morloc_version(engine: ContainerEngine, image: &str) -> Result<Version> {\n    let exe = engine_executable(engine);\n    let output = Command::new(exe)\n        .args([\"run\", \"--rm\", image, \"morloc\", \"--version\"])\n        .stdin(std::process::Stdio::null())\n        .stdout(std::process::Stdio::piped())\n        .stderr(std::process::Stdio::piped())\n        .output()\n        .map_err(|e| ManagerError::EnvError(format!(\"Failed to run container: {e}\")))?;\n\n    if !output.status.success() {\n        return Err(ManagerError::EnvError(format!(\n            \"Image '{image}' does not have a working morloc binary: {}\",\n            String::from_utf8_lossy(&output.stderr).trim()\n        )));\n    }\n\n    let ver_out = String::from_utf8_lossy(&output.stdout).trim().to_string();\n    let ver_str = ver_out.split_whitespace().last().unwrap_or(&ver_out);\n    ver_str.parse().map_err(|_| {\n        ManagerError::EnvError(format!(\n            \"Could not parse morloc version from image '{image}' output: {ver_out}\"\n        ))\n    })\n}\n\n/// Pull a custom image (not from morloc registry).\npub fn pull_custom_image(engine: ContainerEngine, image: &str) -> Result<()> {\n    if image_exists_locally(engine, image) {\n        eprintln!(\"Using local copy of {image}\");\n        return Ok(());\n    }\n\n    eprintln!(\"Pulling {image}...\");\n    let status = container_pull_visible(engine, image);\n    if !status.success() {\n        return Err(ManagerError::EngineError {\n            engine,\n            code: exit_code_to_int(status),\n            stderr: \"Pull failed (see output above)\".to_string(),\n        });\n    }\n    Ok(())\n}\n\n// ======================================================================\n// Core operations\n// ======================================================================\n\n/// Create or update an environment.\n///\n/// When `is_new` is true: validates name uniqueness, creates data directories.\n/// Validate that an environment name contains only allowed characters.\npub fn validate_env_name(name: &str) -> Result<()> {\n    if name.is_empty()\n        || !name\n            .chars()\n            .all(|c| c.is_alphanumeric() || c == '-' || c == '_' || c == '.')\n    {\n        return Err(ManagerError::EnvError(format!(\n            \"Invalid environment name '{name}': must contain only alphanumeric characters, hyphens, underscores, or dots\"\n        )));\n    }\n    Ok(())\n}\n\n/// Parse an include spec into (resolved_source, destination).\n///\n/// Supports two forms:\n/// - `path`        — copies to cfg_dir/basename(path)\n/// - `src:dest`    — copies src to cfg_dir/dest\n///\n/// Rules for dest:\n/// - Must be relative (no leading `/`)\n/// - Cannot contain `..`\n/// - If dest ends with `/`, src's basename is appended\n///\n/// Source symlinks are resolved via canonicalize().\nfn parse_include_spec(spec: &str, cfg_dir: &Path) -> Result<(PathBuf, PathBuf)> {\n    let (src_str, dest_rel) = if let Some(idx) = spec.find(':') {\n        let s = &spec[..idx];\n        let d = &spec[idx + 1..];\n        if s.is_empty() || d.is_empty() {\n            return Err(ManagerError::EnvError(format!(\n                \"Invalid include spec: '{spec}'\"\n            )));\n        }\n        (s, d.to_string())\n    } else {\n        let src_path = Path::new(spec);\n        let fname = src_path.file_name().ok_or_else(|| {\n            ManagerError::EnvError(format!(\"Invalid include path: {spec}\"))\n        })?;\n        (spec.as_ref(), fname.to_string_lossy().to_string())\n    };\n\n    // Validate dest constraints\n    if dest_rel.starts_with('/') {\n        return Err(ManagerError::EnvError(format!(\n            \"Include destination must be relative, not absolute: '{dest_rel}'\"\n        )));\n    }\n    if dest_rel.contains(\"..\") {\n        return Err(ManagerError::EnvError(format!(\n            \"Include destination cannot contain '..': '{dest_rel}'\"\n        )));\n    }\n\n    // Resolve src (canonicalize follows symlinks, errors if path doesn't exist)\n    let real_src = Path::new(src_str).canonicalize().map_err(|e| {\n        ManagerError::EnvError(format!(\"Cannot resolve include path '{src_str}': {e}\"))\n    })?;\n\n    // Compute final dest\n    let dest = cfg_dir.join(&dest_rel);\n    let final_dest = if dest_rel.ends_with('/') {\n        dest.join(real_src.file_name().unwrap_or_default())\n    } else {\n        dest\n    };\n\n    Ok((real_src, final_dest))\n}\n\n/// When `is_new` is false: loads existing config, applies overrides.\npub fn apply_environment(opts: &ApplyOptions) -> Result<()> {\n    let scope = opts.scope;\n    let name = &opts.name;\n\n    validate_env_name(name)?;\n\n    // Load existing config or start fresh\n    let mut ec = if opts.is_new {\n        let cfg_path = config::env_config_path(scope, name);\n        if cfg_path.is_file() {\n            return Err(ManagerError::EnvError(format!(\n                \"Environment '{name}' already exists\"\n            )));\n        }\n        // Create data directories\n        let data_dir = config::env_data_dir(scope, name);\n        for sub in &[\"bin\", \"lib\", \"fdb\", \"include\", \"opt\", \"tmp\"] {\n            fs::create_dir_all(data_dir.join(sub)).map_err(|e| {\n                ManagerError::EnvError(format!(\"Failed to create directory: {e}\"))\n            })?;\n        }\n        if scope == Scope::System {\n            use std::os::unix::fs::PermissionsExt;\n            let dirs: Vec<_> = std::iter::once(data_dir.clone())\n                .chain(\n                    [\"bin\", \"lib\", \"fdb\", \"include\", \"opt\", \"tmp\"]\n                        .iter()\n                        .map(|d| data_dir.join(d)),\n                )\n                .collect();\n            for d in dirs {\n                let _ = fs::set_permissions(&d, fs::Permissions::from_mode(0o2775));\n            }\n        }\n        // Start with required fields from opts; the rest will be applied below\n        EnvironmentConfig {\n            name: name.clone(),\n            base_image: opts.base_image.clone().unwrap_or_default(),\n            original_image: None,\n            dockerfile: None,\n            content_hash: None,\n            built_image: None,\n            engine: opts.engine.unwrap_or(ContainerEngine::Podman),\n            shm_size: \"512m\".to_string(),\n            morloc_version: None,\n        }\n    } else {\n        config::read_env_config(scope, name)\n            .map_err(|_| ManagerError::EnvironmentNotFound(name.to_string()))?\n    };\n\n    // Apply overrides\n    if let Some(ref img) = opts.base_image {\n        ec.base_image = img.clone();\n    }\n    if let Some(ref img) = opts.original_image {\n        ec.original_image = Some(img.clone());\n    }\n    if let Some(ref ver) = opts.morloc_version {\n        ec.morloc_version = Some(ver.clone());\n    }\n    if let Some(engine) = opts.engine {\n        ec.engine = engine;\n    }\n    if let Some(ref shm) = opts.shm_size {\n        if !is_valid_shm_size(shm) {\n            return Err(ManagerError::EnvError(format!(\n                \"Invalid --shm-size '{shm}'. Use format like: 512m, 1g, 2048k\"\n            )));\n        }\n        ec.shm_size = shm.clone();\n    }\n\n    // Copy Dockerfile if a new one was provided\n    let dockerfile_changed = if let Some(ref src) = opts.dockerfile {\n        let dest = config::env_dockerfile_path(scope, name);\n        let dest_dir = dest.parent().unwrap();\n        fs::create_dir_all(dest_dir).map_err(|e| {\n            ManagerError::EnvError(format!(\"Failed to create config dir: {e}\"))\n        })?;\n        fs::copy(src, &dest).map_err(|e| {\n            ManagerError::EnvError(format!(\"Failed to copy Dockerfile '{}': {e}\", src))\n        })?;\n        ec.dockerfile = Some(\"Dockerfile\".to_string());\n        true\n    } else {\n        false\n    };\n\n    // Copy included files/directories into build context.\n    // Supports src:dest syntax (like Docker volume mounts) for explicit placement.\n    let cfg_dir = config::env_config_dir(scope, name);\n    fs::create_dir_all(&cfg_dir).map_err(|e| {\n        ManagerError::EnvError(format!(\"Failed to create config dir: {e}\"))\n    })?;\n    for spec in &opts.includes {\n        let (real_src, final_dest) = parse_include_spec(spec, &cfg_dir)?;\n        if let Some(parent) = final_dest.parent() {\n            fs::create_dir_all(parent).map_err(|e| {\n                ManagerError::EnvError(format!(\"Failed to create directory: {e}\"))\n            })?;\n        }\n        if real_src.is_dir() {\n            let status = Command::new(\"cp\")\n                .args([\"-a\", &real_src.to_string_lossy(), &final_dest.to_string_lossy()])\n                .stdin(std::process::Stdio::null())\n                .stdout(std::process::Stdio::null())\n                .stderr(std::process::Stdio::inherit())\n                .status()\n                .map_err(|e| ManagerError::EnvError(format!(\"Failed to copy '{spec}': {e}\")))?;\n            if !status.success() {\n                return Err(ManagerError::EnvError(format!(\n                    \"Failed to copy directory '{spec}'\"\n                )));\n            }\n        } else {\n            fs::copy(&real_src, &final_dest).map_err(|e| {\n                ManagerError::EnvError(format!(\"Failed to copy '{spec}': {e}\"))\n            })?;\n        }\n    }\n\n    // Write flags file: for new envs or when flagfile is provided, write fresh.\n    // For updates with only engine_args, append to existing.\n    let flags_path = config::env_flags_path(scope, name);\n    if opts.is_new || opts.flagfile.is_some() {\n        let mut flag_lines: Vec<String> = Vec::new();\n        if let Some(ref src) = opts.flagfile {\n            let content = fs::read_to_string(src).map_err(|e| {\n                ManagerError::EnvError(format!(\"Failed to read flagfile '{}': {e}\", src))\n            })?;\n            flag_lines.extend(\n                content\n                    .lines()\n                    .map(|l| l.trim().to_string())\n                    .filter(|l| !l.is_empty() && !l.starts_with('#')),\n            );\n        }\n        flag_lines.extend(opts.engine_args.iter().cloned());\n        let flags_content = if flag_lines.is_empty() {\n            String::new()\n        } else {\n            flag_lines.join(\"\\n\") + \"\\n\"\n        };\n        fs::write(&flags_path, &flags_content).map_err(|e| {\n            ManagerError::EnvError(format!(\"Failed to write flags file: {e}\"))\n        })?;\n    } else if !opts.engine_args.is_empty() {\n        // Append engine_args to existing flags file\n        let mut existing = config::read_flags_file_lines(&flags_path);\n        existing.extend(opts.engine_args.iter().cloned());\n        let flags_content = existing.join(\"\\n\") + \"\\n\";\n        fs::write(&flags_path, &flags_content).map_err(|e| {\n            ManagerError::EnvError(format!(\"Failed to write flags file: {e}\"))\n        })?;\n    }\n\n    // Build Dockerfile layer if present and not skipped\n    let has_dockerfile = ec.dockerfile.is_some();\n    let should_build = has_dockerfile\n        && !opts.skip_dockerfile_build\n        && (opts.is_new || dockerfile_changed || !opts.includes.is_empty()\n            || opts.base_image.is_some() || opts.engine.is_some()\n            // For update with no specific changes, rebuild if Dockerfile exists\n            || (!opts.is_new && opts.dockerfile.is_none() && opts.includes.is_empty()));\n\n    if should_build {\n        let tag = format!(\"localhost/morloc-env:{name}\");\n        let df_path = config::env_dockerfile_path(scope, name);\n        if df_path.exists() {\n            let hash = hash_file(&df_path)?;\n            // Skip rebuild when nothing has actually changed: same Dockerfile\n            // hash, no new includes, no base-image change, tagged image still\n            // present. Without this, `update` with no arguments silently\n            // re-runs the full build every time.\n            let unchanged = !opts.is_new\n                && !dockerfile_changed\n                && opts.includes.is_empty()\n                && opts.base_image.is_none()\n                && ec.content_hash.as_deref() == Some(hash.as_str())\n                && ec.built_image.as_ref()\n                    .map(|img| image_exists_locally(ec.engine, img))\n                    .unwrap_or(false);\n            if unchanged {\n                eprintln!(\"Dockerfile unchanged; skipping rebuild.\");\n            } else {\n                let build_cfg = BuildConfig {\n                    dockerfile: df_path.to_string_lossy().to_string(),\n                    context: cfg_dir.to_string_lossy().to_string(),\n                    tag: tag.clone(),\n                    build_args: vec![(\"CONTAINER_BASE\".to_string(), ec.base_image.clone())],\n                };\n                if opts.verbose {\n                    let exe = engine_executable(ec.engine);\n                    eprintln!(\n                        \"[morloc-manager] {exe} build -f {} -t {} {}\",\n                        build_cfg.dockerfile, build_cfg.tag, build_cfg.context\n                    );\n                }\n                let status = container_build_visible(ec.engine, &build_cfg);\n                if !status.success() {\n                    return Err(ManagerError::EngineError {\n                        engine: ec.engine,\n                        code: exit_code_to_int(status),\n                        stderr: \"Build failed (see output above)\".to_string(),\n                    });\n                }\n                ec.built_image = Some(tag);\n                ec.content_hash = Some(hash);\n            }\n        }\n    }\n\n    // Always reconcile the stored morloc version against the actual image.\n    // - For `new --version 0.77.0-rc.6`, the binary reports \"0.77.0\" (stack\n    //   does not expose prerelease tags), so keep the recorded value when\n    //   major.minor.patch match — the recorded tag is more informative.\n    // - For `new --image <custom>` or `update --image ...`, nothing was\n    //   recorded yet, so store the detected version.\n    // - If the image has no morloc binary (e.g., a bare base image staged\n    //   for a Dockerfile layer not yet built), silently leave the field\n    //   unchanged rather than failing the whole operation.\n    let detect_target = ec.built_image.clone().unwrap_or_else(|| ec.base_image.clone());\n    if !detect_target.is_empty() {\n        if let Ok(detected) = detect_morloc_version(ec.engine, &detect_target) {\n            ec.morloc_version = Some(match ec.morloc_version.take() {\n                Some(recorded) if recorded.major == detected.major\n                    && recorded.minor == detected.minor\n                    && recorded.patch == detected.patch => recorded,\n                _ => detected,\n            });\n        }\n    }\n\n    // Write environment config\n    config::write_env_config(scope, name, &ec)?;\n\n    Ok(())\n}\n\n/// Remove an environment and all its data.\npub fn remove_environment(engine: ContainerEngine, scope: Scope, name: &str) -> Result<()> {\n    let ec = config::read_env_config(scope, name)\n        .map_err(|_| ManagerError::EnvironmentNotFound(name.to_string()))?;\n\n    // Stop and remove any running serve container for this environment before\n    // removing its image. If we skipped this, the serve container would keep\n    // running and be unreachable through morloc-manager.\n    let serve_name = serve::serve_container_name(name);\n    if container::container_exists(engine, &serve_name) {\n        let _ = container::container_stop(engine, &serve_name);\n        let _ = container::container_remove_quiet(engine, &serve_name);\n    }\n\n    // Remove built Dockerfile layer image\n    if let Some(ref img) = ec.built_image {\n        if image_exists_locally(engine, img) {\n            container::remove_image(engine, img);\n        }\n    }\n\n    // Remove config directory\n    let cfg_dir = config::env_config_dir(scope, name);\n    if cfg_dir.is_dir() {\n        let _ = fs::remove_dir_all(&cfg_dir);\n    }\n\n    // Remove data directory\n    let data_dir = config::env_data_dir(scope, name);\n    if data_dir.is_dir() {\n        let _ = fs::remove_dir_all(&data_dir);\n    }\n\n    // If the active env was this one, clear it in both local and system configs\n    for cfg_scope in [Scope::Local, Scope::System] {\n        let cfg_path = config::config_path(cfg_scope);\n        if let Ok(cfg) = config::read_config::<Config>(&cfg_path) {\n            if cfg.active_env.as_deref() == Some(name) {\n                let new_cfg = Config {\n                    active_env: None,\n                    ..cfg\n                };\n                let _ = config::write_config(&cfg_path, &new_cfg);\n            }\n        }\n    }\n\n    Ok(())\n}\n\n/// List environments in the given scope.\npub fn list_environments(scope: Scope, active_env: Option<&str>) -> Vec<EnvInfo> {\n    let names = config::list_env_names(scope);\n    let mut result = Vec::new();\n    for name in names {\n        if let Ok(ec) = config::read_env_config(scope, &name) {\n            result.push(EnvInfo {\n                name: name.clone(),\n                morloc_version: ec.morloc_version,\n                active: active_env == Some(name.as_str()),\n            });\n        }\n    }\n    result\n}\n\n/// Select an environment by writing active_env to the given write_scope config.\npub fn select_environment(name: &str, write_scope: Scope) -> Result<()> {\n    // Verify the environment exists somewhere\n    config::find_env_scope(name)?;\n\n    let cfg_path = config::config_path(write_scope);\n    let base_cfg = config::read_config::<Config>(&cfg_path)\n        .or_else(|_| config::read_config::<Config>(&config::config_path(Scope::System)))\n        .unwrap_or_default();\n    let new_cfg = Config {\n        active_env: Some(name.to_string()),\n        ..base_cfg\n    };\n    config::write_config(&cfg_path, &new_cfg)\n}\n\n/// Resolve the active environment. Checks local config first, then system.\n/// Returns (name, scope where env config lives, EnvironmentConfig).\npub fn resolve_active_environment() -> Result<(String, Scope, EnvironmentConfig)> {\n    // Find active_env name from config (local first, then system)\n    let name = resolve_active_env_name()?;\n\n    // Find which scope has the environment config\n    let scope = config::find_env_scope(&name)?;\n    let ec = config::read_env_config(scope, &name)?;\n    Ok((name, scope, ec))\n}\n\n/// Resolve just the active environment name from config.\n/// Skips names that don't resolve to an actual environment (e.g., stale\n/// entries from old config formats).\nfn resolve_active_env_name() -> Result<String> {\n    if let Ok(cfg) = config::read_config::<Config>(&config::config_path(Scope::Local)) {\n        if let Some(ref name) = cfg.active_env {\n            if config::find_env_scope(name).is_ok() {\n                return Ok(name.clone());\n            }\n        }\n    }\n    if let Ok(cfg) = config::read_config::<Config>(&config::config_path(Scope::System)) {\n        if let Some(ref name) = cfg.active_env {\n            if config::find_env_scope(name).is_ok() {\n                return Ok(name.clone());\n            }\n        }\n    }\n    // Check if any environments exist to give a better suggestion\n    let local_envs = config::list_env_names(Scope::Local);\n    let system_envs = config::list_env_names(Scope::System);\n    if local_envs.is_empty() && system_envs.is_empty() {\n        Err(ManagerError::NoActiveEnvironment)\n    } else {\n        // Label each entry with its scope so same-named envs are distinguishable.\n        // System envs are flagged with --system to disambiguate in select.\n        let mut available: Vec<String> = local_envs\n            .iter()\n            .map(|n| format!(\"{n} (local)\"))\n            .collect();\n        available.extend(system_envs.iter().map(|n| format!(\"{n} (system)\")));\n        Err(ManagerError::EnvError(format!(\n            \"No active environment. Select one with: morloc-manager select <name>\\n\\\n             Available: {}\",\n            available.join(\", \")\n        )))\n    }\n}\n\n// ======================================================================\n// Internal\n// ======================================================================\n\npub fn is_valid_shm_size(s: &str) -> bool {\n    if s.is_empty() {\n        return false;\n    }\n    let (digits, suffix) = if s.ends_with(|c: char| \"bkmgBKMG\".contains(c)) {\n        (&s[..s.len() - 1], true)\n    } else {\n        (s, false)\n    };\n    !digits.is_empty() && digits.chars().all(|c| c.is_ascii_digit()) && (suffix || !digits.is_empty())\n}\n\nfn hash_file(path: &Path) -> Result<String> {\n    let contents = fs::read(path).map_err(|e| {\n        ManagerError::EnvError(format!(\"Failed to read file: {e}\"))\n    })?;\n    let digest = Sha256::digest(&contents);\n    Ok(hex_encode(&digest))\n}\n\nfn hex_encode(bytes: &[u8]) -> String {\n    bytes.iter().map(|b| format!(\"{b:02x}\")).collect()\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/error.rs",
    "content": "use std::fmt;\nuse thiserror::Error;\n\nuse crate::types::{ContainerEngine, Scope, Version};\n\n#[derive(Debug, Error, PartialEq, Eq)]\npub enum ManagerError {\n    #[error(\"Configuration not found: {0}\")]\n    ConfigNotFound(String),\n\n    #[error(\"Permission denied: {0}\")]\n    ConfigPermissionDenied(String),\n\n    #[error(\"Invalid configuration in {path}: {msg}\")]\n    ConfigParseError { path: String, msg: String },\n\n    #[error(\"No active environment. Run: morloc-manager new\")]\n    NoActiveEnvironment,\n\n    #[error(\"Environment not found: {0}\")]\n    EnvironmentNotFound(String),\n\n    #[error(\"Environment error: {0}\")]\n    EnvError(String),\n\n    #[error(\"Invalid version: {0}. Expected format: MAJOR.MINOR.PATCH. For named tags like 'edge', use --tag instead.\")]\n    InvalidVersion(String),\n\n    #[error(\"No command specified. Use --shell or provide a command after --.\")]\n    NoCommand,\n\n    #[error(\"No container engine found. Install podman or docker.\")]\n    EngineNotFound,\n\n    #[error(\"Container engine ({engine}) failed with exit code {code}:\\n{stderr}\")]\n    EngineError {\n        engine: ContainerEngine,\n        code: i32,\n        stderr: String,\n    },\n\n    #[error(\"Freeze failed: {0}\")]\n    FreezeError(String),\n\n    #[error(\"Unfreeze failed: {0}\")]\n    UnfreezeError(String),\n\n    #[error(\"SELinux error: {0}\")]\n    SELinuxError(String),\n\n    #[error(\"Doctor found {0} error(s)\")]\n    DoctorFailed(u32),\n\n    #[error(\"{}\", match .0 {\n        Scope::Local => \"No local configuration found. Run: morloc-manager new\",\n        Scope::System => \"No system configuration found. Run: sudo morloc-manager new --system\",\n    })]\n    SetupNotComplete(Scope),\n}\n\nimpl fmt::Display for Version {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        write!(f, \"{}.{}.{}\", self.major, self.minor, self.patch)\n    }\n}\n\nimpl fmt::Display for ContainerEngine {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        match self {\n            ContainerEngine::Docker => write!(f, \"Docker\"),\n            ContainerEngine::Podman => write!(f, \"Podman\"),\n        }\n    }\n}\n\npub type Result<T> = std::result::Result<T, ManagerError>;\n"
  },
  {
    "path": "data/rust/morloc-manager/src/freeze.rs",
    "content": "use std::fs;\nuse std::path::Path;\nuse std::process::{Command, Stdio};\n\nuse chrono::Utc;\nuse sha2::{Digest, Sha256};\nuse crate::config;\nuse crate::error::{ManagerError, Result};\nuse crate::types::*;\n\npub fn freeze_from_dir(\n    scope: Scope,\n    ver: Version,\n    engine: ContainerEngine,\n    image: &str,\n    v_data_dir: &str,\n    output_dir: &str,\n    verbose: bool,\n) -> Result<()> {\n    fs::create_dir_all(output_dir)\n        .map_err(|e| ManagerError::FreezeError(format!(\"Failed to create output dir: {e}\")))?;\n\n    if !Path::new(v_data_dir).is_dir() {\n        return Err(ManagerError::FreezeError(format!(\n            \"Data directory does not exist: {v_data_dir}\"\n        )));\n    }\n\n    // Validate programs exist before writing any files\n    let modules = scan_modules(&format!(\"{v_data_dir}/fdb\"));\n    let programs = scan_programs(&format!(\"{v_data_dir}/fdb\"));\n    if programs.is_empty() {\n        return Err(ManagerError::FreezeError(\n            \"No morloc programs are installed. Compile and install with 'morloc make --install' before freezing.\".to_string()\n        ));\n    }\n\n    // Validate programs work before freezing\n    let mh = \"/opt/morloc\";\n    let bind_mounts = vec![(v_data_dir.to_string(), mh.to_string())];\n    crate::serve::validate_programs(engine, image, &programs, bind_mounts, verbose)?;\n\n    eprintln!(\"Freezing installed state from {v_data_dir}...\");\n    let tar_path = Path::new(output_dir).join(\"state.tar.gz\");\n    let tar_path = tar_path.to_string_lossy();\n    let mut tar_dirs: Vec<&str> = Vec::new();\n    for dir in &[\"lib\", \"fdb\", \"bin\", \"exe\", \"opt\", \"src\"] {\n        if Path::new(&format!(\"{v_data_dir}/{dir}\")).is_dir() {\n            tar_dirs.push(dir);\n        }\n    }\n\n    // Pre-flight: verify all files are readable before invoking tar\n    for dir in &tar_dirs {\n        check_readable_recursive(&Path::new(v_data_dir).join(dir))?;\n    }\n\n    let tar_status = Command::new(\"tar\")\n        .args([\"-czf\", &tar_path, \"-C\", v_data_dir])\n        .args(&tar_dirs)\n        .stdin(Stdio::null())\n        .stdout(Stdio::null())\n        .stderr(Stdio::inherit())\n        .status()\n        .map_err(|e| ManagerError::FreezeError(format!(\"tar failed: {e}\")))?;\n\n    if !tar_status.success() {\n        return Err(ManagerError::FreezeError(\n            \"tar failed (see error output above)\".to_string()\n        ));\n    }\n    eprintln!(\"Created {tar_path}\");\n    let now = Utc::now();\n\n    // Get base image from the active environment config.\n    // Check local config first, then system config for the active env name\n    // (mirrors resolve_active_env_name in environment.rs).\n    let active_env_name: Option<String> = config::read_active_config()\n        .and_then(|c| c.active_env)\n        .or_else(|| {\n            let sys_path = config::config_path(Scope::System);\n            config::read_config::<Config>(&sys_path)\n                .ok()\n                .and_then(|c| c.active_env)\n        });\n\n    let (base_img, env_layer) = if let Some(ref env_name) = active_env_name {\n        let env_scope = config::find_env_scope(env_name).unwrap_or(scope);\n        match config::read_env_config(env_scope, env_name) {\n            Ok(ec) => {\n                let base = ec.base_image.clone();\n                // Capture env layer info if there's a Dockerfile\n                let layer = if ec.dockerfile.is_some() {\n                    let df_path = config::env_dockerfile_path(env_scope, env_name);\n                    if df_path.exists() {\n                        let df_contents = fs::read_to_string(&df_path).unwrap_or_default();\n                        let content_hash = ec.content_hash.unwrap_or_default();\n                        // Use the tagged image reference (not digest) so that\n                        // unfreeze can resolve it locally without network access.\n                        // Digest references like localhost/morloc-env@sha256:...\n                        // cause BuildKit to attempt HTTPS to localhost.\n                        let image_tag = ec.built_image.clone();\n                        Some(FrozenEnvLayer {\n                            name: env_name.to_string(),\n                            dockerfile: df_contents,\n                            content_hash,\n                            image_tag,\n                        })\n                    } else {\n                        None\n                    }\n                } else {\n                    None\n                };\n                (base, layer)\n            }\n            Err(_) => (\"unknown\".to_string(), None),\n        }\n    } else {\n        (\"unknown\".to_string(), None)\n    };\n\n    let manifest = FreezeManifest {\n        morloc_version: ver,\n        frozen_at: now,\n        modules,\n        programs,\n        base_image: base_img,\n        env_layer,\n        env_vars: Vec::new(),\n    };\n    let manifest_path = Path::new(output_dir).join(\"freeze-manifest.json\");\n    let manifest_path = manifest_path.to_string_lossy();\n    write_freeze_manifest(&manifest_path, &manifest)?;\n    eprintln!(\"Wrote {manifest_path}\");\n    eprintln!(\"Frozen state written to {output_dir}\");\n    Ok(())\n}\n\npub fn write_freeze_manifest(path: &str, manifest: &FreezeManifest) -> Result<()> {\n    let json = serde_json::to_vec(manifest)\n        .map_err(|e| ManagerError::FreezeError(format!(\"JSON encode failed: {e}\")))?;\n    fs::write(path, json)\n        .map_err(|e| ManagerError::FreezeError(format!(\"Write failed: {e}\")))?;\n    Ok(())\n}\n\npub fn read_freeze_manifest(path: &str) -> Result<FreezeManifest> {\n    let bytes =\n        fs::read(path).map_err(|e| ManagerError::FreezeError(format!(\"Read failed: {e}\")))?;\n    serde_json::from_slice(&bytes)\n        .map_err(|e| ManagerError::FreezeError(format!(\"Invalid manifest: {e}\")))\n}\n\n// ======================================================================\n// Internal: scanning installed state\n// ======================================================================\n\nfn scan_modules(fdb_dir: &str) -> Vec<ModuleEntry> {\n    let fdb_path = Path::new(fdb_dir);\n    if !fdb_path.is_dir() {\n        return Vec::new();\n    }\n    let Ok(entries) = fs::read_dir(fdb_path) else {\n        return Vec::new();\n    };\n\n    #[derive(serde::Deserialize)]\n    struct ModuleStub {\n        name: String,\n        #[serde(default)]\n        version: Option<String>,\n    }\n\n    entries\n        .flatten()\n        .filter(|e| {\n            e.file_name()\n                .to_string_lossy()\n                .ends_with(\".module\")\n        })\n        .filter_map(|e| {\n            let bytes = fs::read(e.path()).ok()?;\n            let stub: ModuleStub = serde_json::from_slice(&bytes).ok()?;\n            let digest = Sha256::digest(&bytes);\n            let sha256: String = digest.iter().map(|b| format!(\"{b:02x}\")).collect();\n            Some(ModuleEntry {\n                name: stub.name,\n                version: stub.version,\n                sha256,\n            })\n        })\n        .collect()\n}\n\nfn scan_programs(fdb_dir: &str) -> Vec<ProgramEntry> {\n    let fdb_path = Path::new(fdb_dir);\n    if !fdb_path.is_dir() {\n        return Vec::new();\n    }\n    let Ok(entries) = fs::read_dir(fdb_path) else {\n        return Vec::new();\n    };\n    entries\n        .flatten()\n        .filter(|e| {\n            e.file_name()\n                .to_string_lossy()\n                .ends_with(\".manifest\")\n        })\n        .map(|e| {\n            let filename = e.file_name().to_string_lossy().to_string();\n            let prog_name = filename.strip_suffix(\".manifest\").unwrap_or(&filename);\n            let commands = parse_manifest_commands(&e.path());\n            ProgramEntry {\n                name: prog_name.to_string(),\n                commands,\n            }\n        })\n        .collect()\n}\n\nfn parse_manifest_commands(path: &Path) -> Vec<String> {\n    let Ok(bytes) = fs::read(path) else {\n        return Vec::new();\n    };\n    #[derive(serde::Deserialize)]\n    struct ManifestStub {\n        #[serde(default)]\n        commands: Vec<ManifestStubCmd>,\n    }\n    #[derive(serde::Deserialize)]\n    struct ManifestStubCmd {\n        name: String,\n    }\n    match serde_json::from_slice::<ManifestStub>(&bytes) {\n        Ok(stub) => stub.commands.into_iter().map(|c| c.name).collect(),\n        Err(_) => Vec::new(),\n    }\n}\n\n/// Walk a directory tree and verify every file is readable by the current user.\nfn check_readable_recursive(dir: &Path) -> Result<()> {\n    if !dir.is_dir() {\n        return Ok(());\n    }\n    let entries = fs::read_dir(dir).map_err(|e| {\n        ManagerError::FreezeError(format!(\"Cannot read directory {}: {e}\", dir.display()))\n    })?;\n    for entry in entries {\n        let entry = entry.map_err(|e| {\n            ManagerError::FreezeError(format!(\n                \"Cannot read entry in {}: {e}\",\n                dir.display()\n            ))\n        })?;\n        let path = entry.path();\n        if path.is_dir() {\n            check_readable_recursive(&path)?;\n        } else if fs::File::open(&path).is_err() {\n            return Err(ManagerError::FreezeError(format!(\n                \"Unreadable file: {}. Fix permissions or remove before freezing.\",\n                path.display()\n            )));\n        }\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/main.rs",
    "content": "mod config;\nmod container;\nmod doctor;\nmod environment;\nmod error;\nmod freeze;\nmod selinux;\nmod serve;\nmod types;\n\nuse std::collections::HashSet;\nuse std::fs;\nuse std::io::{self, IsTerminal, Write};\nuse std::process::{Command, ExitCode, Stdio};\n\nuse clap::builder::styling::Style;\nuse clap::{CommandFactory, FromArgMatches, Parser, Subcommand, ValueEnum};\n\nuse crate::config as cfg;\nuse crate::container::{container_run_passthrough, RunConfig};\nuse crate::error::{ManagerError, Result};\nuse crate::selinux::{detect_selinux, volume_suffix, SELinuxMode};\nuse crate::types::*;\n\n// ======================================================================\n// CLI types\n// ======================================================================\n\nfn build_help_template() -> String {\n    let b = Style::new().bold().render();\n    let bu = Style::new().bold().underline().render();\n    let r = \"\\x1b[0m\"; // full ANSI reset\n\n    format!(\n        \"\\\n{{name}} - {{about}}\n\n{{usage-heading}} {{usage}}\n\n{bu}Development{r}\n  {b}setup{r}      Configure the default container engine\n  {b}new{r}        Build a new morloc environment\n  {b}run{r}        Run a command in the active environment\n  {b}rm{r}         Remove a morloc environment\n  {b}ls{r}         List morloc environments\n  {b}info{r}       Show configuration and installed environments\n  {b}select{r}     Select an environment\n  {b}update{r}     Rebuild an environment\n  {b}nuke{r}       Remove all morloc environments\n\n{bu}Deployment{r}\n  {b}start{r}      Serve an environment over the network\n  {b}stop{r}       Stop a running serve container\n  {b}logs{r}       Stream logs from a running serve container\n  {b}freeze{r}     Export installed state as a frozen artifact\n  {b}unfreeze{r}   Build a portable serve image from frozen state\n  {b}status{r}     List running serve containers\n  {b}doctor{r}     Check environment health and diagnose issues\n\n{bu}Options{r}\n{{options}}\"\n    )\n}\n\n#[derive(Parser)]\n#[command(name = \"morloc-manager\")]\n#[command(about = \"container lifecycle manager for Morloc\")]\n#[command(long_about = \"Manage containerized Morloc installations, dependency layers, and deployments\")]\n#[command(disable_version_flag = true)]\n#[command(arg_required_else_help = true)]\n#[command(hide_possible_values = true)]\nstruct Cli {\n    /// Print container commands to stderr before executing\n    #[arg(short, long, global = true)]\n    verbose: bool,\n\n    /// Output machine-readable JSON instead of human-readable text\n    #[arg(long, global = true)]\n    json: bool,\n\n    /// Print version and exit\n    #[arg(long)]\n    version: bool,\n\n    #[command(subcommand)]\n    command: Option<Cmd>,\n}\n\n#[derive(Subcommand)]\nenum Cmd {\n    // -- Development --\n    /// Configure the default container engine\n    #[command(display_order = 0)]\n    #[command(after_help = \"Examples:\\n  morloc-manager setup --engine podman\\n  morloc-manager setup --engine docker\\n  sudo morloc-manager setup --engine podman --system\")]\n    Setup {\n        /// Container engine: podman or docker\n        #[arg(long, value_enum)]\n        engine: Option<EngineArg>,\n        /// Apply to system scope (requires root)\n        #[arg(long)]\n        system: bool,\n    },\n    /// Build a new morloc environment\n    #[command(display_order = 1)]\n    #[command(after_help = \"Examples:\\n  morloc-manager new\\n  morloc-manager new myenv --version 0.73.0\\n  morloc-manager new myenv --tag edge\\n  morloc-manager new myenv --image ubuntu:22.04 --dockerfile ./Dockerfile\\n\\nDefault (when --version, --tag, and --image are all omitted): pulls the\\n:edge tag from the morloc registry and records the resolved version.\\n\\nIn non-interactive mode (no TTY), if no name is given, the latest edge\\nimage is pulled and the environment is named after the detected morloc\\nversion.\")]\n    New {\n        /// Environment name (default: derived from base image version)\n        name: Option<String>,\n        /// Base image from Docker Hub or a registry\n        #[arg(long)]\n        image: Option<String>,\n        /// Morloc version (MAJOR.MINOR.PATCH, leading 'v' stripped automatically)\n        #[arg(long)]\n        version: Option<String>,\n        /// Container image tag (e.g., 'edge', 'nightly')\n        #[arg(long, conflicts_with_all = [\"version\", \"image\"])]\n        tag: Option<String>,\n        /// Dockerfile to layer on top of the base image\n        #[arg(long)]\n        dockerfile: Option<String>,\n        /// Generate a stub Dockerfile for customization\n        #[arg(long)]\n        dockerfile_stub: bool,\n        /// Force overwrite of existing Dockerfile stub\n        #[arg(long)]\n        force: bool,\n        /// Include file/dir in build context; use src:dest for explicit placement (repeatable)\n        #[arg(short = 'i', long = \"include\")]\n        include: Vec<String>,\n        /// Path to a file with one engine argument per line\n        #[arg(long)]\n        flagfile: Option<String>,\n        /// A single engine flag (may be repeated)\n        #[arg(short = 'x', long = \"engine-arg\", allow_hyphen_values = true)]\n        engine_arg: Vec<String>,\n        /// Container engine: podman or docker\n        #[arg(long, value_enum)]\n        engine: Option<EngineArg>,\n        /// Shared memory size (default: 512m)\n        #[arg(long)]\n        shm_size: Option<String>,\n        /// Create in system scope (requires root)\n        #[arg(long)]\n        system: bool,\n        /// Skip morloc init after creation\n        #[arg(long)]\n        no_init: bool,\n        /// Skip interactive wizard, use defaults for unspecified options\n        #[arg(long)]\n        non_interactive: bool,\n    },\n    /// Run a command in the active environment\n    #[command(display_order = 2)]\n    #[command(after_help = \"\\\nExamples:\n  morloc-manager run -- morloc --version\n  morloc-manager run -- morloc make -o svc svc.loc\n  morloc-manager run -- morloc install math\n  morloc-manager run --shell\n\nUse -- to separate morloc-manager flags from the container command.\nWithout --, flags like --version are interpreted by morloc-manager itself.\")]\n    Run {\n        /// Command to run inside the container\n        command: Vec<String>,\n        /// Start an interactive shell\n        #[arg(long)]\n        shell: bool,\n        /// Pass environment variable to the container (KEY=VALUE)\n        #[arg(short, long = \"env\")]\n        env_vars: Vec<String>,\n        /// Read environment variables from a file (one KEY=VALUE per line)\n        #[arg(long)]\n        env_file: Option<String>,\n    },\n    /// Remove a morloc environment\n    #[command(display_order = 3)]\n    #[command(after_help = \"Examples:\\n  morloc-manager rm myenv\\n  sudo morloc-manager rm myenv --system\")]\n    Rm {\n        /// Environment name(s) to remove\n        names: Vec<String>,\n        /// Remove from system scope (requires root)\n        #[arg(long)]\n        system: bool,\n        /// Remove even if active (deactivates first)\n        #[arg(long)]\n        force: bool,\n    },\n    /// Remove all morloc environments\n    #[command(display_order = 8)]\n    #[command(after_help = \"Examples:\\n  morloc-manager nuke\\n  morloc-manager nuke --yes\\n  morloc-manager nuke --images\\n  sudo morloc-manager nuke --system\\n  sudo morloc-manager nuke --system --images --yes\")]\n    Nuke {\n        /// Remove system-scope environments instead of local (requires root)\n        #[arg(long)]\n        system: bool,\n        /// Also remove base container images\n        #[arg(long)]\n        images: bool,\n        /// Skip confirmation prompt\n        #[arg(long)]\n        yes: bool,\n    },\n    /// List morloc environments\n    #[command(display_order = 4)]\n    #[command(after_help = \"Examples:\\n  morloc-manager ls\\n  morloc-manager ls --system\")]\n    Ls {\n        /// Show only system environments\n        #[arg(long)]\n        system: bool,\n        /// Show only local environments\n        #[arg(long)]\n        local: bool,\n    },\n    /// Show configuration and installed environments\n    #[command(display_order = 5)]\n    #[command(after_help = \"Examples:\\n  morloc-manager info\\n  morloc-manager info myenv\")]\n    Info {\n        /// Environment name (show details for this environment)\n        name: Option<String>,\n        /// Look up the system-scope environment (when name is shadowed locally)\n        #[arg(long)]\n        system: bool,\n    },\n    /// Select an environment\n    #[command(display_order = 6)]\n    #[command(after_help = \"Examples:\\n  morloc-manager select myenv\\n  sudo morloc-manager select myenv --system\")]\n    Select {\n        /// Environment name\n        name: String,\n        /// Write to system config instead of local (requires root)\n        #[arg(long)]\n        system: bool,\n    },\n\n    /// Rebuild an environment\n    #[command(display_order = 7)]\n    #[command(after_help = \"Examples:\\n  morloc-manager update              # rebuild active environment\\n  morloc-manager update myenv        # rebuild a specific environment\\n  morloc-manager update --shm-size 1g\\n  morloc-manager update --dockerfile ./new.Dockerfile -i ./data\\n  morloc-manager update myenv --reinit  # re-run morloc init in myenv\")]\n    Update {\n        /// Environment name (default: active environment)\n        name: Option<String>,\n        /// Change the base image\n        #[arg(long)]\n        image: Option<String>,\n        /// Change to a specific morloc version (MAJOR.MINOR.PATCH, leading 'v' stripped)\n        #[arg(long)]\n        version: Option<String>,\n        /// Container image tag (e.g., 'edge', 'nightly')\n        #[arg(long, conflicts_with_all = [\"version\", \"image\"])]\n        tag: Option<String>,\n        /// Replace the Dockerfile\n        #[arg(long)]\n        dockerfile: Option<String>,\n        /// Include file/dir in build context; use src:dest for explicit placement (repeatable)\n        #[arg(short = 'i', long = \"include\")]\n        include: Vec<String>,\n        /// Replace the flags file\n        #[arg(long)]\n        flagfile: Option<String>,\n        /// Add an engine flag (repeatable; appends unless --flagfile replaces)\n        #[arg(short = 'x', long = \"engine-arg\", allow_hyphen_values = true)]\n        engine_arg: Vec<String>,\n        /// Change the container engine\n        #[arg(long, value_enum)]\n        engine: Option<EngineArg>,\n        /// Change shared memory size\n        #[arg(long)]\n        shm_size: Option<String>,\n        /// Generate a stub Dockerfile (fails if one already exists)\n        #[arg(long)]\n        dockerfile_stub: bool,\n        /// Force overwrite of existing Dockerfile stub\n        #[arg(long)]\n        force: bool,\n        /// Skip Dockerfile build\n        #[arg(long)]\n        no_build: bool,\n        /// Re-run morloc init\n        #[arg(long)]\n        reinit: bool,\n        /// Accepted for scripting uniformity with `new` (no effect)\n        #[arg(long, hide = true)]\n        non_interactive: bool,\n    },\n\n    // -- Deployment --\n    /// Serve an environment over the network\n    #[command(display_order = 20)]\n    #[command(after_help = \"Examples:\\n  morloc-manager start              # serve active environment\\n  morloc-manager start myenv -p 9090:8080\")]\n    Start {\n        /// Environment name (default: active environment)\n        name: Option<String>,\n        /// Port mapping HOST:CONTAINER (default: 8080:8080)\n        #[arg(short, long, value_parser = parse_port)]\n        port: Vec<(u16, u16)>,\n        /// Pass environment variable to the container (KEY=VALUE)\n        #[arg(short, long = \"env\")]\n        env_vars: Vec<String>,\n        /// Read environment variables from a file (one KEY=VALUE per line)\n        #[arg(long)]\n        env_file: Option<String>,\n        /// Replace an already-running serve container\n        #[arg(long)]\n        force: bool,\n    },\n    /// Stop a running serve container\n    #[command(display_order = 21)]\n    #[command(after_help = \"Examples:\\n  morloc-manager stop              # stop active environment\\n  morloc-manager stop myenv\")]\n    Stop {\n        /// Environment name (default: active environment)\n        name: Option<String>,\n    },\n    /// Stream logs from a running serve container\n    #[command(display_order = 22)]\n    #[command(after_help = \"Examples:\\n  morloc-manager logs              # logs from only running serve container\\n  morloc-manager logs myenv\\n  morloc-manager logs -f myenv     # follow mode\")]\n    Logs {\n        /// Environment name (default: auto-detect running container)\n        name: Option<String>,\n        /// Follow log output\n        #[arg(short, long)]\n        follow: bool,\n    },\n    /// Export installed state as a frozen artifact\n    #[command(display_order = 23)]\n    #[command(after_help = \"Examples:\\n  morloc-manager freeze\\n  morloc-manager freeze myenv\\n  morloc-manager freeze -o ./my-freeze\\n\\nRequires at least one program compiled with 'morloc make --install'.\")]\n    Freeze {\n        /// Environment name (default: active environment)\n        name: Option<String>,\n        /// Output directory (default: ./morloc-freeze)\n        #[arg(short, long)]\n        output: Option<String>,\n        /// Overwrite existing output directory\n        #[arg(long)]\n        force: bool,\n    },\n    /// Build a serve image from frozen state\n    #[command(display_order = 24)]\n    #[command(after_help = \"Examples:\\n  morloc-manager unfreeze --from ./morloc-freeze/state.tar.gz -t myservice:v1\\n  morloc-manager unfreeze --from ./state.tar.gz -t svc:v1 --engine docker\")]\n    Unfreeze {\n        /// Path to state.tar.gz from freeze\n        #[arg(long)]\n        from: String,\n        /// Image tag\n        #[arg(short, long)]\n        tag: String,\n        /// Base image override\n        #[arg(long)]\n        base: Option<String>,\n        /// Container engine override (default: configured engine).\n        /// Images frozen with engine-specific flags may not work with a different engine.\n        #[arg(long, value_enum)]\n        engine: Option<EngineArg>,\n        /// Rebuild image even if it already exists locally\n        #[arg(long)]\n        rebuild: bool,\n    },\n    /// Evaluate a morloc expression against a running serve container\n    #[command(display_order = 25)]\n    #[command(after_help = \"Examples:\\n  morloc-manager eval 'add 1 2'\\n  morloc-manager eval myenv 'map (add 1) [1,2,3]'\\n  morloc-manager eval -p 9090 'greet \\\"world\\\"'\")]\n    Eval {\n        /// Expression to evaluate (or environment name if two positional args)\n        first: String,\n        /// Expression to evaluate (when first arg is environment name)\n        second: Option<String>,\n        /// Port of the serve container (default: 8080)\n        #[arg(short, long, default_value = \"8080\")]\n        port: u16,\n    },\n    /// List running serve containers\n    #[command(display_order = 26)]\n    #[command(after_help = \"Examples:\\n  morloc-manager status\")]\n    Status,\n    /// Check environment health and diagnose issues\n    #[command(display_order = 26)]\n    #[command(after_help = \"Examples:\\n  morloc-manager doctor\\n  morloc-manager doctor myenv\\n  morloc-manager doctor --deep\")]\n    Doctor {\n        /// Environment name (default: active)\n        name: Option<String>,\n        /// Check system-scope environment\n        #[arg(long)]\n        system: bool,\n        /// Run checks inside the container (slower, more thorough)\n        #[arg(long)]\n        deep: bool,\n        /// Treat warnings as errors (non-zero exit on warnings)\n        #[arg(long)]\n        strict: bool,\n    },\n}\n\n#[derive(Clone, ValueEnum)]\nenum EngineArg {\n    Docker,\n    Podman,\n}\n\nimpl From<EngineArg> for ContainerEngine {\n    fn from(e: EngineArg) -> Self {\n        match e {\n            EngineArg::Docker => ContainerEngine::Docker,\n            EngineArg::Podman => ContainerEngine::Podman,\n        }\n    }\n}\n\nfn parse_port(s: &str) -> std::result::Result<(u16, u16), String> {\n    let parts: Vec<&str> = s.splitn(2, ':').collect();\n    if parts.len() != 2 {\n        return Err(format!(\"Expected HOST:CONTAINER format, got: {s}\"));\n    }\n    let host: u16 = parts[0]\n        .parse()\n        .map_err(|_| format!(\"Invalid host port: {}\", parts[0]))?;\n    let container: u16 = parts[1]\n        .parse()\n        .map_err(|_| format!(\"Invalid container port: {}\", parts[1]))?;\n    Ok((host, container))\n}\n\n/// Parse env vars from --env flags and --env-file, returning (key, value) pairs.\nfn collect_env_vars(\n    env_flags: &[String],\n    env_file: Option<&str>,\n) -> Result<Vec<(String, String)>> {\n    let mut result = Vec::new();\n\n    if let Some(path) = env_file {\n        let contents = std::fs::read_to_string(path).map_err(|e| {\n            ManagerError::EnvError(format!(\"Cannot read env file {path}: {e}\"))\n        })?;\n        for line in contents.lines() {\n            let trimmed = line.trim();\n            if trimmed.is_empty() || trimmed.starts_with('#') {\n                continue;\n            }\n            if let Some((k, v)) = trimmed.split_once('=') {\n                result.push((k.to_string(), v.to_string()));\n            }\n        }\n    }\n\n    for entry in env_flags {\n        if let Some((k, v)) = entry.split_once('=') {\n            result.push((k.to_string(), v.to_string()));\n        } else {\n            // Bare key — pass through from host environment\n            if let Ok(v) = std::env::var(entry) {\n                result.push((entry.clone(), v));\n            } else {\n                eprintln!(\"Warning: env var '{entry}' not set in host environment, skipping\");\n            }\n        }\n    }\n\n    Ok(result)\n}\n\n// ======================================================================\n// Main\n// ======================================================================\n\nfn main() -> ExitCode {\n    #[cfg(unix)]\n    {\n        use nix::sys::signal::{signal, SigHandler, Signal};\n        unsafe { let _ = signal(Signal::SIGPIPE, SigHandler::SigDfl); }\n    }\n\n    let matches = match Cli::command()\n        .help_template(build_help_template())\n        .try_get_matches()\n    {\n        Ok(m) => m,\n        Err(e) => {\n            // Detect missing -- separator for the run subcommand\n            let rendered = e.to_string();\n            if rendered.contains(\"unrecognized\") || rendered.contains(\"unexpected\") {\n                let args: Vec<String> = std::env::args().collect();\n                if args.len() > 1 && args[1] == \"run\" {\n                    let inner: Vec<&str> = args[2..].iter()\n                        .filter(|a| *a != \"--shell\")\n                        .map(|a| a.as_str())\n                        .collect();\n                    if !inner.is_empty() {\n                        eprintln!(\"Error: unrecognized arguments for 'run'.\");\n                        eprintln!();\n                        eprintln!(\"Use -- to separate morloc-manager flags from the container command:\");\n                        eprintln!(\"  morloc-manager run -- {}\", inner.join(\" \"));\n                        return ExitCode::from(2);\n                    }\n                }\n            }\n            e.exit();\n        }\n    };\n    let cli = Cli::from_arg_matches(&matches).unwrap();\n    if cli.version {\n        println!(\"morloc-manager {}\", env!(\"CARGO_PKG_VERSION\"));\n        return ExitCode::SUCCESS;\n    }\n    let Some(cmd) = cli.command else {\n        Cli::command()\n            .help_template(build_help_template())\n            .print_help()\n            .ok();\n        return ExitCode::from(2);\n    };\n    match dispatch(cli.verbose, cli.json, cmd) {\n        Ok(()) => ExitCode::SUCCESS,\n        Err(err) => {\n            if cli.json {\n                println!(\"{}\", serde_json::json!({\"error\": format!(\"{err}\")}));\n            } else {\n                eprintln!(\"{err}\");\n            }\n            if let ManagerError::EngineError { code, .. } = &err {\n                ExitCode::from(*code as u8)\n            } else {\n                ExitCode::FAILURE\n            }\n        }\n    }\n}\n\nfn resolve_scope(system: bool) -> Scope {\n    if system { Scope::System } else { Scope::Local }\n}\n\nfn check_system_write_access() -> Result<()> {\n    let sys_dir = cfg::config_dir(Scope::System);\n    if sys_dir.exists() {\n        let test_path = sys_dir.join(\".write-check\");\n        match fs::write(&test_path, b\"\") {\n            Ok(_) => { let _ = fs::remove_file(&test_path); Ok(()) }\n            Err(_) => Err(ManagerError::ConfigPermissionDenied(format!(\n                \"{}. System-scope operations require root. Re-run with sudo\",\n                sys_dir.display()\n            )))\n        }\n    } else {\n        match fs::create_dir_all(&sys_dir) {\n            Ok(_) => Ok(()),\n            Err(_) => Err(ManagerError::ConfigPermissionDenied(format!(\n                \"{}. System-scope operations require root. Re-run with sudo\",\n                sys_dir.display()\n            )))\n        }\n    }\n}\n\n/// Resolve an environment by explicit name or fall back to the active environment.\nfn resolve_env_or_active(name: Option<String>) -> Result<(String, Scope, EnvironmentConfig)> {\n    match name {\n        Some(n) => {\n            let scope = cfg::find_env_scope(&n)?;\n            let ec = cfg::read_env_config(scope, &n)?;\n            Ok((n, scope, ec))\n        }\n        None => environment::resolve_active_environment(),\n    }\n}\n\nfn ensure_engine() -> Result<ContainerEngine> {\n    if let Some(cfg) = cfg::read_active_config() {\n        return Ok(cfg.engine);\n    }\n    Err(ManagerError::SetupNotComplete(Scope::Local))\n}\n\nfn which(name: &str) -> bool {\n    Command::new(\"which\")\n        .arg(name)\n        .stdout(Stdio::null())\n        .stderr(Stdio::null())\n        .status()\n        .map(|s| s.success())\n        .unwrap_or(false)\n}\n\nfn display_engine(engine: ContainerEngine) -> &'static str {\n    match engine {\n        ContainerEngine::Docker => \"docker\",\n        ContainerEngine::Podman => \"podman\",\n    }\n}\n\nfn bold_green(msg: &str) -> String {\n    if io::stderr().is_terminal() {\n        format!(\"\\x1b[1;32m{msg}\\x1b[0m\")\n    } else {\n        msg.to_string()\n    }\n}\n\nfn check_docker_socket(engine: ContainerEngine) {\n    use std::path::Path;\n    if engine != ContainerEngine::Docker {\n        return;\n    }\n    let socket = Path::new(\"/var/run/docker.sock\");\n    if !socket.exists() {\n        eprintln!(\"Warning: Docker socket not found at /var/run/docker.sock\");\n        eprintln!(\"  Docker may not be installed or the daemon may not be running.\");\n    } else if nix::unistd::access(socket, nix::unistd::AccessFlags::R_OK).is_err() {\n        eprintln!(\"Warning: Cannot access Docker socket. You may need to:\");\n        eprintln!(\"  sudo usermod -aG docker $USER  # then log out and back in\");\n    }\n}\n\n/// Returns Err with a clear message if Docker is selected but its socket is unreachable.\nfn require_docker_socket(engine: ContainerEngine) -> Result<()> {\n    use std::path::Path;\n    if engine != ContainerEngine::Docker {\n        return Ok(());\n    }\n    let socket = Path::new(\"/var/run/docker.sock\");\n    if !socket.exists() {\n        return Err(ManagerError::EnvError(\n            \"Docker socket not found at /var/run/docker.sock. Ensure Docker is installed and the daemon is running.\".to_string()\n        ));\n    }\n    if nix::unistd::access(socket, nix::unistd::AccessFlags::R_OK).is_err() {\n        return Err(ManagerError::EnvError(\n            \"Cannot access Docker socket. Add your user to the docker group:\\n  \\\n             sudo usermod -aG docker $USER  # then log out and back in\".to_string()\n        ));\n    }\n    Ok(())\n}\n\n/// Check if Podman is configured to see rootful images from rootless contexts.\n/// Returns true if additionalimagestore is configured (or not needed).\nfn check_podman_additional_stores(engine: ContainerEngine) -> bool {\n    if engine != ContainerEngine::Podman {\n        return true;\n    }\n    // Root doesn't need additional stores — it owns the store\n    if nix::unistd::getuid().is_root() {\n        return true;\n    }\n    let rootful_store = std::path::Path::new(\"/var/lib/containers/storage\");\n    if !rootful_store.is_dir() {\n        // No rootful store exists, nothing to configure\n        return true;\n    }\n    // Check system and user storage.conf for additionalimagestores\n    for path in &[\n        \"/etc/containers/storage.conf\",\n        &format!(\n            \"{}/.config/containers/storage.conf\",\n            dirs::home_dir()\n                .unwrap_or_default()\n                .to_string_lossy()\n        ),\n    ] {\n        if let Ok(contents) = fs::read_to_string(path) {\n            if contents.contains(\"/var/lib/containers/storage\") {\n                return true;\n            }\n        }\n    }\n    false\n}\n\nfn warn_podman_additional_stores() {\n    eprintln!(\"Warning: Podman is not configured to see system (rootful) images.\");\n    eprintln!(\"  Non-root users will not be able to run system environments.\");\n    eprintln!(\"  Option 1 (recommended): Use Docker for system environments.\");\n    eprintln!(\"  Option 2: Add to [storage.options] in /etc/containers/storage.conf:\");\n    eprintln!();\n    eprintln!(\"    additionalimagestores = [\\\"/var/lib/containers/storage\\\"]\");\n    eprintln!();\n    eprintln!(\"  Note: Option 2 may cause storage locking conflicts on Fedora and Debian.\");\n}\n\n// ======================================================================\n// Dispatch\n// ======================================================================\n\nfn dispatch(verbose: bool, json: bool, cmd: Cmd) -> Result<()> {\n    match cmd {\n        // ---- setup ----\n        Cmd::Setup { engine, system } => {\n            // With no --engine, show the current engine settings\n            if engine.is_none() {\n                let local = cfg::read_config::<Config>(&cfg::config_path(Scope::Local)).ok();\n                let sys = cfg::read_config::<Config>(&cfg::config_path(Scope::System)).ok();\n                println!(\"Local engine:   {}\",\n                    local.as_ref().map(|c| display_engine(c.engine)).unwrap_or(\"unset\"));\n                println!(\"System engine:  {}\",\n                    sys.as_ref().map(|c| display_engine(c.engine)).unwrap_or(\"unset\"));\n                println!();\n                println!(\"Set with: morloc-manager setup --engine <podman|docker>\");\n                return Ok(());\n            }\n            if system { check_system_write_access()?; }\n            let scope = resolve_scope(system);\n            let eng: ContainerEngine = engine.unwrap().into();\n            check_docker_socket(eng);\n            let cfg_path = cfg::config_path(scope);\n            let base_cfg = cfg::read_config::<Config>(&cfg_path).unwrap_or_default();\n            let new_cfg = Config {\n                engine: eng,\n                ..base_cfg\n            };\n            cfg::write_config(&cfg_path, &new_cfg)?;\n            eprintln!(\"Engine set to: {}\", display_engine(eng));\n            Ok(())\n        }\n\n        // ---- new ----\n        Cmd::New {\n            name,\n            image,\n            version,\n            tag,\n            dockerfile,\n            dockerfile_stub,\n            force,\n            include,\n            flagfile,\n            engine_arg,\n            engine,\n            shm_size,\n            system,\n            no_init,\n            non_interactive,\n        } => {\n            if system { check_system_write_access()?; }\n            let scope = resolve_scope(system);\n\n            // Resolve engine: explicit flag > config default > auto-detect single > error\n            // For --system, prefer system config so the env uses the system engine.\n            let resolved_engine = if let Some(e) = engine {\n                let eng: ContainerEngine = e.into();\n                check_docker_socket(eng);\n                eng\n            } else if let Some(cfg) = if system {\n                // System scope: check system config first, then local\n                cfg::read_config::<Config>(&cfg::config_path(Scope::System)).ok()\n                    .or_else(|| cfg::read_active_config())\n            } else {\n                cfg::read_active_config()\n            } {\n                cfg.engine\n            } else {\n                // No config — try auto-detection\n                let has_podman = which(\"podman\");\n                let has_docker = which(\"docker\");\n                match (has_podman, has_docker) {\n                    (true, false) => ContainerEngine::Podman,\n                    (false, true) => {\n                        check_docker_socket(ContainerEngine::Docker);\n                        ContainerEngine::Docker\n                    }\n                    (true, true) => {\n                        let scope_flag = if system { \" --system\" } else { \"\" };\n                        return Err(ManagerError::EnvError(format!(\n                            \"Both podman and docker are installed and no default is set.\\n\\\n                             Pick one with:\\n  \\\n                             morloc-manager setup --engine podman{scope_flag}\\n  \\\n                             morloc-manager setup --engine docker{scope_flag}\\n\\\n                             Or pass --engine to this command directly.\"\n                        )));\n                    }\n                    (false, false) => return Err(ManagerError::EngineNotFound),\n                }\n            };\n\n            // Ensure config exists (write default if first run)\n            if cfg::read_active_config().is_none() {\n                let cfg_path = cfg::config_path(scope);\n                let new_cfg = Config {\n                    active_env: None,\n                    engine: resolved_engine,\n                };\n                cfg::write_config(&cfg_path, &new_cfg)?;\n            }\n\n            let interactive = !non_interactive && io::stdin().is_terminal();\n            if !non_interactive && !interactive {\n                eprintln!(\"Note: No TTY detected, running in non-interactive mode.\");\n            }\n\n            // Step 1: Resolve name (ask first so user isn't surprised after a long pull)\n            let env_name = if let Some(n) = name {\n                if cfg::env_config_path(scope, &n).is_file() {\n                    return Err(ManagerError::EnvError(format!(\n                        \"Environment '{n}' already exists\"\n                    )));\n                }\n                n\n            } else if interactive {\n                loop {\n                    eprint!(\"Environment name: \");\n                    io::stderr().flush().ok();\n                    let mut name_input = String::new();\n                    io::stdin().read_line(&mut name_input).ok();\n                    let n = name_input.trim().to_string();\n                    if n.is_empty() {\n                        eprintln!(\"Name cannot be empty.\");\n                        continue;\n                    }\n                    if cfg::env_config_path(scope, &n).is_file() {\n                        eprintln!(\"Environment '{n}' already exists. Choose a different name.\");\n                        continue;\n                    }\n                    break n;\n                }\n            } else {\n                // Non-interactive without a name: will be filled in after\n                // version resolution below (default to version string)\n                String::new()\n            };\n\n            // Validate name early (before potentially slow image pull)\n            if !env_name.is_empty() {\n                environment::validate_env_name(&env_name)?;\n            }\n\n            if version.is_some() && image.is_some() {\n                return Err(ManagerError::EnvError(\n                    \"--version and --image are mutually exclusive\".to_string()\n                ));\n            }\n\n            // Validate cheap-to-check parameters before any I/O\n            if let Some(ref shm) = shm_size {\n                if !environment::is_valid_shm_size(shm) {\n                    return Err(ManagerError::EnvError(format!(\n                        \"Invalid --shm-size '{shm}'. Use format like: 512m, 1g, 2048k\"\n                    )));\n                }\n            }\n\n            // Step 2: Resolve base image and version\n            let (base_image, original_image, morloc_ver) = if let Some(ref ver_str) = version {\n                // Strip leading 'v' for convenience (e.g., \"v0.77.0\" -> \"0.77.0\")\n                let clean = ver_str.strip_prefix('v').unwrap_or(ver_str);\n                let ver: Version = clean.parse().map_err(|_| {\n                    ManagerError::InvalidVersion(ver_str.clone())\n                })?;\n                let img = environment::pull_version_image(resolved_engine, &ver)?;\n                (img, None, Some(ver))\n            } else if let Some(ref t) = tag {\n                let (img, ver) = environment::pull_tagged_image(resolved_engine, t)?;\n                (img, None, Some(ver))\n            } else if let Some(ref img) = image {\n                environment::pull_custom_image(resolved_engine, img)?;\n                (img.clone(), None, None)\n            } else if interactive {\n                eprintln!(\"Choose a base image:\");\n                eprintln!(\"  [1] Latest morloc release (recommended)\");\n                eprintln!(\"  [2] Specific morloc version\");\n                eprintln!(\"  [3] Custom image\");\n                eprint!(\"Choose [1]: \");\n                io::stderr().flush().ok();\n                let mut input = String::new();\n                io::stdin().read_line(&mut input).ok();\n                match input.trim() {\n                    \"2\" => {\n                        eprint!(\"Morloc version: \");\n                        io::stderr().flush().ok();\n                        let mut ver_input = String::new();\n                        io::stdin().read_line(&mut ver_input).ok();\n                        let ver: Version = ver_input.trim().parse().map_err(|_| {\n                            ManagerError::InvalidVersion(ver_input.trim().to_string())\n                        })?;\n                        let img = environment::pull_version_image(resolved_engine, &ver)?;\n                        (img, None, Some(ver))\n                    }\n                    \"3\" => {\n                        eprint!(\"Image reference: \");\n                        io::stderr().flush().ok();\n                        let mut img_input = String::new();\n                        io::stdin().read_line(&mut img_input).ok();\n                        let img = img_input.trim().to_string();\n                        if img.is_empty() {\n                            return Err(ManagerError::EnvError(\"No image specified\".to_string()));\n                        }\n                        environment::pull_custom_image(resolved_engine, &img)?;\n                        (img, None, None)\n                    }\n                    _ => {\n                        let (img, ver) = environment::resolve_latest(resolved_engine)?;\n                        (img.clone(), Some(img), Some(ver))\n                    }\n                }\n            } else {\n                let (img, ver) = environment::resolve_latest(resolved_engine)?;\n                (img.clone(), Some(img), Some(ver))\n            };\n\n            // Fill in name for non-interactive mode if it wasn't provided\n            let env_name = if env_name.is_empty() {\n                if let Some(ref ver) = morloc_ver {\n                    let default_name = ver.show();\n                    if cfg::env_config_path(scope, &default_name).is_file() {\n                        return Err(ManagerError::EnvError(format!(\n                            \"Environment '{}' already exists. Specify a different name: morloc-manager new <NAME> ...\",\n                            default_name\n                        )));\n                    }\n                    default_name\n                } else {\n                    return Err(ManagerError::EnvError(\n                        \"Environment name required in non-interactive mode\".to_string(),\n                    ));\n                }\n            } else {\n                env_name\n            };\n\n            // Resolve dockerfile: explicit path takes precedence, then stub generation\n            let resolved_dockerfile = if dockerfile.is_some() {\n                if dockerfile_stub {\n                    return Err(ManagerError::EnvError(\n                        \"Cannot use both --dockerfile and --dockerfile-stub\".to_string(),\n                    ));\n                }\n                dockerfile\n            } else if dockerfile_stub {\n                let df_path = cfg::env_dockerfile_path(scope, &env_name);\n                if df_path.exists() && !force {\n                    return Err(ManagerError::EnvError(format!(\n                        \"Dockerfile already exists: {}\\nUse --force to overwrite.\",\n                        df_path.display()\n                    )));\n                }\n                let stub_dir = cfg::data_dir(scope).join(\"tmp\");\n                fs::create_dir_all(&stub_dir).map_err(|e| {\n                    ManagerError::EnvError(format!(\"Failed to create tmp dir: {e}\"))\n                })?;\n                let stub_path = stub_dir.join(format!(\"{env_name}.Dockerfile\"));\n                let stub_content = format!(\n                    \"# morloc environment: {env_name}\\n\\\n                     # Edit this file, then rebuild with: morloc-manager update\\n\\\n                     \\n\\\n                     # CONTAINER_BASE is replaced at build time with the environment's base image\\n\\\n                     ARG CONTAINER_BASE=scratch\\n\\\n                     FROM ${{CONTAINER_BASE}}\\n\\\n                     \\n\\\n                     # Example: install system packages\\n\\\n                     # RUN apt-get update && apt-get install -y jq && rm -rf /var/lib/apt/lists/*\\n\\\n                     \\n\\\n                     # Example: install Python packages\\n\\\n                     # RUN pip install scikit-learn pandas\\n\\\n                     \\n\\\n                     # Example: install R packages\\n\\\n                     # RUN R -e \\\"install.packages('ggplot2', repos='https://cloud.r-project.org')\\\"\\n\"\n                );\n                fs::write(&stub_path, &stub_content).map_err(|e| {\n                    ManagerError::EnvError(format!(\"Failed to write stub Dockerfile: {e}\"))\n                })?;\n                Some(stub_path.to_string_lossy().to_string())\n            } else {\n                None\n            };\n\n            let opts = environment::ApplyOptions {\n                name: env_name.clone(),\n                scope,\n                is_new: true,\n                base_image: Some(base_image),\n                original_image,\n                morloc_version: morloc_ver,\n                dockerfile: resolved_dockerfile,\n                includes: include,\n                flagfile,\n                engine_args: engine_arg,\n                engine: Some(resolved_engine),\n                shm_size: Some(shm_size.unwrap_or_else(|| \"512m\".to_string())),\n                skip_dockerfile_build: dockerfile_stub,\n                verbose,\n            };\n\n            environment::apply_environment(&opts)?;\n\n            if dockerfile_stub {\n                let df_path = cfg::env_dockerfile_path(scope, &env_name);\n                eprintln!(\"Stub Dockerfile: {}\", df_path.display());\n                eprintln!(\"Edit it, then run: morloc-manager update {env_name}\");\n            }\n\n            eprintln!(\"Created environment: {env_name}\");\n\n            // Run morloc init, passing the env explicitly (no active env needed)\n            if !no_init {\n                let ec = cfg::read_env_config(scope, &env_name)?;\n                run_morloc_init_for(Some((env_name.clone(), scope, ec)), verbose)?;\n            } else {\n                eprintln!(\"Warning: --no-init was used. Run 'morloc-manager run -- morloc init -f' before building morloc programs.\");\n            }\n\n            eprintln!(\"{}\", bold_green(&format!(\"Environment '{env_name}' is ready.\")));\n            eprintln!(\"Activate it with: morloc-manager select {env_name}\");\n\n            if system && !check_podman_additional_stores(resolved_engine) {\n                eprintln!();\n                warn_podman_additional_stores();\n            }\n\n            Ok(())\n        }\n\n        // ---- run ----\n        Cmd::Run { command, shell, env_vars, env_file } => {\n            if !shell && command.is_empty() {\n                return Err(ManagerError::NoCommand);\n            }\n            let user_env = collect_env_vars(&env_vars, env_file.as_deref())?;\n            run_in_container(verbose, shell, &command, &user_env).map_err(|e| match e {\n                ManagerError::EnvironmentNotFound(msg) => ManagerError::EnvironmentNotFound(\n                    format!(\"{msg}. Run 'morloc-manager new' to create an environment\")\n                ),\n                other => other,\n            })\n        }\n\n        // ---- rm ----\n        Cmd::Rm { names, system, force } => {\n            if system { check_system_write_access()?; }\n            if names.is_empty() {\n                return Err(ManagerError::EnvError(\"No environment names specified\".to_string()));\n            }\n            // Capture current active env for post-removal feedback\n            let was_active = cfg::read_active_config().and_then(|c| c.active_env);\n            // Attempt each removal; collect failures, continue past errors\n            let mut failures: Vec<String> = Vec::new();\n            for name in &names {\n                let result: Result<()> = (|| {\n                    let scope = if system {\n                        Scope::System\n                    } else {\n                        cfg::find_env_scope(name)?\n                    };\n                    if scope == Scope::System && !system {\n                        check_system_write_access()?;\n                    }\n                    if !force {\n                        if let Some(cfg) = cfg::read_active_config() {\n                            if cfg.active_env.as_deref() == Some(name.as_str()) {\n                                return Err(ManagerError::EnvError(format!(\n                                    \"active environment (use --force)\"\n                                )));\n                            }\n                        }\n                    }\n                    let ec = cfg::read_env_config(scope, name)\n                        .map_err(|_| ManagerError::EnvironmentNotFound(name.to_string()))?;\n                    environment::remove_environment(ec.engine, scope, name)?;\n                    Ok(())\n                })();\n                match result {\n                    Ok(()) => {\n                        // Check if removed env was active and report new state\n                        if was_active.as_deref() == Some(name.as_str()) {\n                            match environment::resolve_active_environment() {\n                                Ok((new_active, _, _)) => {\n                                    // Persist the fallback as the new active environment\n                                    let _ = environment::select_environment(&new_active, Scope::Local);\n                                    eprintln!(\"Removed environment: {name}. Active environment is now: {new_active}\");\n                                }\n                                Err(_) => {\n                                    eprintln!(\"Removed environment: {name}. No active environment. Use: morloc-manager select <name>\");\n                                }\n                            }\n                        } else {\n                            eprintln!(\"Removed environment: {name}\");\n                        }\n                    }\n                    Err(e) => failures.push(format!(\"{name}: {e}\")),\n                }\n            }\n            if !failures.is_empty() {\n                eprintln!();\n                eprintln!(\"Failed to remove {} environment(s):\", failures.len());\n                for f in &failures {\n                    eprintln!(\"  {f}\");\n                }\n                return Err(ManagerError::EnvError(format!(\n                    \"{} of {} removals failed\",\n                    failures.len(),\n                    names.len()\n                )));\n            }\n            Ok(())\n        }\n\n        // ---- nuke ----\n        Cmd::Nuke { system, images, yes } => {\n            let scope = if system { Scope::System } else { Scope::Local };\n            let scope_label = if system { \"system\" } else { \"local\" };\n\n            if system {\n                check_system_write_access()?;\n            }\n\n            // Confirm before removing all environments\n            let env_names = cfg::list_env_names(scope);\n            if env_names.is_empty() {\n                eprintln!(\"No {scope_label} environments found.\");\n                return Ok(());\n            }\n\n            if !yes {\n                eprintln!(\"This will remove {} {scope_label} environment(s):\", env_names.len());\n                for n in &env_names {\n                    eprintln!(\"  {n}\");\n                }\n                if io::stdin().is_terminal() {\n                    eprint!(\"Continue? [y/N] \");\n                    io::stderr().flush().ok();\n                    let mut answer = String::new();\n                    io::stdin().read_line(&mut answer).ok();\n                    if !matches!(answer.trim(), \"y\" | \"yes\" | \"Y\" | \"YES\") {\n                        eprintln!(\"Aborted.\");\n                        return Ok(());\n                    }\n                } else {\n                    return Err(ManagerError::EnvError(\n                        \"nuke requires --yes for non-interactive use\".to_string(),\n                    ));\n                }\n            }\n\n            eprintln!(\"Removing all {scope_label} morloc environments...\");\n\n            // Collect env info before removal (configs are deleted during removal)\n            let mut env_list: Vec<(String, ContainerEngine)> = Vec::new();\n            let mut base_images: HashSet<String> = HashSet::new();\n\n            for name in cfg::list_env_names(scope) {\n                if let Ok(ec) = cfg::read_env_config(scope, &name) {\n                    if images {\n                        base_images.insert(ec.base_image.clone());\n                        if let Some(ref orig) = ec.original_image {\n                            base_images.insert(orig.clone());\n                        }\n                    }\n                    env_list.push((name, ec.engine));\n                }\n            }\n\n            if env_list.is_empty() {\n                eprintln!(\"No {scope_label} environments found.\");\n            } else {\n                let mut removed = 0usize;\n                let mut failures: Vec<String> = Vec::new();\n\n                for (name, engine) in &env_list {\n                    eprintln!(\"Removing environment: {name}...\");\n                    match environment::remove_environment(*engine, scope, name) {\n                        Ok(()) => {\n                            eprintln!(\"  Removed: {name}\");\n                            removed += 1;\n                        }\n                        Err(e) => {\n                            eprintln!(\"  Failed: {name}: {e}\");\n                            failures.push(format!(\"{name}: {e}\"));\n                        }\n                    }\n                }\n\n                // Clear active_env in the targeted scope's config\n                let cfg_path = cfg::config_path(scope);\n                if let Ok(cfg_data) = cfg::read_config::<Config>(&cfg_path) {\n                    if cfg_data.active_env.is_some() {\n                        let new_cfg = Config { active_env: None, ..cfg_data };\n                        let _ = cfg::write_config(&cfg_path, &new_cfg);\n                        eprintln!(\"Cleared active environment.\");\n                    }\n                }\n\n                eprintln!(\"Removed {removed} environment(s).\");\n\n                if !failures.is_empty() {\n                    eprintln!();\n                    eprintln!(\"Failed to remove {} environment(s):\", failures.len());\n                    for f in &failures {\n                        eprintln!(\"  {f}\");\n                    }\n                    return Err(ManagerError::EnvError(format!(\n                        \"{} of {} removals failed\",\n                        failures.len(),\n                        env_list.len()\n                    )));\n                }\n            }\n\n            // Remove base images if --images\n            if images && !base_images.is_empty() {\n                let engine = ensure_engine().unwrap_or(ContainerEngine::Docker);\n                eprintln!(\"Removing base images...\");\n                for img in &base_images {\n                    if container::image_exists_locally(engine, img) {\n                        eprintln!(\"  Removing image: {img}...\");\n                        if container::remove_image(engine, img) {\n                            eprintln!(\"  Removed: {img}\");\n                        } else {\n                            eprintln!(\"  Failed to remove: {img}\");\n                        }\n                    }\n                }\n            }\n\n            // Hint about the other scope\n            let other_scope = if system { Scope::Local } else { Scope::System };\n            let other_envs = cfg::list_env_names(other_scope);\n            if !other_envs.is_empty() {\n                if system {\n                    eprintln!(\n                        \"{} local environment(s) remain. Use: morloc-manager nuke\",\n                        other_envs.len()\n                    );\n                } else {\n                    eprintln!(\n                        \"{} system environment(s) remain. Use: sudo morloc-manager nuke --system\",\n                        other_envs.len()\n                    );\n                }\n            }\n\n            Ok(())\n        }\n\n        // ---- ls ----\n        Cmd::Ls { system, local } => {\n            let active_env = cfg::read_active_config()\n                .and_then(|c| c.active_env);\n            let active_str = active_env.as_deref();\n\n            // Determine which scope effectively owns the active environment.\n            // Local takes priority (same resolution as run/select).\n            let active_in_local = active_str\n                .map(|name| cfg::env_config_path(Scope::Local, name).is_file())\n                .unwrap_or(false);\n\n            let show_local = !system || local;\n            let show_system = !local || system;\n\n            let local_envs = if show_local {\n                let local_active = if active_in_local { active_str } else { None };\n                environment::list_environments(Scope::Local, local_active)\n            } else {\n                Vec::new()\n            };\n            let system_envs = if show_system {\n                let system_active = if active_in_local { None } else { active_str };\n                environment::list_environments(Scope::System, system_active)\n            } else {\n                Vec::new()\n            };\n\n            if json {\n                #[derive(serde::Serialize)]\n                struct LsOutput {\n                    local: Vec<environment::EnvInfo>,\n                    system: Vec<environment::EnvInfo>,\n                }\n                let output = LsOutput { local: local_envs, system: system_envs };\n                println!(\"{}\", serde_json::to_string_pretty(&output).unwrap());\n            } else {\n                let total = local_envs.len() + system_envs.len();\n                if !local_envs.is_empty() {\n                    println!(\"Local environments:\");\n                    for e in &local_envs {\n                        let active_mark = if e.active { \" (active)\" } else { \"\" };\n                        let ver_mark = e.morloc_version.as_ref()\n                            .map(|v| format!(\" [{}]\", v.show()))\n                            .unwrap_or_default();\n                        println!(\"  {}{}{}\", e.name, ver_mark, active_mark);\n                    }\n                }\n                if !system_envs.is_empty() {\n                    if !local_envs.is_empty() {\n                        println!();\n                    }\n                    println!(\"System environments:\");\n                    for e in &system_envs {\n                        let active_mark = if e.active { \" (active)\" } else { \"\" };\n                        let ver_mark = e.morloc_version.as_ref()\n                            .map(|v| format!(\" [{}]\", v.show()))\n                            .unwrap_or_default();\n                        println!(\"  {}{}{}\", e.name, ver_mark, active_mark);\n                    }\n                }\n                if total == 0 {\n                    println!(\"No environments found. Create one with: morloc-manager new\");\n                }\n            }\n            Ok(())\n        }\n\n        // ---- info ----\n        Cmd::Info { name, system } => {\n            if let Some(env_name) = name {\n                // Detailed info for a specific environment\n                let scope = if system {\n                    if !cfg::env_config_path(Scope::System, &env_name).is_file() {\n                        return Err(ManagerError::EnvironmentNotFound(format!(\n                            \"{env_name} (in system scope)\"\n                        )));\n                    }\n                    Scope::System\n                } else {\n                    cfg::find_env_scope(&env_name)?\n                };\n                let ec = cfg::read_env_config(scope, &env_name)?;\n                let data_dir = cfg::env_data_dir(scope, &env_name);\n                let active = cfg::read_active_config()\n                    .and_then(|c| c.active_env)\n                    .as_deref() == Some(env_name.as_str());\n\n                if json {\n                    #[derive(serde::Serialize)]\n                    struct InfoDetail {\n                        name: String,\n                        scope: String,\n                        active: bool,\n                        base_image: String,\n                        built_image: Option<String>,\n                        morloc_version: Option<Version>,\n                        engine: String,\n                        shm_size: String,\n                        dockerfile: Option<String>,\n                        flags: Vec<String>,\n                        data_dir: String,\n                    }\n                    let df_str = ec.dockerfile.as_ref().map(|_| {\n                        let df_path = cfg::env_dockerfile_path(scope, &env_name);\n                        df_path.display().to_string()\n                    });\n                    let flags_path = cfg::env_flags_path(scope, &env_name);\n                    let flags = cfg::read_flags_file_lines(&flags_path);\n                    let output = InfoDetail {\n                        name: ec.name.clone(),\n                        scope: match scope { Scope::Local => \"local\", Scope::System => \"system\" }.to_string(),\n                        active,\n                        base_image: ec.base_image.clone(),\n                        built_image: ec.built_image.clone(),\n                        morloc_version: ec.morloc_version.clone(),\n                        engine: display_engine(ec.engine).to_string(),\n                        shm_size: ec.shm_size.clone(),\n                        dockerfile: df_str,\n                        flags,\n                        data_dir: data_dir.display().to_string(),\n                    };\n                    println!(\"{}\", serde_json::to_string_pretty(&output).unwrap());\n                } else {\n                    println!(\"Name:           {}\", ec.name);\n                    println!(\"Scope:          {}\", match scope { Scope::Local => \"local\", Scope::System => \"system\" });\n                    println!(\"Active:         {}\", if active { \"yes\" } else { \"no\" });\n                    println!(\"Base image:     {}\", ec.base_image);\n                    if let Some(ref img) = ec.built_image {\n                        println!(\"Built image:    {img}\");\n                    }\n                    if let Some(ref ver) = ec.morloc_version {\n                        println!(\"Morloc version: {}\", ver.show());\n                    }\n                    println!(\"Engine:         {}\", display_engine(ec.engine));\n                    println!(\"SHM size:       {}\", ec.shm_size);\n                    println!(\"Dockerfile:     {}\", match ec.dockerfile {\n                        Some(_) => {\n                            let df_path = cfg::env_dockerfile_path(scope, &env_name);\n                            if df_path.exists() {\n                                df_path.display().to_string()\n                            } else {\n                                format!(\"{} (MISSING)\", df_path.display())\n                            }\n                        }\n                        None => \"none\".to_string(),\n                    });\n                    let flags_path = cfg::env_flags_path(scope, &env_name);\n                    println!(\"Flags:          {}\", flags_path.display());\n                    let flags = cfg::read_flags_file_lines(&flags_path);\n                    for flag in &flags {\n                        println!(\"  {flag}\");\n                    }\n                    println!(\"Data dir:       {}\", data_dir.display());\n                }\n            } else {\n                // Overview\n                let local_cfg = cfg::read_config::<Config>(&cfg::config_path(Scope::Local)).ok();\n                let system_cfg = cfg::read_config::<Config>(&cfg::config_path(Scope::System)).ok();\n                let se_mode = detect_selinux();\n\n                let active_env = environment::resolve_active_environment()\n                    .map(|(name, _, _)| name)\n                    .unwrap_or_else(|_| \"none\".to_string());\n\n                let se_str = match se_mode {\n                    SELinuxMode::Enforcing => \"enforcing\",\n                    SELinuxMode::Permissive => \"permissive\",\n                    SELinuxMode::Disabled => \"not detected\",\n                };\n\n                if json {\n                    #[derive(serde::Serialize)]\n                    struct DirInfo { path: String, exists: bool }\n                    #[derive(serde::Serialize)]\n                    struct InfoOverview {\n                        active: String,\n                        local_engine: String,\n                        system_engine: String,\n                        selinux: String,\n                        directories: std::collections::BTreeMap<String, DirInfo>,\n                        local: Vec<environment::EnvInfo>,\n                        system: Vec<environment::EnvInfo>,\n                    }\n                    let active_str = if active_env == \"none\" { None } else { Some(active_env.as_str()) };\n                    let mut directories = std::collections::BTreeMap::new();\n                    for (label, path) in [\n                        (\"config_local\", cfg::config_dir(Scope::Local)),\n                        (\"data_local\", cfg::data_dir(Scope::Local)),\n                        (\"config_system\", cfg::config_dir(Scope::System)),\n                        (\"data_system\", cfg::data_dir(Scope::System)),\n                    ] {\n                        directories.insert(label.to_string(), DirInfo {\n                            path: path.display().to_string(),\n                            exists: path.is_dir(),\n                        });\n                    }\n                    let output = InfoOverview {\n                        active: active_env.clone(),\n                        local_engine: local_cfg.as_ref().map(|c| display_engine(c.engine)).unwrap_or(\"unset\").to_string(),\n                        system_engine: system_cfg.as_ref().map(|c| display_engine(c.engine)).unwrap_or(\"unset\").to_string(),\n                        selinux: se_str.to_string(),\n                        directories,\n                        local: environment::list_environments(Scope::Local, active_str),\n                        system: environment::list_environments(Scope::System, active_str),\n                    };\n                    println!(\"{}\", serde_json::to_string_pretty(&output).unwrap());\n                } else {\n                    println!(\"Active:         {active_env}\");\n                    println!(\"Local engine:   {}\",\n                        local_cfg.as_ref().map(|c| display_engine(c.engine)).unwrap_or(\"unset\"));\n                    println!(\"System engine:  {}\",\n                        system_cfg.as_ref().map(|c| display_engine(c.engine)).unwrap_or(\"unset\"));\n                    println!(\"SELinux:        {se_str}\");\n\n                    let dirs = [\n                        (\"Config (local)\", cfg::config_dir(Scope::Local)),\n                        (\"Data (local)\", cfg::data_dir(Scope::Local)),\n                        (\"Config (system)\", cfg::config_dir(Scope::System)),\n                        (\"Data (system)\", cfg::data_dir(Scope::System)),\n                    ];\n                    println!(\"\\nDirectories:\");\n                    for (label, path) in &dirs {\n                        let status = if path.is_dir() { \"exists\" } else { \"not found\" };\n                        println!(\"  {:<20} {} ({})\", label, path.display(), status);\n                    }\n\n                    let active_str = if active_env == \"none\" { None } else { Some(active_env.as_str()) };\n\n                    // Check if active env lives in local scope (local takes priority)\n                    let active_in_local = active_str\n                        .map(|name| cfg::env_config_path(Scope::Local, name).is_file())\n                        .unwrap_or(false);\n\n                    let local_envs = environment::list_environments(Scope::Local, active_str);\n                    println!(\"\\nLocal environments:\");\n                    if local_envs.is_empty() {\n                        println!(\"  (none)\");\n                    } else {\n                        for e in &local_envs {\n                            let active_mark = if e.active { \" (active)\" } else { \"\" };\n                            let ver_mark = e.morloc_version.as_ref()\n                                .map(|v| format!(\" [{}]\", v.show()))\n                                .unwrap_or_default();\n                            println!(\"  {}{}{}\", e.name, ver_mark, active_mark);\n                        }\n                    }\n\n                    let system_envs = environment::list_environments(Scope::System, active_str);\n                    if !system_envs.is_empty() {\n                        println!(\"\\nSystem environments:\");\n                        for e in &system_envs {\n                            let active_mark = if e.active && active_in_local {\n                                \" (active - shadowed)\"\n                            } else if e.active {\n                                \" (active)\"\n                            } else {\n                                \"\"\n                            };\n                            let ver_mark = e.morloc_version.as_ref()\n                                .map(|v| format!(\" [{}]\", v.show()))\n                                .unwrap_or_default();\n                            println!(\"  {}{}{}\", e.name, ver_mark, active_mark);\n                        }\n                    }\n                }\n            }\n            Ok(())\n        }\n\n        // ---- select ----\n        Cmd::Select { name, system } => {\n            if system { check_system_write_access()?; }\n            let write_scope = resolve_scope(system);\n            environment::select_environment(&name, write_scope)?;\n            if system {\n                eprintln!(\"Set system default environment: {name}\");\n            } else {\n                eprintln!(\"Selected environment: {name}\");\n            }\n            Ok(())\n        }\n\n        // ---- update ----\n        Cmd::Update {\n            name, image, version, tag, dockerfile, dockerfile_stub, force, include, flagfile,\n            engine_arg, engine, shm_size, no_build, reinit, non_interactive: _,\n        } => {\n            let (env_name, env_scope) = match name {\n                Some(n) => {\n                    let scope = cfg::find_env_scope(&n)?;\n                    (n, scope)\n                }\n                None => {\n                    let (n, s, _) = environment::resolve_active_environment()?;\n                    (n, s)\n                }\n            };\n            if env_scope == Scope::System {\n                check_system_write_access()?;\n            }\n\n            // Handle --dockerfile-stub: generate stub if no Dockerfile exists\n            let resolved_dockerfile = if dockerfile.is_some() && dockerfile_stub {\n                return Err(ManagerError::EnvError(\n                    \"Cannot use both --dockerfile and --dockerfile-stub\".to_string(),\n                ));\n            } else if dockerfile_stub {\n                let df_path = cfg::env_dockerfile_path(env_scope, &env_name);\n                if df_path.exists() && !force {\n                    return Err(ManagerError::EnvError(format!(\n                        \"Dockerfile already exists: {}\\nUse --force to overwrite.\",\n                        df_path.display()\n                    )));\n                }\n                let stub_dir = cfg::data_dir(env_scope).join(\"tmp\");\n                fs::create_dir_all(&stub_dir).map_err(|e| {\n                    ManagerError::EnvError(format!(\"Failed to create tmp dir: {e}\"))\n                })?;\n                let stub_path = stub_dir.join(format!(\"{env_name}.Dockerfile\"));\n                let stub_content = format!(\n                    \"# morloc environment: {env_name}\\n\\\n                     # Edit this file, then rebuild with: morloc-manager update\\n\\\n                     \\n\\\n                     # CONTAINER_BASE is replaced at build time with the environment's base image\\n\\\n                     ARG CONTAINER_BASE=scratch\\n\\\n                     FROM ${{CONTAINER_BASE}}\\n\\\n                     \\n\\\n                     # Example: install system packages\\n\\\n                     # RUN apt-get update && apt-get install -y jq && rm -rf /var/lib/apt/lists/*\\n\\\n                     \\n\\\n                     # Example: install Python packages\\n\\\n                     # RUN pip install scikit-learn pandas\\n\\\n                     \\n\\\n                     # Example: install R packages\\n\\\n                     # RUN R -e \\\"install.packages('ggplot2', repos='https://cloud.r-project.org')\\\"\\n\"\n                );\n                fs::write(&stub_path, &stub_content).map_err(|e| {\n                    ManagerError::EnvError(format!(\"Failed to write stub Dockerfile: {e}\"))\n                })?;\n                Some(stub_path.to_string_lossy().to_string())\n            } else {\n                dockerfile\n            };\n\n            if version.is_some() && image.is_some() {\n                return Err(ManagerError::EnvError(\n                    \"--version and --image are mutually exclusive\".to_string()\n                ));\n            }\n\n            // Resolve base image if --version, --tag, or --image provided\n            let (base_image, original_image, morloc_ver) = if let Some(ref ver_str) = version {\n                let ec = cfg::read_env_config(env_scope, &env_name)?;\n                let clean = ver_str.strip_prefix('v').unwrap_or(ver_str);\n                let ver: Version = clean.parse().map_err(|_| {\n                    ManagerError::InvalidVersion(ver_str.clone())\n                })?;\n                let img = environment::pull_version_image(ec.engine, &ver)?;\n                (Some(img), None, Some(ver))\n            } else if let Some(ref t) = tag {\n                let ec = cfg::read_env_config(env_scope, &env_name)?;\n                let (img, ver) = environment::pull_tagged_image(ec.engine, t)?;\n                (Some(img), None, Some(ver))\n            } else if let Some(ref img) = image {\n                let ec = cfg::read_env_config(env_scope, &env_name)?;\n                environment::pull_custom_image(ec.engine, img)?;\n                // Detect version from the new image so it doesn't stay stale\n                let detected_ver = environment::detect_morloc_version(ec.engine, img).ok();\n                (Some(img.clone()), None, detected_ver)\n            } else {\n                (None, None, None)\n            };\n\n            eprintln!(\"Updating environment: {env_name}\");\n            let opts = environment::ApplyOptions {\n                name: env_name.clone(),\n                scope: env_scope,\n                is_new: false,\n                base_image,\n                original_image,\n                morloc_version: morloc_ver,\n                dockerfile: resolved_dockerfile,\n                includes: include,\n                flagfile,\n                engine_args: engine_arg,\n                engine: engine.map(|e| e.into()),\n                shm_size,\n                skip_dockerfile_build: no_build || dockerfile_stub,\n                verbose,\n            };\n            environment::apply_environment(&opts)?;\n\n            if dockerfile_stub {\n                let df_path = cfg::env_dockerfile_path(env_scope, &env_name);\n                eprintln!(\"Stub Dockerfile: {}\", df_path.display());\n                eprintln!(\"Edit it, then run: morloc-manager update {env_name}\");\n            }\n\n            // --version, --tag, and --image imply --reinit (ABI may have changed)\n            if reinit || version.is_some() || tag.is_some() || image.is_some() {\n                // Re-read the config (apply_environment may have updated it)\n                let ec = cfg::read_env_config(env_scope, &env_name)?;\n\n                // Check for running serve container -- reinit replaces morloc-nexus\n                // which will fail with \"Text file busy\" if the container has it open.\n                let serve_name = serve::serve_container_name(&env_name);\n                let running = serve::find_running_serve_containers(ec.engine);\n                if running.iter().any(|n| n == &serve_name) {\n                    return Err(ManagerError::EnvError(format!(\n                        \"Cannot reinit environment '{env_name}' while its serve container is running.\\n  \\\n                         Run 'morloc-manager stop {env_name}' first.\"\n                    )));\n                }\n\n                run_morloc_init_for(Some((env_name.clone(), env_scope, ec)), verbose)?;\n            }\n\n            eprintln!(\"{}\", bold_green(&format!(\"Environment '{env_name}' updated.\")));\n\n            if env_scope == Scope::System && !check_podman_additional_stores(\n                cfg::read_env_config(env_scope, &env_name)\n                    .map(|ec| ec.engine)\n                    .unwrap_or(ContainerEngine::Podman),\n            ) {\n                eprintln!();\n                warn_podman_additional_stores();\n            }\n\n            Ok(())\n        }\n\n        // ---- freeze ----\n        Cmd::Freeze { name, output, force } => {\n            let output_dir = output.as_deref().unwrap_or(\"./morloc-freeze\");\n            // Protect against silently overwriting a previous freeze\n            let existing_tar = std::path::Path::new(output_dir).join(\"state.tar.gz\");\n            if existing_tar.exists() && !force {\n                return Err(ManagerError::FreezeError(format!(\n                    \"Output directory already contains a freeze: {}\\n  \\\n                     Use --force to overwrite, or specify a different -o path.\",\n                    existing_tar.display()\n                )));\n            }\n            let (env_name, env_scope, ec) = resolve_env_or_active(name)?;\n            let engine = ec.engine;\n            // Detect the version from the container binary for sanity check.\n            // The morloc binary can't report prerelease tags (stack limitation),\n            // so if major.minor.patch match, keep the recorded version which has\n            // the full tag from the image.\n            eprintln!(\"Detecting morloc version from image...\");\n            let detected = environment::detect_morloc_version(ec.engine, ec.active_image())?;\n            let ver = if let Some(ref recorded) = ec.morloc_version {\n                if recorded.major == detected.major\n                    && recorded.minor == detected.minor\n                    && recorded.patch == detected.patch\n                {\n                    recorded.clone()\n                } else {\n                    eprintln!(\n                        \"Warning: recorded morloc version ({}) does not match image ({}).\",\n                        recorded.show(), detected.show()\n                    );\n                    detected\n                }\n            } else {\n                detected\n            };\n            let data_dir = cfg::env_data_dir(env_scope, &env_name);\n            let image = ec.active_image().to_string();\n            let result = freeze::freeze_from_dir(env_scope, ver.clone(), engine, &image, &data_dir.to_string_lossy(), output_dir, verbose);\n            if result.is_ok() && ec.morloc_version.as_ref() != Some(&ver) {\n                let mut updated = ec.clone();\n                updated.morloc_version = Some(ver);\n                let _ = cfg::write_env_config(env_scope, &env_name, &updated);\n            }\n            result\n        }\n\n        // ---- unfreeze ----\n        Cmd::Unfreeze { from, tag, base, engine: engine_override, rebuild } => {\n            let from = {\n                let p = std::path::Path::new(&from);\n                if p.is_dir() {\n                    let tar = p.join(\"state.tar.gz\");\n                    if tar.is_file() {\n                        tar.to_string_lossy().to_string()\n                    } else {\n                        return Err(ManagerError::UnfreezeError(format!(\n                            \"Directory '{}' does not contain state.tar.gz. \\\n                             Pass the path to state.tar.gz directly, or the directory containing it.\",\n                            from\n                        )));\n                    }\n                } else if p.is_file() {\n                    from\n                } else {\n                    return Err(ManagerError::UnfreezeError(format!(\n                        \"Input not found: {from}. \\\n                         Pass the path to state.tar.gz or the directory containing it.\"\n                    )));\n                }\n            };\n            // Read version and engine from the freeze manifest so unfreeze\n            // works on deployment machines with no morloc environments.\n            let tarball_dir = std::path::Path::new(&from)\n                .parent()\n                .unwrap_or(std::path::Path::new(\".\"));\n            let manifest_path = tarball_dir.join(\"freeze-manifest.json\");\n            let manifest = freeze::read_freeze_manifest(&manifest_path.to_string_lossy())\n                .map_err(|_| ManagerError::UnfreezeError(format!(\n                    \"Cannot read freeze manifest at {}. Ensure state.tar.gz and freeze-manifest.json are in the same directory.\",\n                    manifest_path.display()\n                )))?;\n            let engine = match engine_override {\n                Some(EngineArg::Docker) => ContainerEngine::Docker,\n                Some(EngineArg::Podman) => ContainerEngine::Podman,\n                None => {\n                    let e = ensure_engine()?;\n                    eprintln!(\n                        \"Note: using {} engine from global config. Override with --engine if needed.\",\n                        display_engine(e)\n                    );\n                    e\n                }\n            };\n            serve::build_serve_image(engine, verbose, &from, &tag, manifest.morloc_version, base.as_deref(), rebuild, &manifest.programs)\n        }\n\n        // ---- start ----\n        Cmd::Start { name, port, env_vars, env_file, force } => {\n            let (env_name, env_scope, ec) = resolve_env_or_active(name)?;\n            let image = ec.active_image().to_string();\n            let data_dir = cfg::env_data_dir(env_scope, &env_name);\n            let container_name = serve::serve_container_name(&env_name);\n            // Warn if a Dockerfile is configured but the layered image hasn't been built\n            if ec.dockerfile.is_some() && ec.built_image.is_none() {\n                eprintln!(\"Warning: Dockerfile is configured but image has not been built. Using base image.\");\n                eprintln!(\"  Run 'morloc-manager update {env_name}' to build the Dockerfile layer.\");\n            }\n            // Refuse to replace a running container unless --force is passed\n            if container::container_exists(ec.engine, &container_name) {\n                if !force {\n                    return Err(ManagerError::EnvError(format!(\n                        \"Serve container already running for '{env_name}'. Use --force to replace.\"\n                    )));\n                }\n                eprintln!(\"Warning: replacing existing serve container '{container_name}'\");\n            }\n            let port_mappings = if port.is_empty() {\n                vec![(8080, 8080)]\n            } else {\n                port\n            };\n            let flags_path = cfg::env_flags_path(env_scope, &env_name);\n            let extra_flags = cfg::read_flags_file(&flags_path);\n            let user_env = collect_env_vars(&env_vars, env_file.as_deref())?;\n            serve::serve_environment(\n                ec.engine, verbose, &image,\n                &data_dir.to_string_lossy(), &container_name,\n                &port_mappings, &extra_flags, &Some(ec.shm_size.clone()),\n                &user_env,\n            )\n        }\n\n        // ---- stop ----\n        Cmd::Stop { name } => {\n            let (env_name, _, ec) = resolve_env_or_active(name)?;\n            let container_name = serve::serve_container_name(&env_name);\n            if crate::container::container_exists(ec.engine, &container_name) {\n                serve::stop_serve_container(ec.engine, verbose, &container_name)?;\n                eprintln!(\"Stopped serving environment: {env_name}\");\n            } else {\n                return Err(ManagerError::EnvError(\n                    format!(\"No serve container running for environment '{env_name}'\")\n                ));\n            }\n            Ok(())\n        }\n\n        // ---- logs ----\n        Cmd::Logs { name, follow } => {\n            let (container_name, engine) = if let Some(ref n) = name {\n                let (_, _, ec) = resolve_env_or_active(Some(n.clone()))?;\n                let cname = serve::serve_container_name(n);\n                if !container::container_exists(ec.engine, &cname) {\n                    return Err(ManagerError::EnvError(\n                        format!(\"No serve container running for environment '{n}'\")\n                    ));\n                }\n                (cname, ec.engine)\n            } else {\n                find_running_serve_container()?\n            };\n            let exe = match engine {\n                ContainerEngine::Podman => \"podman\",\n                ContainerEngine::Docker => \"docker\",\n            };\n            let mut cmd_args = vec![\"logs\"];\n            if follow {\n                cmd_args.push(\"-f\");\n            }\n            cmd_args.push(&container_name);\n            // Log content is the primary data of this command, so both the\n            // container's original stdout and stderr should go to our stdout.\n            // docker/podman logs preserves the original stream split; we merge\n            // them so that `morloc-manager logs | grep ERROR` works.\n            let stdout_handle = std::io::stdout();\n            let status = std::process::Command::new(exe)\n                .args(&cmd_args)\n                .stdin(Stdio::null())\n                .stdout(Stdio::inherit())\n                .stderr(Stdio::from(stdout_handle))\n                .status()\n                .map_err(|e| ManagerError::EnvError(format!(\"Failed to run {exe} logs: {e}\")))?;\n            if !status.success() {\n                return Err(ManagerError::EngineError {\n                    engine,\n                    code: status.code().unwrap_or(1),\n                    stderr: String::new(),\n                });\n            }\n            Ok(())\n        }\n\n        // ---- eval ----\n        Cmd::Eval { first, second, port } => {\n            let expr = if let Some(ref expr_arg) = second {\n                // first is env name — validate it exists and its serve container is running\n                let (env_name, _, ec) = resolve_env_or_active(Some(first))?;\n                let container_name = serve::serve_container_name(&env_name);\n                if !container::container_exists(ec.engine, &container_name) {\n                    return Err(ManagerError::EnvError(format!(\n                        \"No serve container running for '{env_name}'. Start with: morloc-manager start {env_name}\"\n                    )));\n                }\n                expr_arg.clone()\n            } else {\n                first\n            };\n            use std::io::{Read as IoRead, Write as IoWrite};\n            let body = format!(\"{{\\\"expr\\\":{}}}\", serde_json::to_string(&expr).unwrap_or_default());\n            let request = format!(\n                \"POST /eval HTTP/1.1\\r\\nHost: localhost\\r\\nContent-Type: application/json\\r\\nContent-Length: {}\\r\\nConnection: close\\r\\n\\r\\n{}\",\n                body.len(), body\n            );\n            let addr = format!(\"127.0.0.1:{port}\");\n            let mut stream = std::net::TcpStream::connect(&addr).map_err(|e| {\n                ManagerError::EnvError(format!(\n                    \"Cannot connect to serve container on {addr}: {e}\\n  Is a serve container running? Start with: morloc-manager start\"\n                ))\n            })?;\n            stream.write_all(request.as_bytes()).map_err(|e| {\n                ManagerError::EnvError(format!(\"Failed to send request: {e}\"))\n            })?;\n            let mut response = String::new();\n            stream.read_to_string(&mut response).map_err(|e| {\n                ManagerError::EnvError(format!(\"Failed to read response: {e}\"))\n            })?;\n            // Extract body from HTTP response (after \\r\\n\\r\\n)\n            if let Some(pos) = response.find(\"\\r\\n\\r\\n\") {\n                let body = &response[pos + 4..];\n                println!(\"{body}\");\n            } else {\n                println!(\"{response}\");\n            }\n            Ok(())\n        }\n\n        // ---- status ----\n        Cmd::Status => {\n            let mut all_containers: Vec<serve::ServeContainerInfo> = Vec::new();\n            let mut any_engine = false;\n            for engine in [ContainerEngine::Podman, ContainerEngine::Docker] {\n                let exe = match engine {\n                    ContainerEngine::Podman => \"podman\",\n                    ContainerEngine::Docker => \"docker\",\n                };\n                if which(exe) {\n                    any_engine = true;\n                    if let Ok(containers) = serve::query_serve_containers(engine, verbose) {\n                        all_containers.extend(containers);\n                    }\n                }\n            }\n            if !any_engine {\n                return Err(ManagerError::EngineNotFound);\n            }\n            if json {\n                #[derive(serde::Serialize)]\n                struct StatusOutput { containers: Vec<serve::ServeContainerInfo> }\n                let output = StatusOutput { containers: all_containers };\n                println!(\"{}\", serde_json::to_string_pretty(&output).unwrap());\n            } else if all_containers.is_empty() {\n                println!(\"No morloc serve containers running.\");\n            } else {\n                println!(\"Running servers:\");\n                for c in &all_containers {\n                    println!(\"  {}  {}  ({})  [{}]\", c.name, c.ports, c.env, c.status);\n                }\n            }\n            Ok(())\n        }\n\n        // ---- doctor ----\n        Cmd::Doctor { name, system, deep, strict } => {\n            let (env_name, env_scope, ec) = if let Some(ref n) = name {\n                let s = if system { Scope::System } else { cfg::find_env_scope(n)? };\n                let c = cfg::read_env_config(s, n)?;\n                (n.clone(), s, c)\n            } else {\n                resolve_env_or_active(None)?\n            };\n            doctor::doctor(ec.engine, verbose, &env_name, env_scope, &ec, deep, strict, json)\n        }\n\n    }\n}\n\n// ======================================================================\n// Serve container discovery\n// ======================================================================\n\n/// Find exactly one running morloc-serve-* container across all engines.\n/// Returns (container_name, engine). Errors if zero or multiple found.\nfn find_running_serve_container() -> Result<(String, ContainerEngine)> {\n    let mut found: Vec<(String, ContainerEngine)> = Vec::new();\n    for engine in [ContainerEngine::Podman, ContainerEngine::Docker] {\n        let exe = match engine {\n            ContainerEngine::Podman => \"podman\",\n            ContainerEngine::Docker => \"docker\",\n        };\n        if which(exe) {\n            for name in serve::find_running_serve_containers(engine) {\n                found.push((name, engine));\n            }\n        }\n    }\n    match found.len() {\n        0 => Err(ManagerError::EnvError(\n            \"No morloc serve containers running\".to_string(),\n        )),\n        1 => Ok(found.into_iter().next().unwrap()),\n        _ => {\n            let names: Vec<String> = found.iter().map(|(n, _)| n.clone()).collect();\n            Err(ManagerError::EnvError(format!(\n                \"Multiple serve containers running. Specify one explicitly:\\n  {}\",\n                names.join(\"\\n  \")\n            )))\n        }\n    }\n}\n\n// ======================================================================\n// Container run\n// ======================================================================\n\nfn run_in_container(\n    verbose: bool,\n    shell: bool,\n    args: &[String],\n    user_env: &[(String, String)],\n) -> Result<()> {\n    run_in_container_for(None, verbose, shell, args, user_env)\n}\n\nfn run_in_container_for(\n    target: Option<(String, Scope, EnvironmentConfig)>,\n    verbose: bool,\n    shell: bool,\n    args: &[String],\n    user_env: &[(String, String)],\n) -> Result<()> {\n    let (env_name, env_scope, ec) = match target {\n        Some(t) => t,\n        None => environment::resolve_active_environment()?,\n    };\n    let engine = ec.engine;\n    let image = ec.active_image().to_string();\n    let data_dir = cfg::env_data_dir(env_scope, &env_name);\n    let v_data_dir = data_dir.to_string_lossy().to_string();\n\n    // Warn if a Dockerfile is configured but the layered image hasn't been built\n    if ec.dockerfile.is_some() && ec.built_image.is_none() {\n        eprintln!(\"Warning: Dockerfile is configured but image has not been built. Using base image.\");\n        eprintln!(\"  Run 'morloc-manager update {env_name}' to build the Dockerfile layer.\");\n    }\n\n    // Fail fast with a clear message if docker socket is unreachable\n    require_docker_socket(engine)?;\n\n    // Verify the image is accessible before attempting to run\n    if !container::image_exists_locally(engine, &image) {\n        // Show the raw container engine error before our hint\n        if let Some(raw_err) = container::image_inspect_stderr(engine, &image) {\n            let trimmed = raw_err.trim();\n            if !trimmed.is_empty() {\n                eprintln!(\"{trimmed}\");\n            }\n        }\n        if env_scope == Scope::System && !check_podman_additional_stores(engine) {\n            return Err(ManagerError::EnvError(format!(\n                \"Image '{image}' not found. The environment '{env_name}' is a system environment \\\n                 but Podman is not configured to see rootful images.\\n\\\n                 Option 1 (recommended): Use Docker for system environments.\\n\\\n                 Option 2: Add to [storage.options] in /etc/containers/storage.conf:\\n\\n  \\\n                 additionalimagestores = [\\\"/var/lib/containers/storage\\\"]\\n\\n\\\n                 Note: Option 2 may cause storage locking conflicts on Fedora and Debian.\\n\"\n            )));\n        }\n        let hint = if env_scope == Scope::System {\n            format!(\"Ask your administrator to run: sudo morloc-manager update {env_name}\")\n        } else {\n            format!(\"Run 'morloc-manager update {env_name}' to build it.\")\n        };\n        return Err(ManagerError::EnvError(format!(\n            \"Image '{image}' not found locally. {hint}\"\n        )));\n    }\n\n    let se_mode = detect_selinux();\n    let suffix = volume_suffix(se_mode);\n    let home = dirs::home_dir()\n        .unwrap_or_default()\n        .to_string_lossy()\n        .to_string();\n    let cwd = std::env::current_dir()\n        .unwrap_or_default()\n        .to_string_lossy()\n        .to_string();\n\n    // Refuse to run from the root directory — container engines cannot\n    // bind-mount \"/\" and the resulting error is opaque.\n    if !shell && cwd == \"/\" {\n        return Err(ManagerError::EnvError(\n            \"Cannot run from the root directory (/). \\\n             Change to a subdirectory first (e.g., cd /tmp).\".to_string()\n        ));\n    }\n\n    // Read flags from the environment's flags file\n    let flags_path = cfg::env_flags_path(env_scope, &env_name);\n    let extra_flags = cfg::read_flags_file(&flags_path);\n\n    let is_init = matches!(args, [a, b, ..] if a == \"morloc\" && b == \"init\");\n    let is_home_dir = normalize_trailing(&cwd) == normalize_trailing(&home);\n\n    if !is_init && !suffix.is_empty() && !is_home_dir {\n        selinux::validate_mount_path(&cwd)?;\n        run_with_config(\n            engine, verbose, &image, &v_data_dir, &home, &cwd, suffix,\n            shell, args, false, &ec.shm_size, &extra_flags, user_env,\n        )\n    } else {\n        let (cwd_final, skip_work_mount) = if is_home_dir && !suffix.is_empty() && !is_init {\n            eprintln!(\"Warning: running from home directory with SELinux; working directory mount skipped.\");\n            eprintln!(\"Workaround: create a project subdirectory and work from there:\");\n            eprintln!(\"  mkdir ~/myproject && cd ~/myproject\");\n            (home.clone(), true)\n        } else {\n            (cwd, false)\n        };\n        run_with_config(\n            engine, verbose, &image, &v_data_dir, &home, &cwd_final, suffix,\n            shell, args, is_init || skip_work_mount, &ec.shm_size, &extra_flags, user_env,\n        )\n    }\n}\n\nfn run_with_config(\n    engine: ContainerEngine,\n    verbose: bool,\n    image: &str,\n    v_data_dir: &str,\n    home: &str,\n    cwd: &str,\n    suffix: &str,\n    shell: bool,\n    args: &[String],\n    is_init: bool,\n    shm_size: &str,\n    extra_flags: &[String],\n    user_env: &[(String, String)],\n) -> Result<()> {\n    if shell {\n        if !io::stdin().is_terminal() || !io::stdout().is_terminal() {\n            eprintln!(\"Error: --shell requires an interactive terminal (TTY).\");\n            eprintln!(\"If connecting over SSH, use: ssh -t <host> morloc-manager run --shell\");\n            std::process::exit(1);\n        }\n    }\n\n    // Mount data at /opt/morloc — matching the serve container (start).\n    // The compiler reads MORLOC_HOME to resolve all generated paths.\n    let mh = \"/opt/morloc\";\n    let base_mounts = vec![\n        (v_data_dir.to_string(), mh.to_string()),\n    ];\n    let work_mount = if is_init {\n        Vec::new()\n    } else {\n        vec![(cwd.to_string(), cwd.to_string())]\n    };\n    let all_mounts: Vec<(String, String)> = base_mounts.into_iter().chain(work_mount).collect();\n    let work_dir = if is_init {\n        mh.to_string()\n    } else {\n        cwd.to_string()\n    };\n    let mut env_vars = vec![\n        (\"HOME\".to_string(), home.to_string()),\n        (\"MORLOC_HOME\".to_string(), mh.to_string()),\n        (\n            \"PATH\".to_string(),\n            format!(\"{mh}/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"),\n        ),\n    ];\n    env_vars.extend(user_env.iter().cloned());\n    let cmd = if shell {\n        Some(vec![\"/bin/bash\".to_string()])\n    } else if args.is_empty() {\n        None\n    } else {\n        Some(args.to_vec())\n    };\n\n    let cfg = RunConfig {\n        image: image.to_string(),\n        bind_mounts: all_mounts,\n        env: env_vars,\n        interactive: shell,\n        shm_size: Some(shm_size.to_string()),\n        work_dir: Some(work_dir),\n        selinux_suffix: suffix.to_string(),\n        command: cmd,\n        extra_flags: extra_flags.to_vec(),\n        ..RunConfig::new(image)\n    };\n\n    let status = container_run_passthrough(engine, verbose, shell, &cfg);\n    let code = status.code().unwrap_or(1);\n    if status.success() {\n        Ok(())\n    } else if code >= 125 {\n        // Exit 125+ = container engine error (not the user's program)\n        Err(ManagerError::EngineError {\n            engine,\n            code,\n            stderr: \"Container engine error\".to_string(),\n        })\n    } else {\n        // Exit 1-124 = program exited with non-zero, pass through silently\n        std::process::exit(code);\n    }\n}\n\nfn run_morloc_init_for(\n    target: Option<(String, Scope, EnvironmentConfig)>,\n    verbose: bool,\n) -> Result<()> {\n    let init_args: Vec<String> = if verbose {\n        [\"morloc\", \"init\", \"-f\"].iter().map(|s| s.to_string()).collect()\n    } else {\n        [\"morloc\", \"init\", \"-f\", \"-q\"].iter().map(|s| s.to_string()).collect()\n    };\n    eprintln!(\"Initializing morloc (this may take several minutes)...\");\n    run_in_container_for(target, verbose, false, &init_args, &[])\n}\n\nfn normalize_trailing(p: &str) -> String {\n    let mut s = p.to_string();\n    if !s.ends_with('/') {\n        s.push('/');\n    }\n    s\n}\n\n// ======================================================================\n// Tests\n// ======================================================================\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::container::{build_build_args, build_run_args, engine_executable, engine_specific_run_flags, BuildConfig};\n\n    // ---- Type tests ----\n\n    #[test]\n    fn show_version_formats_correctly() {\n        assert_eq!(Version::new(0, 67, 0).show(), \"0.67.0\");\n    }\n\n    #[test]\n    fn parse_version_round_trips() {\n        assert_eq!(\"0.67.0\".parse::<Version>().ok(), Some(Version::new(0, 67, 0)));\n    }\n\n    #[test]\n    fn parse_version_rejects_invalid() {\n        assert!(\"abc\".parse::<Version>().is_err());\n    }\n\n    #[test]\n    fn parse_version_rejects_incomplete() {\n        assert!(\"0.67\".parse::<Version>().is_err());\n    }\n\n    #[test]\n    fn version_ordering_is_semantic() {\n        assert!(Version::new(1, 0, 0) > Version::new(0, 99, 99));\n    }\n\n    #[test]\n    fn version_ordering_minor() {\n        assert!(Version::new(0, 2, 0) > Version::new(0, 1, 99));\n    }\n\n    #[test]\n    fn version_equality() {\n        assert_eq!(Version::new(0, 67, 0), Version::new(0, 67, 0));\n    }\n\n    #[test]\n    fn parse_version_with_prerelease() {\n        for (input, expected_pre) in [\n            (\"0.77.0-rc.1\", \"rc.1\"),\n            (\"1.0.0-alpha\", \"alpha\"),\n            (\"1.0.0-beta.2\", \"beta.2\"),\n            (\"0.1.0-dev.20260414\", \"dev.20260414\"),\n        ] {\n            let ver: Version = input.parse().unwrap();\n            assert_eq!(ver.prerelease, Some(expected_pre.to_string()), \"input: {input}\");\n            assert_eq!(ver.show(), input, \"round-trip failed for: {input}\");\n        }\n    }\n\n    #[test]\n    fn prerelease_sorts_before_release() {\n        let rc: Version = \"0.77.0-rc.1\".parse().unwrap();\n        let release = Version::new(0, 77, 0);\n        assert!(rc < release);\n    }\n\n    // ---- Error message tests ----\n\n    #[test]\n    fn invalid_version_renders() {\n        let err = ManagerError::InvalidVersion(\"abc\".to_string());\n        assert!(err.to_string().contains(\"Invalid version\"));\n    }\n\n    #[test]\n    fn no_command_renders() {\n        let err = ManagerError::NoCommand;\n        assert!(err.to_string().contains(\"No command\"));\n    }\n\n    #[test]\n    fn no_active_environment_suggests_new() {\n        let err = ManagerError::NoActiveEnvironment;\n        assert!(err.to_string().contains(\"new\"));\n    }\n\n    #[test]\n    fn config_permission_denied_mentions_permissions() {\n        let err = ManagerError::ConfigPermissionDenied(\"/etc/morloc/config.json\".to_string());\n        assert!(err.to_string().contains(\"Permission\"));\n    }\n\n    #[test]\n    fn freeze_error_renders() {\n        let err = ManagerError::FreezeError(\"tar error\".to_string());\n        assert!(err.to_string().contains(\"Freeze failed\"));\n    }\n\n    // ---- Config default tests ----\n\n    #[test]\n    fn default_config_has_no_active_env() {\n        assert_eq!(Config::default().active_env, None);\n    }\n\n    #[test]\n    fn default_config_uses_podman() {\n        assert_eq!(Config::default().engine, ContainerEngine::Podman);\n    }\n\n    // ---- Config JSON round-trip tests ----\n\n    #[test]\n    fn config_json_round_trip() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"config.json\");\n        let cfg = Config {\n            active_env: Some(\"ml\".to_string()),\n            engine: ContainerEngine::Docker,\n        };\n        cfg::write_config(&path, &cfg).unwrap();\n        let cfg2: Config = cfg::read_config(&path).unwrap();\n        assert_eq!(cfg2.active_env.as_deref(), Some(\"ml\"));\n        assert_eq!(cfg2.engine, ContainerEngine::Docker);\n    }\n\n    #[test]\n    fn config_read_missing_returns_not_found() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"nonexistent.json\");\n        let result = cfg::read_config::<Config>(&path);\n        assert!(matches!(result, Err(ManagerError::ConfigNotFound(_))));\n    }\n\n    #[test]\n    fn config_read_invalid_json_returns_parse_error() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"bad.json\");\n        fs::write(&path, \"not json at all\").unwrap();\n        let result = cfg::read_config::<Config>(&path);\n        assert!(matches!(result, Err(ManagerError::ConfigParseError { .. })));\n    }\n\n    #[test]\n    fn env_config_json_round_trip() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"env.json\");\n        let ec = EnvironmentConfig {\n            name: \"test\".to_string(),\n            base_image: \"ghcr.io/morloc-project/morloc/morloc-full:0.67.0\".to_string(),\n            original_image: None,\n            dockerfile: None,\n            content_hash: None,\n            built_image: None,\n            engine: ContainerEngine::Podman,\n            shm_size: \"1g\".to_string(),\n            morloc_version: Some(Version::new(0, 67, 0)),\n        };\n        cfg::write_config(&path, &ec).unwrap();\n        let ec2: EnvironmentConfig = cfg::read_config(&path).unwrap();\n        assert_eq!(ec2.name, \"test\");\n        assert_eq!(ec2.shm_size, \"1g\");\n        assert_eq!(ec2.morloc_version, Some(Version::new(0, 67, 0)));\n    }\n\n    #[test]\n    fn freeze_manifest_json_round_trip() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"fm.json\");\n        let fm = FreezeManifest {\n            morloc_version: Version::new(0, 67, 0),\n            frozen_at: chrono::Utc::now(),\n            modules: vec![ModuleEntry {\n                name: \"math\".to_string(),\n                version: Some(\"0.3.0\".to_string()),\n                sha256: \"abc123\".to_string(),\n            }],\n            programs: vec![ProgramEntry {\n                name: \"svc\".to_string(),\n                commands: vec![\"hello\".to_string(), \"compute\".to_string()],\n            }],\n            base_image: \"morloc-full:0.67.0\".to_string(),\n            env_layer: Some(FrozenEnvLayer {\n                name: \"ml\".to_string(),\n                dockerfile: \"FROM scratch\".to_string(),\n                content_hash: \"abc\".to_string(),\n                image_tag: None,\n            }),\n            env_vars: Vec::new(),\n        };\n        cfg::write_config(&path, &fm).unwrap();\n        let fm2: FreezeManifest = cfg::read_config(&path).unwrap();\n        assert_eq!(fm2.morloc_version, Version::new(0, 67, 0));\n        assert_eq!(fm2.modules.len(), 1);\n        assert_eq!(fm2.programs.len(), 1);\n        assert_eq!(fm2.programs[0].commands, vec![\"hello\", \"compute\"]);\n        // env_vars is no longer written but can still be read from old manifests\n        assert!(fm2.env_vars.is_empty());\n    }\n\n    #[test]\n    fn freeze_manifest_reads_legacy_env_vars() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"legacy.json\");\n        // Simulate an old manifest that included env_vars\n        let json = r#\"{\n            \"morloc_version\": {\"major\":0,\"minor\":67,\"patch\":0,\"pre\":null},\n            \"frozen_at\": \"2025-01-01T00:00:00Z\",\n            \"modules\": [],\n            \"programs\": [],\n            \"base_image\": \"morloc-full:0.67.0\",\n            \"env_layer\": null,\n            \"env_vars\": [\"API_KEY\", \"DB_URL\"]\n        }\"#;\n        std::fs::write(&path, json).unwrap();\n        let fm: FreezeManifest = cfg::read_config(&path).unwrap();\n        assert_eq!(fm.env_vars, vec![\"API_KEY\", \"DB_URL\"]);\n    }\n\n    // ---- Config flags tests ----\n\n    #[test]\n    fn read_flags_file_parses() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"test.flags\");\n        fs::write(\n            &path,\n            \"# This is a comment\\n--gpus all\\n\\n  -v /data:/data  \\n# another comment\\n--network host\\n\",\n        )\n        .unwrap();\n        let flags = cfg::read_flags_file(&path);\n        assert_eq!(\n            flags,\n            vec![\"--gpus\", \"all\", \"-v\", \"/data:/data\", \"--network\", \"host\"]\n        );\n    }\n\n    #[test]\n    fn read_flags_file_missing() {\n        let dir = tempfile::tempdir().unwrap();\n        let flags = cfg::read_flags_file(&dir.path().join(\"nope.flags\"));\n        assert!(flags.is_empty());\n    }\n\n    #[test]\n    fn read_flags_file_expands_env_vars() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"test.flags\");\n        fs::write(&path, \"-v $HOME/data:/data\\n\").unwrap();\n        let flags = cfg::read_flags_file(&path);\n        let home = std::env::var(\"HOME\").unwrap();\n        assert_eq!(flags, vec![\"-v\", &format!(\"{home}/data:/data\")]);\n    }\n\n    #[test]\n    fn read_flags_file_expands_tilde() {\n        let dir = tempfile::tempdir().unwrap();\n        let path = dir.path().join(\"test.flags\");\n        fs::write(&path, \"-v ~/data:/data\\n\").unwrap();\n        let flags = cfg::read_flags_file(&path);\n        let home = std::env::var(\"HOME\").unwrap();\n        assert_eq!(flags, vec![\"-v\", &format!(\"{home}/data:/data\")]);\n    }\n\n    // ---- Container CLI argument tests ----\n\n    #[test]\n    fn engine_executable_docker() {\n        assert_eq!(engine_executable(ContainerEngine::Docker), \"docker\");\n    }\n\n    #[test]\n    fn engine_executable_podman() {\n        assert_eq!(engine_executable(ContainerEngine::Podman), \"podman\");\n    }\n\n    #[test]\n    fn build_run_args_minimal() {\n        let cfg = RunConfig::new(\"myimage:latest\");\n        let args = build_run_args(\n            ContainerEngine::Docker,\n            &engine_specific_run_flags(ContainerEngine::Docker),\n            &cfg,\n        );\n        assert_eq!(args[0], \"run\");\n        assert!(args.contains(&\"--rm\".to_string()));\n        assert!(args.contains(&\"myimage:latest\".to_string()));\n        assert!(!args.contains(&\"-it\".to_string()));\n    }\n\n    #[test]\n    fn build_run_args_podman_userns() {\n        let cfg = RunConfig::new(\"myimage:latest\");\n        let args = build_run_args(\n            ContainerEngine::Podman,\n            &engine_specific_run_flags(ContainerEngine::Podman),\n            &cfg,\n        );\n        assert!(args.contains(&\"--userns=keep-id\".to_string()));\n    }\n\n    #[test]\n    fn build_run_args_interactive() {\n        let mut cfg = RunConfig::new(\"img\");\n        cfg.interactive = true;\n        let args = build_run_args(\n            ContainerEngine::Docker,\n            &engine_specific_run_flags(ContainerEngine::Docker),\n            &cfg,\n        );\n        assert!(args.contains(&\"-it\".to_string()));\n    }\n\n    #[test]\n    fn build_run_args_selinux_suffix() {\n        let mut cfg = RunConfig::new(\"img\");\n        cfg.bind_mounts = vec![(\"/host\".to_string(), \"/container\".to_string())];\n        cfg.selinux_suffix = \":z\".to_string();\n        let args = build_run_args(\n            ContainerEngine::Docker,\n            &engine_specific_run_flags(ContainerEngine::Docker),\n            &cfg,\n        );\n        assert!(args.contains(&\"-v\".to_string()));\n        assert!(args.contains(&\"/host:/container:z\".to_string()));\n    }\n\n    #[test]\n    fn build_run_args_workdir() {\n        let mut cfg = RunConfig::new(\"img\");\n        cfg.work_dir = Some(\"/work\".to_string());\n        let args = build_run_args(\n            ContainerEngine::Docker,\n            &engine_specific_run_flags(ContainerEngine::Docker),\n            &cfg,\n        );\n        assert!(args.contains(&\"-w\".to_string()));\n        assert!(args.contains(&\"/work\".to_string()));\n    }\n\n    #[test]\n    fn build_run_args_read_only() {\n        let mut cfg = RunConfig::new(\"img\");\n        cfg.read_only = true;\n        let args = build_run_args(\n            ContainerEngine::Docker,\n            &engine_specific_run_flags(ContainerEngine::Docker),\n            &cfg,\n        );\n        assert!(args.contains(&\"--read-only\".to_string()));\n    }\n\n    #[test]\n    fn build_run_args_command_at_end() {\n        let mut cfg = RunConfig::new(\"img\");\n        cfg.command = Some(vec![\n            \"morloc\".to_string(),\n            \"make\".to_string(),\n            \"-o\".to_string(),\n            \"svc\".to_string(),\n            \"svc.loc\".to_string(),\n        ]);\n        let args = build_run_args(\n            ContainerEngine::Docker,\n            &engine_specific_run_flags(ContainerEngine::Docker),\n            &cfg,\n        );\n        let img_idx = args.iter().position(|a| a == \"img\").unwrap();\n        let cmd_idx = args.iter().position(|a| a == \"morloc\").unwrap();\n        assert!(img_idx < cmd_idx);\n    }\n\n    #[test]\n    fn build_build_args_includes_tag_and_dockerfile() {\n        let cfg = BuildConfig {\n            dockerfile: \"/tmp/Dockerfile\".to_string(),\n            context: \"/tmp/ctx\".to_string(),\n            tag: \"test:v1\".to_string(),\n            build_args: vec![(\"BASE\".to_string(), \"ubuntu:22.04\".to_string())],\n        };\n        let args = build_build_args(&cfg);\n        assert_eq!(args[0], \"build\");\n        assert!(args.contains(&\"-f\".to_string()));\n        assert!(args.contains(&\"-t\".to_string()));\n        assert!(args.contains(&\"--build-arg\".to_string()));\n        assert_eq!(args.last().unwrap(), \"/tmp/ctx\");\n    }\n\n    // ---- SELinux tests ----\n\n    #[test]\n    fn root_is_unsafe() {\n        assert!(!selinux::is_safe_to_relabel(\"/\"));\n    }\n\n    #[test]\n    fn tmp_is_unsafe() {\n        assert!(!selinux::is_safe_to_relabel(\"/tmp\"));\n    }\n\n    #[test]\n    fn tmp_subdir_is_unsafe() {\n        assert!(!selinux::is_safe_to_relabel(\"/tmp/foo\"));\n    }\n\n    #[test]\n    fn home_subdir_is_safe() {\n        assert!(selinux::is_safe_to_relabel(\"/home/user/project\"));\n    }\n\n    #[test]\n    fn var_tmp_is_unsafe() {\n        assert!(!selinux::is_safe_to_relabel(\"/var/tmp\"));\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/selinux.rs",
    "content": "use std::path::Path;\nuse std::process::Command;\n\nuse crate::error::{ManagerError, Result};\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum SELinuxMode {\n    Enforcing,\n    Permissive,\n    Disabled,\n}\n\npub fn detect_selinux() -> SELinuxMode {\n    if !Path::new(\"/usr/sbin/getenforce\").exists() {\n        return SELinuxMode::Disabled;\n    }\n    let Ok(output) = Command::new(\"getenforce\").output() else {\n        return SELinuxMode::Disabled;\n    };\n    if !output.status.success() {\n        return SELinuxMode::Disabled;\n    }\n    let stdout = String::from_utf8_lossy(&output.stdout);\n    let first_line = stdout.lines().next().unwrap_or(\"\");\n    match first_line {\n        \"Enforcing\" => SELinuxMode::Enforcing,\n        \"Permissive\" => SELinuxMode::Permissive,\n        _ => SELinuxMode::Disabled,\n    }\n}\n\npub fn volume_suffix(mode: SELinuxMode) -> &'static str {\n    match mode {\n        SELinuxMode::Enforcing => \":z\",\n        SELinuxMode::Permissive | SELinuxMode::Disabled => \"\",\n    }\n}\n\npub fn is_safe_to_relabel(path: &str) -> bool {\n    let home = dirs::home_dir().unwrap_or_default();\n    let norm = normalize(path);\n    let home_norm = normalize_trailing(&home.to_string_lossy());\n    let is_home_root = normalize_trailing(&norm) == home_norm;\n    !is_unsafe_system_path(&norm) && !is_home_root\n}\n\npub fn validate_mount_path(path: &str) -> Result<()> {\n    if is_safe_to_relabel(path) {\n        Ok(())\n    } else {\n        Err(ManagerError::SELinuxError(format!(\n            \"Cannot bind-mount {path} with SELinux relabeling. \\\n             This path is unsafe to relabel. \\\n             Use a subdirectory instead (e.g., {path}/project/).\"\n        )))\n    }\n}\n\nfn is_unsafe_system_path(p: &str) -> bool {\n    let norm = normalize_trailing(p);\n    norm == \"/\" || norm.starts_with(\"/tmp/\") || norm == \"/tmp/\" || norm.starts_with(\"/var/tmp/\") || norm == \"/var/tmp/\"\n}\n\nfn normalize(p: &str) -> String {\n    // Simple normalization: resolve . and remove trailing /\n    let path = Path::new(p);\n    path.to_string_lossy().to_string()\n}\n\nfn normalize_trailing(p: &str) -> String {\n    let mut s = normalize(p);\n    if !s.ends_with('/') {\n        s.push('/');\n    }\n    s\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/serve.rs",
    "content": "use std::fs;\nuse std::path::Path;\nuse std::process::{Command, Stdio};\nuse std::thread;\nuse std::time::Duration;\n\nuse crate::container::{\n    container_build, container_pull, container_run, container_run_quiet, container_stop,\n    container_remove, engine_executable, exit_code_to_int, image_exists_locally,\n    BuildConfig, RunConfig,\n};\nuse crate::error::{ManagerError, Result};\nuse crate::types::*;\n\npub fn build_serve_image(\n    engine: ContainerEngine,\n    verbose: bool,\n    state_tarball: &str,\n    tag: &str,\n    ver: Version,\n    base_override: Option<&str>,\n    rebuild: bool,\n    programs: &[ProgramEntry],\n) -> Result<()> {\n    if !Path::new(state_tarball).exists() {\n        return Err(ManagerError::UnfreezeError(format!(\n            \"Tarball not found: {state_tarball}\"\n        )));\n    }\n\n    if !rebuild && image_exists_locally(engine, tag) {\n        eprintln!(\"Image '{tag}' already exists locally; skipping build (use --rebuild to force)\");\n        return Ok(());\n    }\n\n    let tarball_dir = Path::new(state_tarball)\n        .parent()\n        .unwrap_or(Path::new(\".\"));\n    let manifest_path = tarball_dir.join(\"freeze-manifest.json\");\n    let m_manifest = if manifest_path.exists() {\n        crate::freeze::read_freeze_manifest(&manifest_path.to_string_lossy()).ok()\n    } else {\n        None\n    };\n\n    let base_image = match base_override {\n        Some(b) => b.to_string(),\n        None => resolve_base_from_manifest(engine, m_manifest.as_ref(), ver),\n    };\n\n    eprintln!(\"Using base image: {base_image}\");\n    if !image_exists_locally(engine, &base_image) {\n        let exe = engine_executable(engine);\n        if verbose {\n            eprintln!(\"[morloc-manager] {exe} pull {base_image}\");\n        }\n        let (pull_status, _, pull_err) = container_pull(engine, &base_image);\n        if !pull_status.success() {\n            return Err(ManagerError::EngineError {\n                engine,\n                code: exit_code_to_int(pull_status),\n                stderr: pull_err,\n            });\n        }\n    }\n\n    let context_dir = tarball_dir.join(\"serve-build\");\n    fs::create_dir_all(&context_dir)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"mkdir failed: {e}\")))?;\n\n    eprintln!(\"Extracting frozen state...\");\n    let tar_status = Command::new(\"tar\")\n        .args([\"-xzf\", state_tarball, \"-C\", &context_dir.to_string_lossy()])\n        .stdin(Stdio::null())\n        .stdout(Stdio::null())\n        .stderr(Stdio::inherit())\n        .status()\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"tar extract failed: {e}\")))?;\n    if !tar_status.success() {\n        return Err(ManagerError::UnfreezeError(\n            \"tar extract failed (see error output above)\".to_string()\n        ));\n    }\n\n    // Rewrite build.path in each manifest so the nexus chdirs to the\n    // container-internal path instead of the original host path.\n    rewrite_manifest_paths(&context_dir)?;\n\n    let dockerfile_path = context_dir.join(\"Dockerfile\");\n    let has_exe = context_dir.join(\"exe\").is_dir()\n        && fs::read_dir(context_dir.join(\"exe\"))\n            .map(|mut d| d.next().is_some())\n            .unwrap_or(false);\n    let has_opt = context_dir.join(\"opt\").is_dir()\n        && fs::read_dir(context_dir.join(\"opt\"))\n            .map(|mut d| d.next().is_some())\n            .unwrap_or(false);\n    let has_src = context_dir.join(\"src\").is_dir()\n        && fs::read_dir(context_dir.join(\"src\"))\n            .map(|mut d| d.next().is_some())\n            .unwrap_or(false);\n    let mh = CONTAINER_MORLOC_HOME;\n    let exe_line = if has_exe {\n        format!(\"COPY exe/ {mh}/exe/\\n\")\n    } else {\n        String::new()\n    };\n    let opt_line = if has_opt {\n        format!(\"COPY opt/ {mh}/opt/\\n\")\n    } else {\n        String::new()\n    };\n    let src_line = if has_src {\n        format!(\"COPY src/ {mh}/src/\\n\")\n    } else {\n        String::new()\n    };\n    // Podman's OCI format drops HEALTHCHECK; omit it to avoid warnings.\n    let healthcheck = if engine == ContainerEngine::Docker {\n        \"# Health check for container orchestrators\\n\\\n         HEALTHCHECK --interval=30s --timeout=5s --retries=3 \\\\\\n\\\n           CMD curl -sf http://localhost:8080/health || exit 1\\n\\\n         \\n\"\n            .to_string()\n    } else {\n        String::new()\n    };\n    let dockerfile_content = format!(\n        \"# Auto-generated by morloc-manager serve-image\\n\\\n         FROM {base_image}\\n\\\n         \\n\\\n         # Ensure morloc binaries are on PATH\\n\\\n         ENV PATH=\\\"{mh}/bin:${{PATH}}\\\"\\n\\\n         \\n\\\n         # Morloc home for pool path resolution\\n\\\n         ENV MORLOC_HOME=\\\"{mh}\\\"\\n\\\n         \\n\\\n         # Copy frozen morloc state (modules, manifests, binaries, pools)\\n\\\n         COPY lib/ {mh}/lib/\\n\\\n         COPY fdb/ {mh}/fdb/\\n\\\n         COPY bin/ {mh}/bin/\\n\\\n         {exe_line}\\\n         {opt_line}\\\n         {src_line}\\\n         RUN chmod -R a+rX {mh}\\n\\\n         \\n\\\n         {healthcheck}\\\n         # Entrypoint: nexus router aggregates all installed programs\\n\\\n         ENTRYPOINT [\\\"morloc-nexus\\\", \\\"--router\\\", \\\\\\n\\\n                     \\\"--fdb\\\", \\\"{mh}/fdb\\\", \\\\\\n\\\n                     \\\"--http-port\\\", \\\"8080\\\"]\\n\"\n    );\n    fs::write(&dockerfile_path, &dockerfile_content)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"Write Dockerfile failed: {e}\")))?;\n\n    eprintln!(\"Building serve image {tag} (base: {base_image})...\");\n    let build_cfg = BuildConfig {\n        dockerfile: dockerfile_path.to_string_lossy().to_string(),\n        context: context_dir.to_string_lossy().to_string(),\n        tag: tag.to_string(),\n        build_args: Vec::new(),\n    };\n    if verbose {\n        let exe = engine_executable(engine);\n        eprintln!(\n            \"[morloc-manager] {exe} build -f {} -t {tag} {}\",\n            build_cfg.dockerfile, build_cfg.context\n        );\n    }\n    let (status, _, build_err) = container_build(engine, &build_cfg);\n    if !status.success() {\n        return Err(ManagerError::EngineError {\n            engine,\n            code: exit_code_to_int(status),\n            stderr: build_err,\n        });\n    }\n    eprintln!(\"Built serve image: {tag}\");\n\n    // Validate programs work inside the built image\n    validate_programs(engine, tag, programs, Vec::new(), verbose)?;\n\n    // Clean up the temporary build context\n    if let Err(e) = fs::remove_dir_all(&context_dir) {\n        eprintln!(\"Warning: failed to clean up {}: {e}\", context_dir.display());\n    }\n\n    Ok(())\n}\n\n#[allow(dead_code)]\npub fn run_serve_container(\n    engine: ContainerEngine,\n    verbose: bool,\n    image: &str,\n    name: &str,\n    ports: &[(u16, u16)],\n) -> Result<()> {\n    // Clean up any existing dead container with this name (silently)\n    let _ = crate::container::container_remove_quiet(engine, name);\n\n    let port_str: Vec<String> = ports\n        .iter()\n        .map(|(h, c)| format!(\"{h}:{c}\"))\n        .collect();\n    eprintln!(\n        \"Starting serve container {name} on ports {}...\",\n        port_str.join(\", \")\n    );\n\n    let mut cfg = RunConfig::new(image);\n    cfg.read_only = true;\n    cfg.remove_after = false;\n    cfg.name = Some(name.to_string());\n    cfg.ports = ports.to_vec();\n    cfg.extra_flags = vec![\"-d\".to_string()];\n\n    if verbose {\n        let exe = engine_executable(engine);\n        let extra = crate::container::engine_specific_run_flags_io(engine);\n        let args = crate::container::build_run_args(engine, &extra, &cfg);\n        let quoted: Vec<String> = args.iter().map(|a| {\n            if a.contains(' ') { format!(\"'{a}'\") } else { a.clone() }\n        }).collect();\n        eprintln!(\"[morloc-manager] {exe} {}\", quoted.join(\" \"));\n    }\n\n    let (status, _stdout, run_err) = container_run(engine, &cfg);\n    if !status.success() {\n        let _ = crate::container::container_remove_quiet(engine, name);\n        return Err(ManagerError::EngineError {\n            engine,\n            code: exit_code_to_int(status),\n            stderr: run_err,\n        });\n    }\n\n    // Verify container reached running state\n    thread::sleep(Duration::from_secs(1));\n    let exe = engine_executable(engine);\n    let insp_output = Command::new(exe)\n        .args([\"inspect\", \"--format\", \"{{.State.Status}}\", name])\n        .output();\n    match insp_output {\n        Ok(o) if o.status.success() => {\n            let state = String::from_utf8_lossy(&o.stdout).trim().to_string();\n            if state == \"running\" {\n                eprintln!(\"Container {name} started\");\n                eprintln!(\"  Logs:   morloc-manager logs\");\n                eprintln!(\"  Stop:   morloc-manager stop {name}\");\n                eprintln!(\"  Status: morloc-manager status\");\n                Ok(())\n            } else {\n                let log_output = Command::new(exe).args([\"logs\", name]).output();\n                let logs = log_output\n                    .map(|o| {\n                        let stdout = String::from_utf8_lossy(&o.stdout);\n                        let stderr = String::from_utf8_lossy(&o.stderr);\n                        format!(\"{stdout}{stderr}\")\n                    })\n                    .unwrap_or_default();\n                // Clean up the dead container to prevent name conflicts on retry\n                let _ = container_remove(engine, name);\n                Err(ManagerError::EngineError {\n                    engine,\n                    code: 1,\n                    stderr: format!(\"Container failed to start (state: {state}):\\n{logs}\"),\n                })\n            }\n        }\n        _ => Err(ManagerError::EngineError {\n            engine,\n            code: 1,\n            stderr: \"Failed to inspect container state\".to_string(),\n        }),\n    }\n}\n\n/// Serve an environment by bind-mounting its data directory into the container.\npub fn serve_environment(\n    engine: ContainerEngine,\n    verbose: bool,\n    image: &str,\n    data_dir: &str,\n    container_name: &str,\n    ports: &[(u16, u16)],\n    extra_flags: &[String],\n    shm_size: &Option<String>,\n    user_env: &[(String, String)],\n) -> Result<()> {\n    // Clean up any existing dead container with this name (silently)\n    let _ = crate::container::container_remove_quiet(engine, container_name);\n\n    let port_str: Vec<String> = ports\n        .iter()\n        .map(|(h, c)| format!(\"{h}:{c}\"))\n        .collect();\n    eprintln!(\n        \"Starting serve container {container_name} on ports {}...\",\n        port_str.join(\", \")\n    );\n\n    let mut cfg = RunConfig::new(image);\n    cfg.read_only = true;\n    cfg.remove_after = false;\n    cfg.name = Some(container_name.to_string());\n    cfg.ports = ports.to_vec();\n    let mh = CONTAINER_MORLOC_HOME;\n    cfg.bind_mounts = vec![(data_dir.to_string(), mh.to_string())];\n    cfg.env = vec![\n        (\"PATH\".to_string(), format!(\"{mh}/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\")),\n        (\"MORLOC_HOME\".to_string(), mh.to_string()),\n    ];\n    cfg.env.extend(user_env.iter().cloned());\n    cfg.command = Some(vec![\n        \"morloc-nexus\".to_string(),\n        \"--router\".to_string(),\n        \"--fdb\".to_string(), format!(\"{mh}/fdb\"),\n        \"--http-port\".to_string(), \"8080\".to_string(),\n    ]);\n    cfg.shm_size = shm_size.clone();\n    cfg.extra_flags = vec![\"-d\".to_string()];\n    cfg.extra_flags.extend(extra_flags.iter().cloned());\n\n    if verbose {\n        let exe = engine_executable(engine);\n        let extra = crate::container::engine_specific_run_flags_io(engine);\n        let args = crate::container::build_run_args(engine, &extra, &cfg);\n        let quoted: Vec<String> = args.iter().map(|a| {\n            if a.contains(' ') { format!(\"'{a}'\") } else { a.clone() }\n        }).collect();\n        eprintln!(\"[morloc-manager] {exe} {}\", quoted.join(\" \"));\n    }\n\n    let (status, _stdout, run_err) = container_run(engine, &cfg);\n    if !status.success() {\n        // `container_run` may have left a partially-created container behind\n        // (e.g., port conflict after container creation). Clean it up so the\n        // next `start` doesn't fail on a name collision.\n        let _ = crate::container::container_remove_quiet(engine, container_name);\n\n        // Detect port conflict and provide a friendlier error message\n        let lower = run_err.to_lowercase();\n        if lower.contains(\"address already in use\") || lower.contains(\"port is already allocated\")\n            || lower.contains(\"pasta failed\")\n        {\n            // Try to extract the port number from the error\n            let port_hint = ports.first()\n                .map(|(h, _)| format!(\" Port {h} is already in use.\"))\n                .unwrap_or_default();\n            return Err(ManagerError::EnvError(format!(\n                \"{port_hint}\\n  \\\n                 Another container or process is using this port.\\n  \\\n                 Use '-p <other-port>:8080' to choose a different host port, or\\n  \\\n                 check running containers with 'morloc-manager status'.\"\n            )));\n        }\n\n        return Err(ManagerError::EngineError {\n            engine,\n            code: exit_code_to_int(status),\n            stderr: run_err,\n        });\n    }\n\n    // Verify container reached running state\n    thread::sleep(Duration::from_secs(1));\n    let exe = engine_executable(engine);\n    let insp_output = Command::new(exe)\n        .args([\"inspect\", \"--format\", \"{{.State.Status}}\", container_name])\n        .output();\n    match insp_output {\n        Ok(o) if o.status.success() => {\n            let state = String::from_utf8_lossy(&o.stdout).trim().to_string();\n            if state == \"running\" {\n                eprintln!(\"Container {container_name} started\");\n                eprintln!(\"  Logs:   morloc-manager logs\");\n                eprintln!(\"  Stop:   morloc-manager stop\");\n                eprintln!(\"  Status: morloc-manager status\");\n                Ok(())\n            } else {\n                let log_output = Command::new(exe).args([\"logs\", container_name]).output();\n                let logs = log_output\n                    .map(|o| {\n                        let stdout = String::from_utf8_lossy(&o.stdout);\n                        let stderr = String::from_utf8_lossy(&o.stderr);\n                        format!(\"{stdout}{stderr}\")\n                    })\n                    .unwrap_or_default();\n                let _ = container_remove(engine, container_name);\n                Err(ManagerError::EngineError {\n                    engine,\n                    code: 1,\n                    stderr: format!(\"Container failed to start (state: {state}):\\n{logs}\"),\n                })\n            }\n        }\n        _ => Err(ManagerError::EngineError {\n            engine,\n            code: 1,\n            stderr: \"Failed to inspect container state\".to_string(),\n        }),\n    }\n}\n\npub fn stop_serve_container(engine: ContainerEngine, verbose: bool, name: &str) -> Result<()> {\n    if !crate::container::container_exists(engine, name) {\n        return Err(ManagerError::EnvError(format!(\n            \"No serve container running for '{name}'\"\n        )));\n    }\n    if verbose {\n        let exe = engine_executable(engine);\n        eprintln!(\"[morloc-manager] {exe} stop {name}\");\n    }\n    let (status, err) = container_stop(engine, name);\n    let _ = crate::container::container_remove_quiet(engine, name);\n    if !status.success() {\n        return Err(ManagerError::EngineError {\n            engine,\n            code: exit_code_to_int(status),\n            stderr: err,\n        });\n    }\n    Ok(())\n}\n\n/// Build the serve container name for an environment.\n/// Format: morloc-serve-<username>-<envname>\npub fn serve_container_name(env_name: &str) -> String {\n    let user = std::env::var(\"USER\")\n        .or_else(|_| std::env::var(\"LOGNAME\"))\n        .unwrap_or_else(|_| \"unknown\".to_string());\n    format!(\"morloc-serve-{user}-{env_name}\")\n}\n\n/// The prefix used to filter all serve containers for the current user.\npub fn serve_container_prefix() -> String {\n    let user = std::env::var(\"USER\")\n        .or_else(|_| std::env::var(\"LOGNAME\"))\n        .unwrap_or_else(|_| \"unknown\".to_string());\n    format!(\"morloc-serve-{user}-\")\n}\n\n/// Extract the environment name from a serve container name.\npub fn env_name_from_container(container_name: &str) -> &str {\n    let prefix = serve_container_prefix();\n    container_name.strip_prefix(&prefix).unwrap_or(container_name)\n}\n\n#[derive(serde::Serialize)]\npub struct ServeContainerInfo {\n    pub name: String,\n    pub env: String,\n    pub ports: String,\n    pub status: String,\n}\n\n/// Query running serve containers and return structured info.\npub fn query_serve_containers(engine: ContainerEngine, verbose: bool) -> Result<Vec<ServeContainerInfo>> {\n    let exe = engine_executable(engine);\n    let fmt = \"{{.Names}}\\t{{.Status}}\\t{{.Ports}}\";\n    let prefix = serve_container_prefix();\n    let filter = format!(\"name={prefix}\");\n    if verbose {\n        eprintln!(\"[morloc-manager] {exe} ps -a --filter {filter} --format '{fmt}'\");\n    }\n    let output = Command::new(exe)\n        .args([\n            \"ps\", \"-a\", \"--filter\", &filter, \"--format\", fmt,\n        ])\n        // Use /tmp as cwd to avoid podman \"cannot chdir\" failures when the\n        // current directory is inaccessible (e.g. another user's home).\n        .current_dir(\"/tmp\")\n        .output()\n        .map_err(|e| ManagerError::EngineError {\n            engine,\n            code: 1,\n            stderr: format!(\"Failed to list containers: {e}\"),\n        })?;\n    if !output.status.success() {\n        return Err(ManagerError::EngineError {\n            engine,\n            code: exit_code_to_int(output.status),\n            stderr: String::from_utf8_lossy(&output.stderr).to_string(),\n        });\n    }\n    let text = String::from_utf8_lossy(&output.stdout).trim().to_string();\n    let mut result = Vec::new();\n    for line in text.lines() {\n        let parts: Vec<&str> = line.split('\\t').collect();\n        if parts.len() >= 3 {\n            let name = parts[0];\n            let status = parts[1];\n            let ports = parts[2];\n            let env = env_name_from_container(name);\n            result.push(ServeContainerInfo {\n                name: name.to_string(),\n                env: env.to_string(),\n                ports: if ports.is_empty() { \"-\".to_string() } else { ports.to_string() },\n                status: status.to_string(),\n            });\n        }\n    }\n    Ok(result)\n}\n\n/// Find running serve container names for the current user.\npub fn find_running_serve_containers(engine: ContainerEngine) -> Vec<String> {\n    let exe = engine_executable(engine);\n    let filter = format!(\"name={}\", serve_container_prefix());\n    let output = Command::new(exe)\n        .args([\"ps\", \"--filter\", &filter, \"--format\", \"{{.Names}}\"])\n        .current_dir(\"/tmp\")\n        .output();\n    match output {\n        Ok(o) if o.status.success() => {\n            String::from_utf8_lossy(&o.stdout)\n                .lines()\n                .filter(|l| !l.is_empty())\n                .map(|l| l.to_string())\n                .collect()\n        }\n        _ => Vec::new(),\n    }\n}\n\n// ======================================================================\n// Program validation\n// ======================================================================\n\n/// Run `--help` for each installed program inside a container image to\n/// verify that pool processes start correctly (e.g. all imports resolve).\n///\n/// `bind_mounts` should be non-empty for pre-freeze validation (where the\n/// data dir is on the host) and empty for post-unfreeze validation (where\n/// everything is baked into the image).\npub fn validate_programs(\n    engine: ContainerEngine,\n    image: &str,\n    programs: &[ProgramEntry],\n    bind_mounts: Vec<(String, String)>,\n    verbose: bool,\n) -> Result<()> {\n    if programs.is_empty() {\n        return Ok(());\n    }\n    eprintln!(\"Validating installed programs...\");\n    let mut any_failed = false;\n    for prog in programs {\n        let exe_path = format!(\"{}/bin/{}\", CONTAINER_MORLOC_HOME, prog.name);\n        if verbose {\n            let exe = engine_executable(engine);\n            eprintln!(\"[morloc-manager] {exe} run --rm --entrypoint '' {image} {exe_path} --help\");\n        }\n        let cfg = RunConfig {\n            bind_mounts: bind_mounts.clone(),\n            command: Some(vec![exe_path, \"--help\".to_string()]),\n            env: vec![\n                (\"MORLOC_HOME\".to_string(), CONTAINER_MORLOC_HOME.to_string()),\n            ],\n            // Override the image ENTRYPOINT so the command runs directly\n            // instead of being appended to the router entrypoint.\n            extra_flags: vec![\"--entrypoint\".to_string(), \"\".to_string()],\n            ..RunConfig::new(image)\n        };\n        let (status, _stdout, stderr) = container_run_quiet(engine, &cfg);\n        if status.success() {\n            let n = prog.commands.len();\n            eprintln!(\"  [ok] {} ({} commands)\", prog.name, n);\n        } else {\n            let snippet: String = stderr.lines().take(5).collect::<Vec<_>>().join(\"\\n    \");\n            eprintln!(\"  [FAIL] {}: {}\", prog.name, snippet);\n            any_failed = true;\n        }\n    }\n    if any_failed {\n        return Err(ManagerError::FreezeError(\n            \"Some programs failed validation (see errors above)\".to_string(),\n        ));\n    }\n    Ok(())\n}\n\n// ======================================================================\n// Manifest path rewriting for frozen images\n// ======================================================================\n\nconst CONTAINER_MORLOC_HOME: &str = \"/opt/morloc\";\nconst MANIFEST_MARKER: &str = \"### MANIFEST ###\";\n\n/// Rewrite `build.path` in every `.manifest` file under `fdb/` so the\n/// nexus inside the container chdirs to the correct location instead of\n/// the original host path.\nfn rewrite_manifest_paths(context_dir: &Path) -> Result<()> {\n    let fdb_dir = context_dir.join(\"fdb\");\n    if !fdb_dir.is_dir() {\n        return Ok(());\n    }\n    let entries = fs::read_dir(&fdb_dir)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"read fdb/: {e}\")))?;\n    for entry in entries {\n        let entry = entry\n            .map_err(|e| ManagerError::UnfreezeError(format!(\"read fdb/ entry: {e}\")))?;\n        let path = entry.path();\n        let name = entry.file_name();\n        let name_str = name.to_string_lossy();\n        if !name_str.ends_with(\".manifest\") {\n            continue;\n        }\n        let prog_name = &name_str[..name_str.len() - \".manifest\".len()];\n        let container_build_path = format!(\"{}/exe/{}\", CONTAINER_MORLOC_HOME, prog_name);\n        rewrite_one_manifest(&path, &container_build_path)?;\n    }\n    Ok(())\n}\n\n/// Rewrite the `build.path` field in a single manifest wrapper script.\nfn rewrite_one_manifest(path: &Path, new_build_path: &str) -> Result<()> {\n    let content = fs::read_to_string(path)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"read {}: {e}\", path.display())))?;\n\n    let (prefix, json_str) = if content.starts_with(\"#!\") {\n        if let Some(marker_pos) = content.find(MANIFEST_MARKER) {\n            let after_marker = &content[marker_pos..];\n            let json_start = after_marker\n                .find('\\n')\n                .map(|i| marker_pos + i + 1)\n                .unwrap_or(content.len());\n            (&content[..json_start], &content[json_start..])\n        } else {\n            return Ok(()); // no marker, skip\n        }\n    } else {\n        (\"\", content.as_str())\n    };\n\n    let mut manifest: serde_json::Value = serde_json::from_str(json_str)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"parse {}: {e}\", path.display())))?;\n\n    if let Some(build) = manifest.get_mut(\"build\") {\n        if let Some(p) = build.get_mut(\"path\") {\n            *p = serde_json::Value::String(new_build_path.to_string());\n        }\n    }\n\n    let new_json = serde_json::to_string(&manifest)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"serialize {}: {e}\", path.display())))?;\n\n    let new_content = format!(\"{}{}\\n\", prefix, new_json);\n    fs::write(path, new_content)\n        .map_err(|e| ManagerError::UnfreezeError(format!(\"write {}: {e}\", path.display())))?;\n    Ok(())\n}\n\n// ======================================================================\n// Manifest and image resolution\n// ======================================================================\n\nfn resolve_base_from_manifest(\n    engine: ContainerEngine,\n    m_manifest: Option<&FreezeManifest>,\n    ver: Version,\n) -> String {\n    let ghcr_fallback = format!(\n        \"ghcr.io/morloc-project/morloc/morloc-full:{}\",\n        ver.show()\n    );\n    let Some(fm) = m_manifest else {\n        return ghcr_fallback;\n    };\n\n    // Resolve the effective base image: use manifest's base_image if it exists\n    // locally, otherwise fall back to the GHCR image. The manifest may record a\n    // locally-retagged image (e.g. localhost/morloc:0.69.0) that won't exist on\n    // other machines.\n    let effective_base = if image_exists_locally(engine, &fm.base_image) {\n        fm.base_image.clone()\n    } else {\n        eprintln!(\n            \"Base image '{}' not found locally, trying GHCR fallback...\",\n            fm.base_image\n        );\n        ghcr_fallback\n    };\n\n    match &fm.env_layer {\n        None => effective_base,\n        Some(fel) => {\n            // Fast path: env image tag exists locally\n            if let Some(ref tag) = fel.image_tag {\n                let exe = engine_executable(engine);\n                let check = Command::new(exe)\n                    .args([\"image\", \"inspect\", tag])\n                    .stdout(Stdio::null())\n                    .stderr(Stdio::null())\n                    .status();\n                if check.map(|s| s.success()).unwrap_or(false) {\n                    return tag.clone();\n                }\n            }\n            // Rebuild env layer from stored Dockerfile using effective base\n            rebuild_env_image(engine, &effective_base, fm, fel)\n        }\n    }\n}\n\nfn rebuild_env_image(\n    engine: ContainerEngine,\n    effective_base: &str,\n    fm: &FreezeManifest,\n    fel: &FrozenEnvLayer,\n) -> String {\n    let env_tag = format!(\n        \"localhost/morloc-env:{}-{}\",\n        fm.morloc_version.show(),\n        fel.name\n    );\n    let exe = engine_executable(engine);\n    // Check if tagged image exists locally\n    let check = Command::new(exe)\n        .args([\"image\", \"inspect\", &env_tag])\n        .stdout(Stdio::null())\n        .stderr(Stdio::null())\n        .status();\n    if check.map(|s| s.success()).unwrap_or(false) {\n        return env_tag;\n    }\n\n    eprintln!(\"Building deployment image (environment layer: {})\", fel.name);\n    let build_dir = \"/tmp/morloc-env-rebuild\";\n    let _ = fs::create_dir_all(build_dir);\n    let df_path = format!(\"{build_dir}/Dockerfile\");\n    let _ = fs::write(&df_path, &fel.dockerfile);\n    let build_cfg = BuildConfig {\n        dockerfile: df_path,\n        context: build_dir.to_string(),\n        tag: env_tag.clone(),\n        build_args: vec![(\"CONTAINER_BASE\".to_string(), effective_base.to_string())],\n    };\n    let (status, _, build_err) = container_build(engine, &build_cfg);\n    if status.success() {\n        env_tag\n    } else {\n        eprintln!(\n            \"Warning: env rebuild failed, falling back to base image: {build_err}\"\n        );\n        effective_base.to_string()\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-manager/src/types.rs",
    "content": "use serde::{Deserialize, Deserializer, Serialize, Serializer};\nuse std::cmp::Ordering;\nuse std::str::FromStr;\n\n// ======================================================================\n// Core enumerations\n// ======================================================================\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\npub enum Scope {\n    Local,\n    System,\n}\n\nimpl Serialize for Scope {\n    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n        match self {\n            Scope::Local => serializer.serialize_str(\"local\"),\n            Scope::System => serializer.serialize_str(\"system\"),\n        }\n    }\n}\n\nimpl<'de> Deserialize<'de> for Scope {\n    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n        let s = String::deserialize(deserializer)?;\n        match s.as_str() {\n            \"local\" => Ok(Scope::Local),\n            \"system\" => Ok(Scope::System),\n            _ => Err(serde::de::Error::custom(format!(\"Unknown scope: {s}\"))),\n        }\n    }\n}\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum ContainerEngine {\n    Docker,\n    Podman,\n}\n\nimpl Serialize for ContainerEngine {\n    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n        match self {\n            ContainerEngine::Docker => serializer.serialize_str(\"docker\"),\n            ContainerEngine::Podman => serializer.serialize_str(\"podman\"),\n        }\n    }\n}\n\nimpl<'de> Deserialize<'de> for ContainerEngine {\n    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n        let s = String::deserialize(deserializer)?;\n        match s.as_str() {\n            \"docker\" => Ok(ContainerEngine::Docker),\n            \"podman\" => Ok(ContainerEngine::Podman),\n            _ => Err(serde::de::Error::custom(format!(\n                \"Unknown container engine: {s}\"\n            ))),\n        }\n    }\n}\n\n// ======================================================================\n// Version\n// ======================================================================\n\n#[derive(Debug, Clone, PartialEq, Eq)]\npub struct Version {\n    pub major: u32,\n    pub minor: u32,\n    pub patch: u32,\n    pub prerelease: Option<String>,\n}\n\nimpl Version {\n    #[cfg(test)]\n    pub fn new(major: u32, minor: u32, patch: u32) -> Self {\n        Self {\n            major,\n            minor,\n            patch,\n            prerelease: None,\n        }\n    }\n\n    pub fn show(&self) -> String {\n        match &self.prerelease {\n            Some(pre) => format!(\"{}.{}.{}-{}\", self.major, self.minor, self.patch, pre),\n            None => format!(\"{}.{}.{}\", self.major, self.minor, self.patch),\n        }\n    }\n}\n\nimpl Ord for Version {\n    fn cmp(&self, other: &Self) -> Ordering {\n        self.major\n            .cmp(&other.major)\n            .then(self.minor.cmp(&other.minor))\n            .then(self.patch.cmp(&other.patch))\n            .then(match (&self.prerelease, &other.prerelease) {\n                (None, None) => Ordering::Equal,\n                (Some(_), None) => Ordering::Less,    // pre-release < release\n                (None, Some(_)) => Ordering::Greater,  // release > pre-release\n                (Some(a), Some(b)) => a.cmp(b),\n            })\n    }\n}\n\nimpl PartialOrd for Version {\n    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n        Some(self.cmp(other))\n    }\n}\n\nimpl FromStr for Version {\n    type Err = String;\n\n    fn from_str(s: &str) -> Result<Self, Self::Err> {\n        // Split off pre-release suffix on first '-': \"0.77.0-rc.1\" -> (\"0.77.0\", Some(\"rc.1\"))\n        let (version_part, prerelease) = match s.find('-') {\n            Some(idx) => (&s[..idx], Some(s[idx + 1..].to_string())),\n            None => (s, None),\n        };\n        let parts: Vec<&str> = version_part.split('.').collect();\n        if parts.len() != 3 {\n            return Err(format!(\"Invalid version: {s}. Expected format: MAJOR.MINOR.PATCH[-PRERELEASE]\"));\n        }\n        let major = parts[0]\n            .parse()\n            .map_err(|_| format!(\"Invalid major version: {}\", parts[0]))?;\n        let minor = parts[1]\n            .parse()\n            .map_err(|_| format!(\"Invalid minor version: {}\", parts[1]))?;\n        let patch = parts[2]\n            .parse()\n            .map_err(|_| format!(\"Invalid patch version: {}\", parts[2]))?;\n        Ok(Version {\n            major,\n            minor,\n            patch,\n            prerelease,\n        })\n    }\n}\n\nimpl Serialize for Version {\n    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n        serializer.serialize_str(&self.show())\n    }\n}\n\nimpl<'de> Deserialize<'de> for Version {\n    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n        let s = String::deserialize(deserializer)?;\n        s.parse().map_err(serde::de::Error::custom)\n    }\n}\n\n// ======================================================================\n// Configuration\n// ======================================================================\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct Config {\n    /// Name of the active environment.\n    pub active_env: Option<String>,\n    /// Default container engine.\n    #[serde(default = \"default_engine\")]\n    pub engine: ContainerEngine,\n}\n\nfn default_engine() -> ContainerEngine {\n    ContainerEngine::Podman\n}\n\nimpl Default for Config {\n    fn default() -> Self {\n        Self {\n            active_env: None,\n            engine: ContainerEngine::Podman,\n        }\n    }\n}\n\n// ======================================================================\n// Environment configuration\n// ======================================================================\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct EnvironmentConfig {\n    /// Human-readable name (also the directory name).\n    pub name: String,\n    /// Base container image reference.\n    pub base_image: String,\n    /// Original pullable image reference (e.g., :edge tag) before local re-tagging.\n    #[serde(default)]\n    pub original_image: Option<String>,\n    /// Filename of the custom Dockerfile layer (within the env config dir).\n    #[serde(default)]\n    pub dockerfile: Option<String>,\n    /// SHA256 hash of the Dockerfile content (for rebuild detection).\n    #[serde(default)]\n    pub content_hash: Option<String>,\n    /// Built image tag after applying the Dockerfile layer.\n    /// None when only the base image is used.\n    #[serde(default)]\n    pub built_image: Option<String>,\n    /// Container engine for this environment.\n    pub engine: ContainerEngine,\n    /// Shared memory size for container runs.\n    #[serde(default = \"default_shm_size\")]\n    pub shm_size: String,\n    /// Morloc version this environment was created from.\n    #[serde(default)]\n    pub morloc_version: Option<Version>,\n}\n\nfn default_shm_size() -> String {\n    \"512m\".to_string()\n}\n\nimpl EnvironmentConfig {\n    /// Returns the image to use for running containers.\n    /// Prefers the built Dockerfile layer image, falls back to base_image.\n    pub fn active_image(&self) -> &str {\n        self.built_image.as_deref().unwrap_or(&self.base_image)\n    }\n}\n\n// ======================================================================\n// Freeze manifest\n// ======================================================================\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct FreezeManifest {\n    pub morloc_version: Version,\n    pub frozen_at: chrono::DateTime<chrono::Utc>,\n    pub modules: Vec<ModuleEntry>,\n    pub programs: Vec<ProgramEntry>,\n    pub base_image: String,\n    pub env_layer: Option<FrozenEnvLayer>,\n    /// Deprecated: previously held expected env var names. Retained for backward\n    /// compatibility when reading older freeze manifests.\n    #[serde(default, skip_serializing)]\n    #[allow(dead_code)]\n    pub env_vars: Vec<String>,\n}\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct FrozenEnvLayer {\n    pub name: String,\n    pub dockerfile: String,\n    pub content_hash: String,\n    /// Container image tag (e.g. localhost/morloc-env:0.79.2-dnd).\n    /// Named image_tag because it stores a mutable tag, not a content-addressed digest.\n    #[serde(alias = \"image_digest\")]\n    pub image_tag: Option<String>,\n}\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct ModuleEntry {\n    pub name: String,\n    pub version: Option<String>,\n    pub sha256: String,\n}\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct ProgramEntry {\n    pub name: String,\n    pub commands: Vec<String>,\n}\n"
  },
  {
    "path": "data/rust/morloc-manifest/Cargo.toml",
    "content": "[package]\nname = \"morloc-manifest\"\nversion = \"0.81.0\"\nedition = \"2021\"\ndescription = \"Morloc manifest schema (v2): shared Rust types for the .manifest JSON blob produced by the morloc compiler and consumed by the nexus, runtime, and any future tooling.\"\n# Version is intentionally synchronized with the morloc compiler version\n# (see ../../../package.yaml). Manifests record the morloc_version that\n# built them; consumers reject manifests whose version differs from\n# CARGO_PKG_VERSION at parse time. Bumping the morloc compiler version\n# requires bumping this version in lockstep.\n\n[dependencies]\nserde = { workspace = true }\nserde_json = { workspace = true }\n"
  },
  {
    "path": "data/rust/morloc-manifest/src/lib.rs",
    "content": "//! Morloc manifest schema (v2) -- canonical Rust types.\n//!\n//! The morloc compiler emits a `.manifest` JSON blob describing every\n//! exported command's interface. This crate is the **single source of\n//! truth** for that schema's Rust representation. Both the CLI nexus\n//! (`morloc-nexus`) and the C-FFI runtime (`morloc-runtime`) depend on\n//! these types so neither has to maintain its own deserialization\n//! logic.\n//!\n//! ## Versioning\n//!\n//! The manifest does not carry a dedicated schema version. Manifests\n//! are transient build artifacts (always regenerated on `morloc make`,\n//! never stored in version control), so the morloc compiler version\n//! recorded in the [`Build`] sub-object serves as the staleness\n//! indicator. The check happens in [`parse_manifest`].\n//!\n//! Version coupling: this crate's `CARGO_PKG_VERSION` is intentionally\n//! kept in lockstep with the morloc Haskell compiler version (see\n//! `package.yaml`). The same is true of `morloc-nexus` and\n//! `morloc-runtime` -- bumping the morloc compiler requires bumping\n//! all three Rust crates in the same commit.\n//!\n//! ## Extension slots\n//!\n//! Every entity object (manifest, pool, command, arg, return, group,\n//! service) carries:\n//!\n//! - `constraints: Vec<Constraint>` -- enforceable rules. Currently\n//!   the compiler emits only the `kind` constraint on named-type args\n//!   (record/object/table). Future constraints (`min`, `max`, `regex`,\n//!   `length`, `non_empty`, `row_count`, ...) will append to this\n//!   list without any schema change.\n//!\n//! - `metadata: BTreeMap<String, serde_json::Value>` -- free-form\n//!   informational key-value pairs. Always emitted as `{}` today;\n//!   reserved so consumers never have to check whether the field\n//!   exists. Future doc hints, studio annotations, telemetry tags,\n//!   etc. live here until they stabilize into first-class fields.\n//!\n//! Many of these slots are `#[allow(dead_code)]` because no current\n//! consumer reads them. They are deliberate forward-compatible\n//! placeholders, not vestigial fields.\n//!\n//! ## Unknown-field tolerance\n//!\n//! All structs use `#[serde(default)]` on optional fields and silently\n//! ignore unknown JSON keys. A manifest written by a newer morloc\n//! compiler will still parse with an older nexus (modulo the version\n//! mismatch error in [`parse_manifest`]).\n\nuse serde::Deserialize;\nuse std::collections::BTreeMap;\n\n/// Convenient alias for the `metadata` extension slot. Using\n/// `BTreeMap` (rather than `HashMap` or raw `serde_json::Value`) gives\n/// us (a) compile-time enforcement that metadata is always a JSON\n/// object, and (b) deterministic iteration order for stable diffs.\npub type Metadata = BTreeMap<String, serde_json::Value>;\n\n// -- Top-level manifest -------------------------------------------------------\n\n/// The top-level manifest object. Embedded in every built nexus binary\n/// as a JSON blob after the `### MANIFEST ###` marker.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct Manifest {\n    /// Program identifier -- comes from the morloc `module` declaration.\n    pub name: String,\n    /// Compiler-sourced build metadata (path, timestamp, version).\n    /// Distinct from the user-sourced top-level `metadata` slot.\n    pub build: Build,\n    /// Language pool daemons that this program dispatches to.\n    #[serde(default)]\n    pub pools: Vec<Pool>,\n    /// Exported commands the user can invoke.\n    #[serde(default)]\n    pub commands: Vec<Command>,\n    /// Command groups for organizing CLI subcommands in help output.\n    #[serde(default)]\n    pub groups: Vec<CmdGroup>,\n    /// Daemon-mode service configuration. None for normal CLI mode.\n    #[serde(default)]\n    pub service: Option<Service>,\n    /// Module-level description lines (from docstrings before `module`).\n    /// Shown after \"Usage:\" and before \"Nexus options\" in top-level help.\n    #[serde(default)]\n    pub desc: Vec<String>,\n    /// Epilogue blocks shown at the end of top-level help output.\n    #[serde(default)]\n    pub epilogues: Vec<Vec<String>>,\n    /// **Reserved.** User-sourced free-form annotations on the module.\n    /// Always emitted as `{}` today. Distinct from `build` (which is\n    /// compiler-sourced).\n    #[serde(default)]\n    pub metadata: Metadata,\n}\n\n/// Compiler-sourced metadata about how this manifest was produced.\n///\n/// Future build fields (`hash`, `source_hash`, `host`, `user`, `system`,\n/// `dependencies`, `cflags`, `reproducible`, ...) will be added directly\n/// to this struct as additive non-breaking changes -- no sub-metadata\n/// nesting required.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct Build {\n    /// Absolute path to the build directory containing this program's\n    /// pool executables and generated source files. The nexus chdirs\n    /// here at startup so relative pool exec paths resolve.\n    pub path: String,\n    /// Unix timestamp at which the manifest was generated.\n    pub time: i64,\n    /// Version of the morloc compiler that produced this manifest. The\n    /// nexus compares this against its own compile-time\n    /// `CARGO_PKG_VERSION` (which is intentionally synchronized with\n    /// the morloc compiler version) in [`parse_manifest`]; a mismatch\n    /// produces an actionable \"rebuild with the current compiler\"\n    /// error.\n    pub morloc_version: String,\n}\n\n/// A single language pool daemon. Each pool is one OS process that\n/// hosts the language-specific implementations of source functions.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct Pool {\n    /// Language tag (e.g. `\"py\"`, `\"cpp\"`, `\"r\"`, `\"julia\"`).\n    pub lang: String,\n    /// argv used to spawn the pool process (e.g. `[\"python3\", \"pool.py\"]`).\n    pub exec: Vec<String>,\n    /// Unix domain socket basename (under tmpdir) for IPC.\n    pub socket: String,\n    /// **Reserved.** Per-pool metadata. Future slots: `resource`\n    /// (cpu/memory limits), `env` (environment variables),\n    /// `startup_timeout`, `health_check`.\n    #[serde(default)]\n    pub metadata: Metadata,\n}\n\n// -- Commands -----------------------------------------------------------------\n\n/// Discriminator for the command kind. Closed enum so adding a new\n/// variant requires explicit code changes everywhere it's matched.\n#[derive(Debug, Deserialize, Clone, Copy, PartialEq, Eq)]\n#[serde(rename_all = \"lowercase\")]\npub enum CmdType {\n    /// Dispatched to a language pool process via IPC.\n    Remote,\n    /// Evaluated inline by the nexus from an embedded expression tree.\n    Pure,\n}\n\n/// One exported morloc function the user can invoke as a CLI subcommand.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct Command {\n    /// CLI subcommand name (defaults to the morloc function name; can\n    /// be overridden via a `--' name:` docstring directive).\n    pub name: String,\n    /// Discriminator: [`CmdType::Remote`] (dispatch to a pool) or\n    /// [`CmdType::Pure`] (evaluate inline via the manifest's `expr`\n    /// tree).\n    #[serde(rename = \"type\")]\n    pub cmd_type: CmdType,\n\n    // -- Remote-only dispatch info ----------------------------------------\n    /// Manifold ID -- the integer key under which the pool's dispatch\n    /// table contains this function's entry. Remote commands only.\n    #[serde(default)]\n    pub mid: u32,\n    /// Index into [`Manifest::pools`] for the primary pool that hosts\n    /// this command's top-level function. Remote commands only.\n    #[serde(default, rename = \"pool\")]\n    pub pool_index: usize,\n    /// Indices of every pool transitively required to execute this\n    /// command (the primary pool plus any pools called as foreign\n    /// functions from inside it). Remote commands only.\n    #[serde(default)]\n    pub needed_pools: Vec<usize>,\n\n    // -- Common fields ----------------------------------------------------\n    /// Description lines shown in CLI help. The first line is the\n    /// summary used in subcommand listings.\n    #[serde(default)]\n    pub desc: Vec<String>,\n    /// Argument list, in declaration order. Each entry is a\n    /// discriminated union -- see [`Arg`].\n    #[serde(default)]\n    pub args: Vec<Arg>,\n    /// Return-value descriptor. Always present, even for nullary\n    /// returns (use a Nil schema in that case).\n    #[serde(default, rename = \"return\")]\n    pub ret: Return,\n    /// **Reserved.** Command-level constraints -- invariants that span\n    /// multiple arguments (e.g. `equal_length` of two list args).\n    /// Empty in v2; populated when the constraint system rolls out.\n    #[serde(default)]\n    pub constraints: Vec<Constraint>,\n    /// **Reserved.** Per-command metadata. Future slots: `effects`\n    /// (declared I/O / network / filesystem effect set), `resource`\n    /// (CPU/memory/time limits), `auth` (required capabilities),\n    /// `version` (semantic version of the command's API),\n    /// `deprecated` (migration notice).\n    #[serde(default)]\n    pub metadata: Metadata,\n\n    // -- Pure-only evaluation info ----------------------------------------\n    /// Embedded expression tree (NexusExpr JSON) for pure commands.\n    /// Evaluated inline by the nexus instead of being dispatched to a\n    /// pool. Absent on remote commands.\n    #[serde(default)]\n    pub expr: Option<serde_json::Value>,\n\n    // -- Command group ----------------------------------------------------\n    /// Optional name of the command group this command belongs to. Used\n    /// to organize subcommands in help output. The Haskell emitter now\n    /// writes a real JSON null for absent groups (see\n    /// `Morloc.CodeGenerator.Nexus.cmdGroupField`), so no custom\n    /// deserializer is needed.\n    #[serde(default)]\n    pub group: Option<String>,\n}\n\nimpl Command {\n    pub fn is_pure(&self) -> bool {\n        self.cmd_type == CmdType::Pure\n    }\n}\n\n/// Return-value descriptor. Structurally similar to a typed [`Arg`]\n/// minus the CLI-specific fields (kind, metavar, quoted, short/long,\n/// default). Always present on every command.\n#[derive(Debug, Deserialize, Default)]\n#[allow(dead_code)]\npub struct Return {\n    /// Morloc serialization schema string for the return type. The\n    /// nexus uses this to deserialize the bytes coming back from the\n    /// pool process.\n    #[serde(default)]\n    pub schema: String,\n    /// User-facing type name as written in the morloc source (e.g.\n    /// `\"Int\"`, `\"Config\"`, `\"[Int]\"`). Used in help output and error\n    /// messages. JSON key is `type`; the Rust field is `type_desc`\n    /// because `type` is a reserved keyword.\n    #[serde(default, rename = \"type\")]\n    pub type_desc: String,\n    /// Description lines for the return value, parsed from `--' return:`\n    /// docstrings.\n    #[serde(default)]\n    pub desc: Vec<String>,\n    /// **Reserved.** Constraints on the return value. Currently used\n    /// only for `kind: record|object|table` on named return types;\n    /// future constraints (min/max/regex/...) will live here.\n    #[serde(default)]\n    pub constraints: Vec<Constraint>,\n    /// **Reserved.** Per-return metadata; same forward-compatibility\n    /// rationale as the per-arg slot.\n    #[serde(default)]\n    pub metadata: Metadata,\n}\n\n// -- Constraints --------------------------------------------------------------\n\n/// A single constraint entry attached to an arg, return value, or\n/// command. Discriminated by `type`.\n///\n/// **Currently emitted constraint types**:\n///\n/// - `kind`: marks a named type as `record` / `object` / `table`. The\n///   `value` payload is the lowercased name. The CLI help renderer\n///   uses this to partition into Record Schemas / Table Schemas\n///   sections.\n///\n/// **Reserved constraint types** (defined shapes, not yet emitted by\n/// any compiler pass -- names and payloads chosen so the schema\n/// doesn't need to bump when the constraint feature lands):\n///\n/// - `min`, `max`: numeric bounds with `value: <number>`.\n/// - `length`: `value: <int>` for a fixed length, or `{min, max}` for\n///   a bounded range.\n/// - `non_empty`: list/string must be non-empty (no payload).\n/// - `regex`: string must match `value: \"<pcre>\"`.\n/// - `enum`: value must be one of `value: [...]`.\n/// - `unique`: list elements must be pairwise distinct (no payload).\n/// - `row_count`: fixed/bounded row count for `table`-tagged args.\n/// - `sorted`: `value: \"asc\"|\"desc\"`.\n///\n/// **Extensibility rules**:\n///\n/// 1. Unknown `type` values MUST be silently ignored by readers.\n/// 2. Adding a new constraint type does not bump the manifest version.\n/// 3. Changing the payload shape of an existing type DOES bump.\n/// 4. Constraints are unordered.\n/// 5. Multiple constraints of the same type on the same entity are\n///    allowed.\n#[derive(Debug, Deserialize, Clone)]\n#[allow(dead_code)]\npub struct Constraint {\n    /// Constraint discriminator. JSON key is `type`; the Rust field is\n    /// `ctype` because `type` is a reserved keyword.\n    #[serde(rename = \"type\")]\n    pub ctype: String,\n    /// Constraint-specific payload. Shape depends on `ctype`. Some\n    /// constraint types (`non_empty`, `unique`) carry no payload.\n    #[serde(default)]\n    pub value: Option<serde_json::Value>,\n}\n\n// -- Arguments ----------------------------------------------------------------\n\n/// CLI argument variants. Each command's argument list is a sequence\n/// of these, in declaration order.\n///\n/// Three of the four variants (`Positional`, `Optional`, `Group`)\n/// carry type/schema/constraints information because they represent\n/// typed values that flow through to a pool. `Flag` is a pure boolean\n/// toggle with no associated type -- and therefore no `schema`,\n/// `type_desc`, or `constraints` slot.\n///\n/// `#[allow(dead_code)]` covers the `metadata` slots that are reserved\n/// for future use and not read by any current consumer.\n#[derive(Debug, Deserialize)]\n#[serde(tag = \"kind\")]\n#[allow(dead_code)]\npub enum Arg {\n    /// A positional CLI argument.\n    #[serde(rename = \"pos\")]\n    Positional {\n        /// Morloc serialization schema string. Used at dispatch time\n        /// to parse the user's CLI input into a binary data packet.\n        #[serde(default)]\n        schema: Option<String>,\n        /// User-facing type name (e.g. `\"Int\"`, `\"Config\"`). The Rust\n        /// field is `type_desc` because `type` is a reserved keyword.\n        #[serde(default, rename = \"type\")]\n        type_desc: Option<String>,\n        /// Display placeholder shown in help (e.g. `\"FILE\"`). None\n        /// falls back to a generic `ARG` placeholder.\n        #[serde(default)]\n        metavar: Option<String>,\n        /// If true, the user's CLI value is JSON-wrapped before being\n        /// passed to the pool. Used for `Str`-typed positionals\n        /// declared with `--' literal: true`.\n        #[serde(default)]\n        quoted: bool,\n        /// Description lines from `--' desc:` docstrings.\n        #[serde(default)]\n        desc: Vec<String>,\n        /// Per-argument enforceable invariants. Currently only the\n        /// `kind` constraint is emitted (for named-type args). Future\n        /// constraints (min/max/regex/length/...) will populate this.\n        #[serde(default)]\n        constraints: Vec<Constraint>,\n        /// **Reserved.** Per-argument informational metadata. Always\n        /// emitted as `{}` in v2; not yet read by any consumer.\n        #[serde(default)]\n        metadata: Metadata,\n    },\n    /// An optional CLI argument with a long/short option name.\n    #[serde(rename = \"opt\")]\n    Optional {\n        /// Morloc serialization schema for the option's value type.\n        #[serde(default)]\n        schema: Option<String>,\n        /// User-facing type name. JSON key is `type`.\n        #[serde(default, rename = \"type\")]\n        type_desc: Option<String>,\n        /// Required display placeholder (e.g. `\"FILE\"`).\n        #[serde(default)]\n        metavar: Option<String>,\n        /// JSON-wrap flag for `Str`-typed literal options.\n        #[serde(default)]\n        quoted: bool,\n        /// Single-character short option (e.g. `\"f\"` for `-f`).\n        #[serde(default, rename = \"short\")]\n        short_opt: Option<String>,\n        /// Long option name (e.g. `\"file\"` for `--file`).\n        #[serde(default, rename = \"long\")]\n        long_opt: Option<String>,\n        /// Default value used when the user does not pass the option.\n        /// Always present (declared via `--' default:` docstring).\n        #[serde(default, rename = \"default\")]\n        default_val: Option<String>,\n        /// Description lines.\n        #[serde(default)]\n        desc: Vec<String>,\n        /// Per-argument constraints -- see [`Arg::Positional`]'s\n        /// `constraints` field.\n        #[serde(default)]\n        constraints: Vec<Constraint>,\n        /// **Reserved.** Per-argument metadata. Not read in v2.\n        #[serde(default)]\n        metadata: Metadata,\n    },\n    /// A pure boolean flag toggle. Carries no type, schema, or\n    /// constraints because it has no payload -- flipping the flag\n    /// produces the value `true` or `false`.\n    #[serde(rename = \"flag\")]\n    Flag {\n        /// Single-character short option (e.g. `\"v\"` for `-v`).\n        #[serde(default, rename = \"short\")]\n        short_opt: Option<String>,\n        /// Long option name (e.g. `\"verbose\"` for `--verbose`).\n        #[serde(default, rename = \"long\")]\n        long_opt: Option<String>,\n        /// Long option name that flips the flag in the opposite\n        /// direction (e.g. `\"no-verbose\"` for `--no-verbose`).\n        #[serde(default)]\n        long_rev: Option<String>,\n        /// Default value when the flag is not present on the CLI.\n        /// String form: `\"true\"` or `\"false\"`.\n        #[serde(default, rename = \"default\")]\n        default_val: Option<String>,\n        /// Description lines.\n        #[serde(default)]\n        desc: Vec<String>,\n        /// **Reserved.** Per-flag metadata. Not read in v2.\n        #[serde(default)]\n        metadata: Metadata,\n    },\n    /// A record-typed argument that has been \"unrolled\" into a flat\n    /// collection of CLI flags/options, one per record field. The\n    /// group's top-level `schema` is the schema of the whole record;\n    /// dispatch sends the assembled record to the pool, so individual\n    /// entries never need their own schemas.\n    #[serde(rename = \"grp\")]\n    Group {\n        /// Morloc schema for the whole record (a `Map` schema).\n        #[serde(default)]\n        schema: Option<String>,\n        /// User-facing record type name (e.g. `\"SysConfig\"`).\n        #[serde(default, rename = \"type\")]\n        type_desc: Option<String>,\n        /// Display placeholder for the group as a whole.\n        #[serde(default)]\n        metavar: Option<String>,\n        /// Description lines for the group.\n        #[serde(default)]\n        desc: Vec<String>,\n        /// Optional CLI option that accepts the entire record as a\n        /// single JSON value (e.g. `--sys-config '{...}'`).\n        #[serde(default)]\n        group_opt: Option<GroupOpt>,\n        /// Flattened per-field options/flags. Each entry's `arg` is a\n        /// nested [`Arg`] (typically `Optional` or `Flag`) that has\n        /// no schema of its own -- only the group's top-level schema\n        /// matters at dispatch time.\n        #[serde(default)]\n        entries: Vec<GroupEntry>,\n        /// Per-group constraints. Currently the `kind` constraint\n        /// (almost always `record`) is emitted.\n        #[serde(default)]\n        constraints: Vec<Constraint>,\n        /// **Reserved.** Per-group metadata. Not read in v2.\n        #[serde(default)]\n        metadata: Metadata,\n    },\n}\n\n/// Nested CLI option that accepts the entire record (associated with\n/// an [`Arg::Group`]) as a single JSON value.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct GroupOpt {\n    /// Short option char that accepts the whole record as one JSON value.\n    #[serde(default, rename = \"short\")]\n    pub short_opt: Option<String>,\n    /// Long option name that accepts the whole record as one JSON value.\n    #[serde(default, rename = \"long\")]\n    pub long_opt: Option<String>,\n}\n\n/// One entry inside an [`Arg::Group`] -- pairs a record field name\n/// with the CLI flag/option that backs it.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct GroupEntry {\n    /// Record field name.\n    pub key: String,\n    /// CLI binding for this field. Always a [`Arg::Optional`] or\n    /// [`Arg::Flag`] in practice; never carries its own schema (the\n    /// containing group's schema covers all fields).\n    pub arg: Arg,\n}\n\n/// CLI command group -- purely organizational metadata used to bucket\n/// related subcommands together in the help output.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct CmdGroup {\n    /// Group name; matches `Command::group` on member commands.\n    pub name: String,\n    /// Group description lines for the help output.\n    #[serde(default)]\n    pub desc: Vec<String>,\n    /// **Reserved.** Per-group metadata. Not read in v2.\n    #[serde(default)]\n    pub metadata: Metadata,\n}\n\n/// Daemon-mode service configuration. Present only when the program\n/// is configured to run as a long-lived service rather than a one-shot\n/// CLI invocation.\n#[derive(Debug, Deserialize)]\n#[allow(dead_code)]\npub struct Service {\n    /// Transport type: typically `\"http\"`, `\"tcp\"`, or `\"unix\"`.\n    #[serde(rename = \"type\")]\n    pub service_type: Option<String>,\n    /// Listening host address (TCP/HTTP).\n    pub host: Option<String>,\n    /// Listening port (TCP/HTTP).\n    pub port: Option<i32>,\n    /// Unix socket path (when `service_type` is `\"unix\"`).\n    pub socket: Option<String>,\n    /// **Reserved.** Per-service metadata. Not read in v2.\n    #[serde(default)]\n    pub metadata: Metadata,\n}\n\n// -- I/O ----------------------------------------------------------------------\n\n/// Read the manifest payload from a built-nexus wrapper script. The\n/// nexus binary is wrapped in a shell script that contains a\n/// `### MANIFEST ###` marker followed by the JSON blob. Plain JSON\n/// files (no shebang) are returned as-is.\npub fn read_manifest_payload(path: &str) -> Result<String, String> {\n    let content = std::fs::read_to_string(path)\n        .map_err(|e| format!(\"Cannot open manifest file '{}': {}\", path, e))?;\n\n    if content.starts_with(\"#!\") {\n        if let Some(pos) = content.find(\"### MANIFEST ###\") {\n            let after_marker = &content[pos..];\n            let payload_start = after_marker\n                .find('\\n')\n                .map(|i| pos + i + 1)\n                .unwrap_or(content.len());\n            Ok(content[payload_start..].to_string())\n        } else {\n            Err(\"No ### MANIFEST ### marker found in wrapper script\".into())\n        }\n    } else {\n        Ok(content)\n    }\n}\n\n/// Parse a manifest JSON payload into a [`Manifest`]. Performs a\n/// staleness check on `build.morloc_version` against this crate's own\n/// `CARGO_PKG_VERSION` (which is intentionally pinned to match the\n/// morloc compiler version). Mismatched versions return a clean\n/// \"rebuild with the current compiler\" error rather than silently\n/// misinterpreting the manifest.\npub fn parse_manifest(payload: &str) -> Result<Manifest, String> {\n    let m: Manifest = serde_json::from_str(payload)\n        .map_err(|e| format!(\"Failed to parse manifest JSON: {}\", e))?;\n    let crate_version = env!(\"CARGO_PKG_VERSION\");\n    if m.build.morloc_version != crate_version {\n        return Err(format!(\n            \"manifest built with morloc {}, runtime is {}; rebuild with the current compiler\",\n            m.build.morloc_version, crate_version\n        ));\n    }\n    Ok(m)\n}\n\n// -- Arg accessors ------------------------------------------------------------\n//\n// Variant-agnostic helpers for the fields that exist on multiple Arg\n// variants. Callers in the nexus and runtime use these instead of\n// pattern-matching at every site.\n\nimpl Arg {\n    /// Single-character short option (e.g. `'f'` for `-f`). Returns\n    /// None for positional and group args.\n    pub fn short_opt_char(&self) -> Option<char> {\n        let s = match self {\n            Arg::Optional { short_opt, .. } => short_opt.as_deref(),\n            Arg::Flag { short_opt, .. } => short_opt.as_deref(),\n            _ => None,\n        };\n        s.and_then(|s| s.chars().next())\n    }\n\n    /// Long option name (e.g. `\"verbose\"` for `--verbose`). Returns\n    /// None for positional and group args.\n    pub fn long_opt_str(&self) -> Option<&str> {\n        match self {\n            Arg::Optional { long_opt, .. } => long_opt.as_deref(),\n            Arg::Flag { long_opt, .. } => long_opt.as_deref(),\n            _ => None,\n        }\n    }\n\n    /// True if this arg is a boolean flag toggle.\n    pub fn is_flag(&self) -> bool {\n        matches!(self, Arg::Flag { .. })\n    }\n\n    /// True if the user's CLI value should be JSON-wrapped before\n    /// being passed to the pool. Used for `Str`-typed arguments\n    /// declared with `--' literal: true`. Always false for flags and\n    /// groups.\n    pub fn is_quoted(&self) -> bool {\n        match self {\n            Arg::Positional { quoted, .. } | Arg::Optional { quoted, .. } => *quoted,\n            _ => false,\n        }\n    }\n\n    /// Default CLI value when the user does not pass the argument.\n    /// Returns None for positional args (which are always required)\n    /// and groups.\n    pub fn default_val(&self) -> Option<&str> {\n        match self {\n            Arg::Optional { default_val, .. } => default_val.as_deref(),\n            Arg::Flag { default_val, .. } => default_val.as_deref(),\n            _ => None,\n        }\n    }\n\n    /// CLI display placeholder (e.g. `\"FILE\"`, `\"INT\"`). None for\n    /// flags and for positional args without an explicit metavar.\n    pub fn metavar_str(&self) -> Option<&str> {\n        match self {\n            Arg::Positional { metavar, .. } => metavar.as_deref(),\n            Arg::Optional { metavar, .. } => metavar.as_deref(),\n            Arg::Group { metavar, .. } => metavar.as_deref(),\n            _ => None,\n        }\n    }\n\n    /// Description lines from the source-level docstring. Always\n    /// available regardless of variant.\n    pub fn desc_lines(&self) -> &[String] {\n        match self {\n            Arg::Positional { desc, .. }\n            | Arg::Optional { desc, .. }\n            | Arg::Flag { desc, .. }\n            | Arg::Group { desc, .. } => desc,\n        }\n    }\n\n    /// User-facing type name for typed args (e.g. `\"Int\"`,\n    /// `\"Config\"`). Returns None for flags, which carry no type.\n    pub fn type_desc_str(&self) -> Option<&str> {\n        match self {\n            Arg::Positional { type_desc, .. }\n            | Arg::Optional { type_desc, .. }\n            | Arg::Group { type_desc, .. } => type_desc.as_deref(),\n            Arg::Flag { .. } => None,\n        }\n    }\n\n    /// Morloc serialization schema string for typed args. Returns\n    /// None for flags. The schema drives both dispatch (how to encode\n    /// the value into a packet) and help rendering (how to extract\n    /// record field layouts for the Record/Table Schemas sections).\n    pub fn schema_str(&self) -> Option<&str> {\n        match self {\n            Arg::Positional { schema, .. }\n            | Arg::Optional { schema, .. }\n            | Arg::Group { schema, .. } => schema.as_deref(),\n            Arg::Flag { .. } => None,\n        }\n    }\n\n    /// All constraints attached to this arg. Empty for flags. The\n    /// caller is responsible for filtering by constraint type and\n    /// silently ignoring unknown types (per the extensibility rules\n    /// on [`Constraint`]).\n    pub fn constraints(&self) -> &[Constraint] {\n        match self {\n            Arg::Positional { constraints, .. }\n            | Arg::Optional { constraints, .. }\n            | Arg::Group { constraints, .. } => constraints,\n            Arg::Flag { .. } => &[],\n        }\n    }\n\n    /// Convenience accessor: extract the `value` of the `kind`\n    /// constraint as a string slice. Returns the lowercased\n    /// `\"record\"`, `\"object\"`, or `\"table\"` for named-type args.\n    /// None for everything else (including untagged primitive types).\n    pub fn kind_constraint(&self) -> Option<&str> {\n        self.constraints()\n            .iter()\n            .find(|c| c.ctype == \"kind\")\n            .and_then(|c| c.value.as_ref().and_then(|v| v.as_str()))\n    }\n}\n\n// -- Tests --------------------------------------------------------------------\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    /// Wrap a v2 command body with the required top-level fields so\n    /// each test fixture stays compact.\n    fn wrap(commands_json: &str) -> String {\n        let v = env!(\"CARGO_PKG_VERSION\");\n        format!(\n            r#\"{{\n                \"name\": \"main\",\n                \"build\": {{\n                    \"path\": \"/tmp/test\",\n                    \"time\": 0,\n                    \"morloc_version\": \"{}\"\n                }},\n                \"pools\": [\n                    {{\"lang\": \"py\", \"exec\": [\"python3\", \"pool.py\"], \"socket\": \"pipe-py\", \"metadata\": {{}}}}\n                ],\n                \"commands\": {},\n                \"groups\": [],\n                \"metadata\": {{}}\n            }}\"#,\n            v, commands_json\n        )\n    }\n\n    #[test]\n    fn test_parse_simple_manifest() {\n        let json = wrap(\n            r#\"[\n                {\n                    \"name\": \"f\",\n                    \"type\": \"remote\",\n                    \"mid\": 1,\n                    \"pool\": 0,\n                    \"needed_pools\": [0],\n                    \"desc\": [],\n                    \"args\": [\n                        {\n                            \"kind\": \"pos\",\n                            \"schema\": \"s\",\n                            \"type\": \"Str\",\n                            \"metavar\": null,\n                            \"quoted\": false,\n                            \"desc\": [],\n                            \"constraints\": [],\n                            \"metadata\": {}\n                        }\n                    ],\n                    \"return\": {\n                        \"schema\": \"s\",\n                        \"type\": \"Str\",\n                        \"desc\": [],\n                        \"constraints\": [],\n                        \"metadata\": {}\n                    },\n                    \"constraints\": [],\n                    \"metadata\": {},\n                    \"group\": null\n                }\n            ]\"#,\n        );\n        let m = parse_manifest(&json).unwrap();\n        assert_eq!(m.pools.len(), 1);\n        assert_eq!(m.pools[0].lang, \"py\");\n        assert_eq!(m.commands.len(), 1);\n        assert_eq!(m.commands[0].name, \"f\");\n        assert!(!m.commands[0].is_pure());\n        assert_eq!(m.commands[0].mid, 1);\n        assert!(m.commands[0].group.is_none());\n        assert_eq!(m.commands[0].args.len(), 1);\n        assert_eq!(m.commands[0].args[0].schema_str(), Some(\"s\"));\n        assert_eq!(m.commands[0].args[0].type_desc_str(), Some(\"Str\"));\n        assert_eq!(m.commands[0].ret.schema, \"s\");\n        assert_eq!(m.commands[0].ret.type_desc, \"Str\");\n    }\n\n    #[test]\n    fn test_parse_pure_command() {\n        let json = wrap(\n            r#\"[\n                {\n                    \"name\": \"greet\",\n                    \"type\": \"pure\",\n                    \"desc\": [\"Say hello\"],\n                    \"args\": [\n                        {\n                            \"kind\": \"pos\",\n                            \"schema\": \"s\",\n                            \"type\": \"Str\",\n                            \"metavar\": \"NAME\",\n                            \"quoted\": true,\n                            \"desc\": [\"name\"],\n                            \"constraints\": [],\n                            \"metadata\": {}\n                        }\n                    ],\n                    \"return\": {\n                        \"schema\": \"s\",\n                        \"type\": \"Str\",\n                        \"desc\": [],\n                        \"constraints\": [],\n                        \"metadata\": {}\n                    },\n                    \"expr\": {\"tag\": \"lit\", \"schema\": \"s\", \"lit_type\": \"str\", \"value\": \"hello\"},\n                    \"constraints\": [],\n                    \"metadata\": {},\n                    \"group\": null\n                }\n            ]\"#,\n        );\n        let m = parse_manifest(&json).unwrap();\n        assert!(m.commands[0].is_pure());\n        assert!(m.commands[0].expr.is_some());\n    }\n\n    #[test]\n    fn test_parse_kind_constraint() {\n        let json = wrap(\n            r#\"[\n                {\n                    \"name\": \"process\",\n                    \"type\": \"remote\",\n                    \"mid\": 1,\n                    \"pool\": 0,\n                    \"needed_pools\": [0],\n                    \"desc\": [],\n                    \"args\": [\n                        {\n                            \"kind\": \"pos\",\n                            \"schema\": \"<dict>m24name<list>a<str>s3age<list>a<int>i4\",\n                            \"type\": \"People\",\n                            \"metavar\": null,\n                            \"quoted\": false,\n                            \"desc\": [],\n                            \"constraints\": [\n                                {\"type\": \"kind\", \"value\": \"table\"}\n                            ],\n                            \"metadata\": {}\n                        }\n                    ],\n                    \"return\": {\n                        \"schema\": \"i4\",\n                        \"type\": \"Int\",\n                        \"desc\": [],\n                        \"constraints\": [],\n                        \"metadata\": {}\n                    },\n                    \"constraints\": [],\n                    \"metadata\": {},\n                    \"group\": null\n                }\n            ]\"#,\n        );\n        let m = parse_manifest(&json).unwrap();\n        assert_eq!(m.commands[0].args[0].kind_constraint(), Some(\"table\"));\n    }\n\n    #[test]\n    fn test_version_mismatch_rejected() {\n        let json = r#\"{\n            \"name\": \"main\",\n            \"build\": {\"path\": \"/tmp/x\", \"time\": 0, \"morloc_version\": \"0.0.1-stale\"},\n            \"pools\": [],\n            \"commands\": [],\n            \"groups\": [],\n            \"metadata\": {}\n        }\"#;\n        let err = parse_manifest(json).unwrap_err();\n        assert!(\n            err.contains(\"rebuild with the current compiler\"),\n            \"got: {}\",\n            err\n        );\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-nexus/Cargo.toml",
    "content": "[package]\nname = \"morloc-nexus\"\nversion = \"0.81.0\"\nedition = \"2021\"\ndescription = \"Morloc nexus: CLI dispatcher for multi-language pool orchestration\"\n# Version is intentionally synchronized with the morloc compiler version\n# (see ../../../package.yaml). Manifests record the morloc_version that\n# built them; the nexus rejects manifests whose version differs from its\n# own CARGO_PKG_VERSION at parse time. Bumping the morloc compiler\n# version requires bumping this version in lockstep.\n\n[[bin]]\nname = \"morloc-nexus\"\npath = \"src/main.rs\"\n\n[dependencies]\nmorloc-manifest = { path = \"../morloc-manifest\" }\nmorloc-runtime = { path = \"../morloc-runtime\" }\nlibc = { workspace = true }\nserde = { workspace = true }\nserde_json = { workspace = true }\nnix = { workspace = true }\nclap = { workspace = true }\nthiserror = { workspace = true }\n"
  },
  {
    "path": "data/rust/morloc-nexus/build.rs",
    "content": "fn main() {\n    // Use MORLOC_HOME at build time if set, else fall back to $HOME default.\n    // This is only for the compile-time link search path.\n    let morloc_lib = std::env::var(\"MORLOC_HOME\")\n        .map(|h| format!(\"{}/lib\", h))\n        .unwrap_or_else(|_| {\n            format!(\n                \"{}/.local/share/morloc/lib\",\n                std::env::var(\"HOME\").unwrap_or_else(|_| \"/root\".into())\n            )\n        });\n    println!(\"cargo:rustc-link-search=native={}\", morloc_lib);\n    println!(\"cargo:rustc-link-lib=dylib=morloc\");\n\n    // Embed $ORIGIN-relative rpaths so the nexus finds libmorloc.so\n    // regardless of install location:\n    //   $ORIGIN/../lib           covers /opt/morloc/bin -> /opt/morloc/lib\n    //   $ORIGIN/../share/morloc/lib  covers ~/.local/bin -> ~/.local/share/morloc/lib\n    println!(\"cargo:rustc-link-arg=-Wl,-rpath,$ORIGIN/../lib\");\n    println!(\"cargo:rustc-link-arg=-Wl,-rpath,$ORIGIN/../share/morloc/lib\");\n\n    // The morloc compiler version is sourced from CARGO_PKG_VERSION\n    // (this crate's Cargo.toml), which is intentionally kept in\n    // lockstep with the morloc Haskell package.yaml. No build-time\n    // extraction needed -- Cargo guarantees CARGO_PKG_VERSION is set\n    // and rebuilds when Cargo.toml changes.\n}\n"
  },
  {
    "path": "data/rust/morloc-nexus/src/dispatch.rs",
    "content": "//! Command dispatch: CLI argument parsing and routing to pools.\n//!\n//! Replaces the dispatch_command, dispatch, run_command, and run_pure_command\n//! functions from nexus.c. Uses the C libmorloc for packet construction and\n//! serialization until Phase 2/3 replaces those.\n//!\n//! For Phase 1, the nexus links against the C libmorloc.so for:\n//! - make_call_packet_from_cli, parse_cli_data_argument\n//! - send_and_receive_over_socket\n//! - pack_with_schema, print_voidstar, etc.\n//! - morloc_eval for pure commands\n\nuse crate::help;\nuse crate::manifest::{Arg, Command, Manifest};\nuse crate::process::{self, PoolSocket};\n\n/// Output format enum.\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum OutputFormat {\n    Json,\n    MessagePack,\n    VoidStar,\n    Packet,\n}\n\n/// Nexus configuration parsed from CLI options.\n#[derive(Debug, Clone)]\npub struct NexusConfig {\n    pub help_flag: bool,\n    pub print_flag: bool,\n    pub packet_path: Option<String>,\n    pub socket_base: Option<String>,\n    pub output_path: Option<String>,\n    pub output_format: OutputFormat,\n    pub daemon_flag: bool,\n    pub router_flag: bool,\n    pub unix_socket_path: Option<String>,\n    pub tcp_port: Option<i32>,\n    pub http_port: Option<i32>,\n    pub fdb_path: Option<String>,\n    pub eval_timeout: i32,\n}\n\nimpl Default for NexusConfig {\n    fn default() -> Self {\n        NexusConfig {\n            help_flag: false,\n            print_flag: false,\n            packet_path: None,\n            socket_base: None,\n            output_path: None,\n            output_format: OutputFormat::Json,\n            daemon_flag: false,\n            router_flag: false,\n            unix_socket_path: None,\n            tcp_port: None,\n            http_port: None,\n            fdb_path: None,\n            eval_timeout: 30,\n        }\n    }\n}\n\n/// Emit a uniform error when pool communication fails, then exit.\n///\n/// The pool's stderr was inherited by the nexus, so any traceback the pool\n/// printed before dying is already on the user's terminal. This helper\n/// reports the communication error plus the pool's exit status (if it has\n/// been reaped) so the user can correlate the two.\n///\n/// Race condition: the pool process may still be writing its error output\n/// (traceback, panic message, etc.) to stderr when the nexus detects the\n/// broken connection. If we call clean_exit immediately, it sends SIGTERM\n/// to the pool process group, which can kill the pool before its stderr\n/// buffer is flushed. We insert a brief drain window to let any in-flight\n/// stderr from the dying pool reach the terminal before tearing everything\n/// down. This is best-effort: a pool killed by SIGKILL (OOM killer, etc.)\n/// won't have pending output, and a pool stuck in a blocking syscall won't\n/// flush within the window. But for the common case of a Python exception\n/// traceback, this is enough.\nfn die_with_pool_error(\n    socket: &PoolSocket,\n    pool_index: usize,\n    context: &str,\n    comm_err: &dyn std::fmt::Display,\n) -> ! {\n    // Give the dying pool process time to flush its stderr/stdout before\n    // we tear down the process group. Without this, a Python traceback or\n    // error message that is still in a pipe buffer gets lost when\n    // clean_exit sends SIGTERM/SIGKILL to the pool's process group.\n    std::thread::sleep(std::time::Duration::from_millis(100));\n\n    eprintln!(\"Error: {}: {}\", context, comm_err);\n    if let Some(info) = process::pool_death_info(pool_index) {\n        eprintln!(\"Pool '{}' {}\", socket.lang, info);\n    }\n    process::clean_exit(1);\n}\n\n/// Parse nexus-level options from argv. Returns the index of the first\n/// non-option argument (the manifest path or subcommand).\npub fn parse_nexus_options(args: &[String], config: &mut NexusConfig) -> usize {\n    let mut i = 1; // skip argv[0]\n    while i < args.len() {\n        let arg = &args[i];\n        match arg.as_str() {\n            \"-h\" | \"--help\" => {\n                config.help_flag = true;\n                i += 1;\n            }\n            \"-p\" | \"--print\" => {\n                config.print_flag = true;\n                i += 1;\n            }\n            \"-c\" | \"--call-packet\" => {\n                i += 1;\n                if i < args.len() {\n                    config.packet_path = Some(args[i].clone());\n                    i += 1;\n                }\n            }\n            \"-s\" | \"--socket-base\" => {\n                i += 1;\n                if i < args.len() {\n                    config.socket_base = Some(args[i].clone());\n                    i += 1;\n                }\n            }\n            \"-o\" | \"--output-file\" => {\n                i += 1;\n                if i < args.len() {\n                    config.output_path = Some(args[i].clone());\n                    i += 1;\n                }\n            }\n            \"-f\" | \"--output-form\" => {\n                i += 1;\n                if i < args.len() {\n                    config.output_format = parse_output_format(&args[i]);\n                    i += 1;\n                }\n            }\n            \"--daemon\" => {\n                config.daemon_flag = true;\n                i += 1;\n            }\n            \"--router\" => {\n                config.router_flag = true;\n                i += 1;\n            }\n            \"--socket\" => {\n                i += 1;\n                if i < args.len() {\n                    config.unix_socket_path = Some(args[i].clone());\n                    i += 1;\n                }\n            }\n            \"--port\" => {\n                i += 1;\n                if i < args.len() {\n                    config.tcp_port = args[i].parse().ok();\n                    i += 1;\n                }\n            }\n            \"--http-port\" => {\n                i += 1;\n                if i < args.len() {\n                    config.http_port = args[i].parse().ok();\n                    i += 1;\n                }\n            }\n            \"--fdb\" => {\n                i += 1;\n                if i < args.len() {\n                    config.fdb_path = Some(args[i].clone());\n                    i += 1;\n                }\n            }\n            \"--eval-timeout\" => {\n                i += 1;\n                if i < args.len() {\n                    config.eval_timeout = args[i].parse().unwrap_or(30);\n                    i += 1;\n                }\n            }\n            _ => {\n                // Handle --key=value forms\n                if let Some(val) = arg.strip_prefix(\"--socket=\") {\n                    config.unix_socket_path = Some(val.to_string());\n                    i += 1;\n                } else if let Some(val) = arg.strip_prefix(\"--port=\") {\n                    config.tcp_port = val.parse().ok();\n                    i += 1;\n                } else if let Some(val) = arg.strip_prefix(\"--http-port=\") {\n                    config.http_port = val.parse().ok();\n                    i += 1;\n                } else if let Some(val) = arg.strip_prefix(\"--fdb=\") {\n                    config.fdb_path = Some(val.to_string());\n                    i += 1;\n                } else if let Some(val) = arg.strip_prefix(\"--eval-timeout=\") {\n                    config.eval_timeout = val.parse().unwrap_or(30);\n                    i += 1;\n                } else {\n                    // Not a nexus option - stop parsing\n                    break;\n                }\n            }\n        }\n    }\n    i\n}\n\n/// Extract daemon/server long options from argv in single-command mode.\n/// Removes matched options from the args vector.\npub fn extract_global_options(args: &mut Vec<String>, config: &mut NexusConfig) {\n    let mut i = 1;\n    while i < args.len() {\n        if args[i] == \"--\" {\n            break;\n        }\n\n        let mut matched = false;\n        let mut consumed = 1;\n\n        match args[i].as_str() {\n            \"--daemon\" => {\n                config.daemon_flag = true;\n                matched = true;\n            }\n            \"--socket\" if i + 1 < args.len() => {\n                config.unix_socket_path = Some(args[i + 1].clone());\n                consumed = 2;\n                matched = true;\n            }\n            \"--port\" if i + 1 < args.len() => {\n                config.tcp_port = args[i + 1].parse().ok();\n                consumed = 2;\n                matched = true;\n            }\n            \"--http-port\" if i + 1 < args.len() => {\n                config.http_port = args[i + 1].parse().ok();\n                consumed = 2;\n                matched = true;\n            }\n            \"--fdb\" if i + 1 < args.len() => {\n                config.fdb_path = Some(args[i + 1].clone());\n                consumed = 2;\n                matched = true;\n            }\n            \"--eval-timeout\" if i + 1 < args.len() => {\n                config.eval_timeout = args[i + 1].parse().unwrap_or(30);\n                consumed = 2;\n                matched = true;\n            }\n            _ => {\n                // Check --key=value forms\n                if let Some(val) = args[i].strip_prefix(\"--socket=\") {\n                    config.unix_socket_path = Some(val.to_string());\n                    matched = true;\n                } else if let Some(val) = args[i].strip_prefix(\"--port=\") {\n                    config.tcp_port = val.parse().ok();\n                    matched = true;\n                } else if let Some(val) = args[i].strip_prefix(\"--http-port=\") {\n                    config.http_port = val.parse().ok();\n                    matched = true;\n                } else if let Some(val) = args[i].strip_prefix(\"--fdb=\") {\n                    config.fdb_path = Some(val.to_string());\n                    matched = true;\n                } else if let Some(val) = args[i].strip_prefix(\"--eval-timeout=\") {\n                    config.eval_timeout = val.parse().unwrap_or(30);\n                    matched = true;\n                }\n            }\n        }\n\n        if matched {\n            for _ in 0..consumed {\n                args.remove(i);\n            }\n        } else {\n            i += 1;\n        }\n    }\n}\n\nfn parse_output_format(s: &str) -> OutputFormat {\n    match s {\n        \"json\" => OutputFormat::Json,\n        \"mpk\" => OutputFormat::MessagePack,\n        \"voidstar\" => OutputFormat::VoidStar,\n        \"packet\" => OutputFormat::Packet,\n        _ => {\n            eprintln!(\"Invalid output format: {}\", s);\n            std::process::exit(1);\n        }\n    }\n}\n\n/// Wrap a string in JSON quotes (for literal string arguments).\npub fn quoted(s: &str) -> String {\n    // JSON-escape the string\n    let escaped = serde_json::to_string(s).unwrap_or_else(|_| format!(\"\\\"{}\\\"\", s));\n    escaped\n}\n\n/// Main dispatch entry point. Routes to the correct command based on argv.\npub fn dispatch(\n    args: &[String],\n    arg_start: usize,\n    _shm_basename: &str,\n    config: &NexusConfig,\n    manifest: &Manifest,\n    sockets: &mut [PoolSocket],\n    prog_name: &str,\n) {\n    if arg_start >= args.len() {\n        help::print_usage(prog_name, manifest);\n    }\n\n    let cmd_name = &args[arg_start];\n    let next = arg_start + 1;\n\n    // Check if it matches a group name\n    for grp in &manifest.groups {\n        if grp.name == *cmd_name {\n            if next >= args.len() {\n                help::print_group_usage(prog_name, manifest, cmd_name);\n            }\n            let subcmd = &args[next];\n            if subcmd == \"-h\" || subcmd == \"--help\" {\n                help::print_group_usage(prog_name, manifest, cmd_name);\n            }\n            // Find command within this group\n            for cmd in &manifest.commands {\n                if cmd.group.as_deref() == Some(cmd_name.as_str()) && cmd.name == *subcmd {\n                    dispatch_command(args, next + 1, config, manifest, cmd, sockets, prog_name);\n                    return;\n                }\n            }\n            eprintln!(\"Unrecognized command '{}' in group '{}'\", subcmd, cmd_name);\n            process::clean_exit(1);\n        }\n    }\n\n    // Try ungrouped commands\n    for cmd in &manifest.commands {\n        if cmd.name == *cmd_name && cmd.group.is_none() {\n            dispatch_command(args, next, config, manifest, cmd, sockets, prog_name);\n            return;\n        }\n    }\n\n    eprintln!(\"Unrecognized command '{}'\", cmd_name);\n    process::clean_exit(1);\n}\n\n/// Dispatch a single command: parse its args, start needed daemons, execute.\npub fn dispatch_command(\n    args: &[String],\n    arg_start: usize,\n    config: &NexusConfig,\n    manifest: &Manifest,\n    cmd: &Command,\n    sockets: &mut [PoolSocket],\n    prog_name: &str,\n) {\n    let single_cmd = manifest.commands.len() == 1 && manifest.groups.is_empty();\n\n    // Parse command-specific arguments\n    let (parsed_args, _remaining_start) =\n        parse_command_args(args, arg_start, cmd, config, single_cmd, prog_name);\n\n    // Start daemons for remote commands\n    if !cmd.is_pure() {\n        if let Err(e) = process::start_daemons(sockets, &cmd.needed_pools) {\n            eprintln!(\"Error: {}\", e);\n            process::clean_exit(1);\n        }\n    }\n\n    // Execute the command\n    if cmd.is_pure() {\n        run_pure_command(cmd, &parsed_args, config);\n    } else {\n        run_remote_command(cmd, &parsed_args, sockets, config);\n    }\n}\n\n/// Parsed CLI argument value for a manifest arg slot.\n#[derive(Debug)]\npub enum ArgValue {\n    /// A value string (already quoted if needed).\n    Value(String),\n    /// Null/absent value.\n    Null,\n    /// Group argument with per-entry values.\n    Group {\n        grp_val: Option<String>,\n        fields: Vec<Option<String>>,\n        defaults: Vec<Option<String>>,\n    },\n}\n\n/// Parse command-specific arguments from argv.\nfn parse_command_args(\n    args: &[String],\n    pos: usize,\n    cmd: &Command,\n    _config: &NexusConfig,\n    single_cmd: bool,\n    prog_name: &str,\n) -> (Vec<ArgValue>, usize) {\n    let mut parsed = Vec::with_capacity(cmd.args.len());\n    // Simple option tracking: collect all --opt=val and -o val\n    let mut opt_values: std::collections::HashMap<String, String> = std::collections::HashMap::new();\n    let mut flag_values: std::collections::HashMap<String, String> = std::collections::HashMap::new();\n    let mut positional_idx = 0;\n    let mut positionals: Vec<String> = Vec::new();\n\n    // First pass: separate options from positionals\n    let mut i = pos;\n    while i < args.len() {\n        let arg = &args[i];\n        if arg == \"--\" {\n            i += 1;\n            // Everything after -- is positional\n            while i < args.len() {\n                positionals.push(args[i].clone());\n                i += 1;\n            }\n            break;\n        }\n        if arg == \"-h\" || arg == \"--help\" {\n            if single_cmd {\n                help::print_command_help_single(prog_name, cmd);\n            } else {\n                help::print_command_help(prog_name, cmd);\n            }\n        }\n        if arg.starts_with(\"--\") && arg.len() > 2 {\n            // Long option\n            if let Some(eq_pos) = arg.find('=') {\n                let key = &arg[2..eq_pos];\n                let val = &arg[eq_pos + 1..];\n                opt_values.insert(key.to_string(), val.to_string());\n                i += 1;\n            } else {\n                let key = &arg[2..];\n                // Check if it's a flag\n                if is_flag_opt(cmd, key) {\n                    flag_values.insert(key.to_string(), flag_forward_value(cmd, key));\n                    i += 1;\n                } else if is_rev_flag(cmd, key) {\n                    if let Some(orig) = find_flag_by_rev(cmd, key) {\n                        flag_values.insert(orig, flag_reverse_value_by_rev(cmd, key));\n                    }\n                    i += 1;\n                } else if i + 1 < args.len() {\n                    opt_values.insert(key.to_string(), args[i + 1].clone());\n                    i += 2;\n                } else {\n                    eprintln!(\"Error: option --{} requires a value\", key);\n                    process::clean_exit(1);\n                }\n            }\n        } else if arg.starts_with('-') && arg.len() == 2 && arg.as_bytes()[1].is_ascii_alphabetic() {\n            let ch = arg.chars().nth(1).unwrap();\n            if is_short_flag(cmd, ch) {\n                flag_values.insert(\n                    short_to_long(cmd, ch).unwrap_or_else(|| ch.to_string()),\n                    flag_forward_value_by_short(cmd, ch),\n                );\n                i += 1;\n            } else if i + 1 < args.len() {\n                opt_values.insert(\n                    short_to_long(cmd, ch).unwrap_or_else(|| ch.to_string()),\n                    args[i + 1].clone(),\n                );\n                i += 2;\n            } else {\n                eprintln!(\"Error: option -{} requires a value\", ch);\n                process::clean_exit(1);\n            }\n        } else {\n            positionals.push(arg.clone());\n            i += 1;\n        }\n    }\n\n    // Second pass: build ArgValue for each manifest arg\n    for arg_def in &cmd.args {\n        match arg_def {\n            Arg::Positional { quoted, .. } => {\n                if positional_idx < positionals.len() {\n                    let val = if *quoted {\n                        self::quoted(&positionals[positional_idx])\n                    } else {\n                        positionals[positional_idx].clone()\n                    };\n                    parsed.push(ArgValue::Value(val));\n                    positional_idx += 1;\n                } else {\n                    eprintln!(\"Error: too few positional arguments\");\n                    process::clean_exit(1);\n                }\n            }\n            Arg::Optional {\n                long_opt,\n                short_opt,\n                default_val,\n                quoted,\n                ..\n            } => {\n                let key = long_opt\n                    .as_deref()\n                    .or_else(|| short_opt.as_deref())\n                    .unwrap_or(\"\");\n                let user_val = opt_values.get(key);\n                if let Some(val) = user_val {\n                    let v = if *quoted { self::quoted(val) } else { val.clone() };\n                    parsed.push(ArgValue::Value(v));\n                } else if let Some(def) = default_val {\n                    parsed.push(ArgValue::Value(def.clone()));\n                } else {\n                    parsed.push(ArgValue::Null);\n                }\n            }\n            Arg::Flag {\n                long_opt,\n                default_val,\n                ..\n            } => {\n                let key = long_opt.as_deref().unwrap_or(\"\");\n                if let Some(val) = flag_values.get(key) {\n                    parsed.push(ArgValue::Value(val.clone()));\n                } else {\n                    parsed.push(ArgValue::Value(\n                        default_val.as_deref().unwrap_or(\"false\").to_string(),\n                    ));\n                }\n            }\n            Arg::Group {\n                entries,\n                group_opt,\n                ..\n            } => {\n                let grp_val = group_opt.as_ref().and_then(|go| {\n                    go.long_opt\n                        .as_deref()\n                        .and_then(|k| opt_values.get(k))\n                        .cloned()\n                });\n                let mut fields = Vec::new();\n                let mut defaults = Vec::new();\n                for entry in entries {\n                    // Look up by long option name or short option character\n                    let long_key = entry.arg.long_opt_str().unwrap_or(\"\");\n                    let short_key = entry.arg.short_opt_char()\n                        .map(|c| c.to_string())\n                        .unwrap_or_default();\n                    let user = opt_values\n                        .get(long_key)\n                        .or_else(|| opt_values.get(&short_key))\n                        .or_else(|| flag_values.get(long_key))\n                        .or_else(|| flag_values.get(&short_key))\n                        .map(|v| {\n                            if entry.arg.is_quoted() {\n                                self::quoted(v)\n                            } else {\n                                v.clone()\n                            }\n                        });\n                    fields.push(user);\n                    defaults.push(entry.arg.default_val().map(|s| s.to_string()));\n                }\n                parsed.push(ArgValue::Group {\n                    grp_val,\n                    fields,\n                    defaults,\n                });\n            }\n        }\n    }\n\n    if positional_idx < positionals.len() {\n        eprintln!(\"Error: too many positional arguments given\");\n        process::clean_exit(1);\n    }\n\n    (parsed, i)\n}\n\n// -- Command execution ------------------------------------------------------\n\n/// Execute a remote command by sending a call packet to the pool.\nfn run_remote_command(\n    cmd: &Command,\n    args: &[ArgValue],\n    sockets: &[PoolSocket],\n    config: &NexusConfig,\n) {\n    use morloc_runtime::packet;\n    use morloc_runtime::schema::{parse_schema, SerialType};\n    use std::io::{Read, Write};\n    use std::os::unix::net::UnixStream;\n\n    // C library functions from libmorloc.so\n    extern \"C\" {\n        fn parse_cli_data_argument(\n            dest: *mut u8, arg: *const std::ffi::c_void,\n            schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut u8;\n        fn initialize_positional(value: *mut std::ffi::c_char) -> *mut std::ffi::c_void;\n        fn free_argument_t(arg: *mut std::ffi::c_void);\n        fn morloc_packet_size(packet: *const u8, errmsg: *mut *mut std::ffi::c_char) -> usize;\n        fn make_morloc_local_call_packet(\n            midx: u32, arg_packets: *const *const u8, nargs: usize,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut u8;\n        fn get_morloc_data_packet_value(\n            data: *const u8, schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut u8;\n    }\n\n    let socket = &sockets[cmd.pool_index];\n\n    // Parse return schema\n    let return_schema = match parse_schema(&cmd.ret.schema) {\n        Ok(s) => s,\n        Err(e) => {\n            eprintln!(\"Error: failed to parse return schema '{}': {}\", cmd.ret.schema, e);\n            process::clean_exit(1);\n        }\n    };\n\n    // The parsed `args` list and `cmd.args` are index-aligned 1:1 in\n    // declaration order: parse_command_args pushes one ArgValue for\n    // EVERY arg (including flags). The Haskell compiler emits one\n    // schema per arg position too. Walk both lists in lockstep; for\n    // flags, schema_str() returns None and the flag's ArgValue is\n    // already a ready-to-send \"true\"/\"false\" string that doesn't need\n    // packet conversion -- but the original v1 dispatch path still\n    // ran flags through parse_cli_data_argument with the flag's bool\n    // schema, so we mirror that to keep the wire format consistent.\n    let mut arg_packets: Vec<Vec<u8>> = Vec::new();\n    for (i, (arg_val, arg_def)) in args.iter().zip(cmd.args.iter()).enumerate() {\n        let schema_str = arg_def.schema_str().unwrap_or(\"b\");\n        let schema = match parse_schema(schema_str) {\n            Ok(s) => s,\n            Err(e) => {\n                eprintln!(\"Error: failed to parse arg schema #{}: {}\", i, e);\n                process::clean_exit(1);\n            }\n        };\n\n        let c_schema = morloc_runtime::cschema::CSchema::from_rust(&schema);\n        let mut errmsg: *mut std::ffi::c_char = std::ptr::null_mut();\n\n        let c_arg;\n        match arg_val {\n            ArgValue::Group { grp_val, fields, defaults } => {\n                // Group arg: use initialize_unrolled (matches C nexus behavior)\n                extern \"C\" {\n                    fn initialize_unrolled(\n                        size: usize, default_value: *mut std::ffi::c_char,\n                        fields: *mut *mut std::ffi::c_char,\n                        default_fields: *mut *mut std::ffi::c_char,\n                    ) -> *mut std::ffi::c_void;\n                }\n                let n = fields.len();\n                let grp_val_c = grp_val.as_ref()\n                    .map(|s| std::ffi::CString::new(s.as_str()).unwrap().into_raw())\n                    .unwrap_or(std::ptr::null_mut());\n                let mut c_fields: Vec<*mut std::ffi::c_char> = fields.iter()\n                    .map(|f| f.as_ref()\n                        .map(|s| std::ffi::CString::new(s.as_str()).unwrap().into_raw())\n                        .unwrap_or(std::ptr::null_mut()))\n                    .collect();\n                let mut c_defaults: Vec<*mut std::ffi::c_char> = defaults.iter()\n                    .map(|d| d.as_ref()\n                        .map(|s| std::ffi::CString::new(s.as_str()).unwrap().into_raw())\n                        .unwrap_or(std::ptr::null_mut()))\n                    .collect();\n                c_arg = unsafe {\n                    initialize_unrolled(n, grp_val_c, c_fields.as_mut_ptr(), c_defaults.as_mut_ptr())\n                };\n            }\n            _ => {\n                let json_str = match arg_val {\n                    ArgValue::Value(s) => s.clone(),\n                    ArgValue::Null => \"null\".to_string(),\n                    _ => unreachable!(),\n                };\n                let json_c = std::ffi::CString::new(json_str.as_str()).unwrap();\n                c_arg = unsafe { initialize_positional(json_c.into_raw()) };\n            }\n        }\n\n        let c_pkt = unsafe { parse_cli_data_argument(std::ptr::null_mut(), c_arg, c_schema, &mut errmsg) };\n        unsafe { free_argument_t(c_arg) };\n        unsafe { morloc_runtime::cschema::CSchema::free(c_schema) };\n\n        if c_pkt.is_null() {\n            let msg = if !errmsg.is_null() {\n                let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n                unsafe { libc::free(errmsg as *mut std::ffi::c_void) };\n                s\n            } else {\n                \"unknown error\".into()\n            };\n            eprintln!(\"Error: failed to parse argument #{}: {}\", i, msg);\n            process::clean_exit(1);\n        }\n\n        // Get packet size and copy to Vec\n        let pkt_size = unsafe { morloc_packet_size(c_pkt, &mut errmsg) };\n        let data_pkt = unsafe { std::slice::from_raw_parts(c_pkt, pkt_size).to_vec() };\n        unsafe { libc::free(c_pkt as *mut std::ffi::c_void) };\n        arg_packets.push(data_pkt);\n    }\n\n    // Build call packet via C library\n    let arg_ptrs: Vec<*const u8> = arg_packets.iter().map(|p| p.as_ptr()).collect();\n    let mut errmsg_call: *mut std::ffi::c_char = std::ptr::null_mut();\n    let c_call = unsafe {\n        make_morloc_local_call_packet(cmd.mid, arg_ptrs.as_ptr(), arg_packets.len(), &mut errmsg_call)\n    };\n    if c_call.is_null() {\n        eprintln!(\"Error: failed to create call packet\");\n        process::clean_exit(1);\n    }\n\n    // Get call packet size\n    let call_size = unsafe {\n        let mut e: *mut std::ffi::c_char = std::ptr::null_mut();\n        morloc_packet_size(c_call, &mut e)\n    };\n    let call_packet = unsafe { std::slice::from_raw_parts(c_call, call_size).to_vec() };\n    unsafe { libc::free(c_call as *mut std::ffi::c_void) };\n\n    // Send to pool and receive response\n    let mut stream = match UnixStream::connect(&socket.socket_path) {\n        Ok(s) => s,\n        Err(e) => {\n            die_with_pool_error(\n                socket,\n                cmd.pool_index,\n                &format!(\"failed to connect to pool '{}'\", socket.lang),\n                &e,\n            );\n        }\n    };\n\n    if let Err(e) = stream.write_all(&call_packet) {\n        die_with_pool_error(\n            socket,\n            cmd.pool_index,\n            &format!(\"failed to send call packet to pool '{}'\", socket.lang),\n            &e,\n        );\n    }\n\n    // Read response header\n    let mut resp_header_bytes = [0u8; 32];\n    if let Err(e) = stream.read_exact(&mut resp_header_bytes) {\n        die_with_pool_error(\n            socket,\n            cmd.pool_index,\n            &format!(\"failed to read response header from pool '{}'\", socket.lang),\n            &e,\n        );\n    }\n\n    let resp_header = match packet::PacketHeader::from_bytes(&resp_header_bytes) {\n        Ok(h) => h,\n        Err(e) => {\n            eprintln!(\"Error: invalid response packet: {}\", e);\n            process::clean_exit(1);\n        }\n    };\n\n    // Read full response (metadata + payload)\n    let offset = { resp_header.offset } as usize;\n    let length = { resp_header.length } as usize;\n    let remaining = offset + length;\n    let mut resp_body = vec![0u8; remaining];\n    if remaining > 0 {\n        if let Err(e) = stream.read_exact(&mut resp_body) {\n            die_with_pool_error(\n                socket,\n                cmd.pool_index,\n                &format!(\"failed to read response body from pool '{}'\", socket.lang),\n                &e,\n            );\n        }\n    }\n\n    // Reconstruct full packet (header + body)\n    let mut full_packet = Vec::with_capacity(32 + remaining);\n    full_packet.extend_from_slice(&resp_header_bytes);\n    full_packet.extend_from_slice(&resp_body);\n\n    // Check for error\n    match packet::get_error_message(&full_packet) {\n        Ok(Some(err_msg)) => {\n            eprintln!(\"Error: run failed\\n{}\", err_msg);\n            process::clean_exit(1);\n        }\n        Ok(None) => {}\n        Err(e) => {\n            eprintln!(\"Error: failed to parse response: {}\", e);\n            process::clean_exit(1);\n        }\n    }\n\n    // Extract and print via C library for correct voidstar handling\n    let c_schema = morloc_runtime::cschema::CSchema::from_rust(&return_schema);\n    let mut errmsg: *mut std::ffi::c_char = std::ptr::null_mut();\n    let result_ptr = unsafe {\n        get_morloc_data_packet_value(full_packet.as_ptr(), c_schema, &mut errmsg)\n    };\n    if result_ptr.is_null() {\n        let msg = if !errmsg.is_null() {\n            let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n            unsafe { libc::free(errmsg as *mut std::ffi::c_void) };\n            s\n        } else {\n            \"unknown error\".into()\n        };\n        eprintln!(\"Error: failed to extract result: {}\", msg);\n        unsafe { morloc_runtime::cschema::CSchema::free(c_schema) };\n        process::clean_exit(1);\n    }\n\n    // Check if response is Arrow format\n    let is_arrow = resp_header.is_data() && unsafe { resp_header.command.data.format } == packet::PACKET_FORMAT_ARROW;\n\n    // Print using the C library for correct output.\n    // Suppress \"null\" for Unit-returning commands (CLI convention).\n    if return_schema.serial_type != SerialType::Nil {\n        print_result_c(result_ptr, c_schema, &full_packet, is_arrow, config);\n    }\n    unsafe { morloc_runtime::cschema::CSchema::free(c_schema) };\n}\n\n/// Print using the C library functions for correct voidstar handling.\nfn print_result_c(\n    ptr: *mut u8,\n    schema: *const morloc_runtime::cschema::CSchema,\n    full_packet: &[u8],\n    is_arrow: bool,\n    config: &NexusConfig,\n) {\n    extern \"C\" {\n        fn print_voidstar(\n            voidstar: *const std::ffi::c_void,\n            schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> bool;\n        fn pretty_print_voidstar(\n            voidstar: *const std::ffi::c_void,\n            schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> bool;\n        fn print_arrow_as_json(\n            data: *const std::ffi::c_void,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> bool;\n        fn print_arrow_as_table(\n            data: *const std::ffi::c_void,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> bool;\n        fn pack_with_schema(\n            mlc: *const std::ffi::c_void,\n            schema: *const morloc_runtime::cschema::CSchema,\n            mpkptr: *mut *mut std::ffi::c_char,\n            mpk_size: *mut usize,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> i32;\n    }\n\n    let mut errmsg: *mut std::ffi::c_char = std::ptr::null_mut();\n\n    match config.output_format {\n        OutputFormat::Json => {\n            let ok = unsafe {\n                if is_arrow && config.print_flag {\n                    print_arrow_as_table(ptr as *const std::ffi::c_void, &mut errmsg)\n                } else if is_arrow {\n                    print_arrow_as_json(ptr as *const std::ffi::c_void, &mut errmsg)\n                } else if config.print_flag {\n                    pretty_print_voidstar(ptr as *const std::ffi::c_void, schema, &mut errmsg)\n                } else {\n                    print_voidstar(ptr as *const std::ffi::c_void, schema, &mut errmsg)\n                }\n            };\n            if !ok {\n                let msg = if !errmsg.is_null() {\n                    let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n                    unsafe { libc::free(errmsg as *mut std::ffi::c_void) };\n                    s\n                } else {\n                    \"unknown error\".into()\n                };\n                eprintln!(\"Error: {}\", msg);\n                process::clean_exit(1);\n            }\n        }\n        OutputFormat::MessagePack => {\n            let mut mpk_ptr: *mut std::ffi::c_char = std::ptr::null_mut();\n            let mut mpk_size: usize = 0;\n            let rc = unsafe {\n                pack_with_schema(\n                    ptr as *const std::ffi::c_void,\n                    schema,\n                    &mut mpk_ptr,\n                    &mut mpk_size,\n                    &mut errmsg,\n                )\n            };\n            if rc != 0 {\n                eprintln!(\"Error: msgpack serialization failed\");\n                process::clean_exit(1);\n            }\n            if config.print_flag {\n                let bytes = unsafe { std::slice::from_raw_parts(mpk_ptr as *const u8, mpk_size) };\n                for (i, b) in bytes.iter().enumerate() {\n                    if i > 0 && i % 16 == 0 { println!(); }\n                    print!(\"{:02x} \", b);\n                }\n                println!();\n            } else {\n                use std::io::Write;\n                let bytes = unsafe { std::slice::from_raw_parts(mpk_ptr as *const u8, mpk_size) };\n                let _ = std::io::stdout().lock().write_all(bytes);\n            }\n            if !mpk_ptr.is_null() {\n                unsafe { libc::free(mpk_ptr as *mut std::ffi::c_void) };\n            }\n        }\n        OutputFormat::VoidStar => {\n            extern \"C\" {\n                fn print_morloc_data_packet(\n                    packet: *const u8,\n                    schema: *const morloc_runtime::cschema::CSchema,\n                    errmsg: *mut *mut std::ffi::c_char,\n                ) -> i32;\n            }\n            if config.print_flag {\n                // Hex dump\n                for (i, b) in full_packet.iter().enumerate() {\n                    if i > 0 && i % 4 == 0 {\n                        if i % 24 == 0 { println!(); } else { print!(\" \"); }\n                    }\n                    print!(\"{:02X}\", b);\n                }\n                if !full_packet.is_empty() { println!(); }\n            } else {\n                let mut errmsg2: *mut std::ffi::c_char = std::ptr::null_mut();\n                unsafe { print_morloc_data_packet(full_packet.as_ptr(), schema, &mut errmsg2) };\n            }\n        }\n        OutputFormat::Packet => {\n            // Packet format: write raw binary packet to stdout (used by SLURM)\n            use std::io::Write;\n            let _ = std::io::stdout().lock().write_all(&full_packet);\n        }\n    }\n    process::clean_exit(0);\n}\n\n/// Print using Rust-native functions (kept for reference, currently unused).\n#[allow(dead_code)]\nfn print_result(\n    ptr: morloc_runtime::shm::AbsPtr,\n    schema: &morloc_runtime::Schema,\n    config: &NexusConfig,\n) {\n    use morloc_runtime::{json, mpack};\n\n    match config.output_format {\n        OutputFormat::Json => {\n            if config.print_flag {\n                if let Err(e) = json::pretty_print_voidstar(ptr, schema) {\n                    eprintln!(\"Error: {}\", e);\n                    process::clean_exit(1);\n                }\n            } else {\n                if let Err(e) = json::print_voidstar(ptr, schema) {\n                    eprintln!(\"Error: {}\", e);\n                    process::clean_exit(1);\n                }\n            }\n        }\n        OutputFormat::MessagePack => {\n            let mpk = match mpack::pack_with_schema(ptr, schema) {\n                Ok(m) => m,\n                Err(e) => {\n                    eprintln!(\"Error: {}\", e);\n                    process::clean_exit(1);\n                }\n            };\n            if config.print_flag {\n                // Hex dump for human-readable msgpack\n                for (i, byte) in mpk.iter().enumerate() {\n                    if i > 0 && i % 16 == 0 {\n                        println!();\n                    }\n                    print!(\"{:02x} \", byte);\n                }\n                println!();\n            } else {\n                use std::io::Write;\n                let stdout = std::io::stdout();\n                let mut handle = stdout.lock();\n                let _ = handle.write_all(&mpk);\n            }\n        }\n        OutputFormat::VoidStar | OutputFormat::Packet => {\n            eprintln!(\"Error: voidstar/packet output not supported in Rust-native print path\");\n            process::clean_exit(1);\n        }\n    }\n    process::clean_exit(0);\n}\n\n/// Execute a pure command by evaluating the expression via C library.\nfn run_pure_command(cmd: &Command, args: &[ArgValue], config: &NexusConfig) {\n    use morloc_runtime::schema::{parse_schema, SerialType};\n\n    extern \"C\" {\n        fn build_manifest_expr(\n            json_str: *const std::ffi::c_char,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut std::ffi::c_void; // morloc_expression_t*\n        fn morloc_eval(\n            expr: *mut std::ffi::c_void,\n            return_schema: *const morloc_runtime::cschema::CSchema,\n            arg_voidstar: *const *mut u8,\n            arg_schemas: *const *const morloc_runtime::cschema::CSchema,\n            nargs: usize,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut std::ffi::c_void; // absptr_t\n        fn parse_cli_data_argument(\n            dest: *mut u8, arg: *const std::ffi::c_void,\n            schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut u8;\n        fn initialize_positional(value: *mut std::ffi::c_char) -> *mut std::ffi::c_void;\n        fn free_argument_t(arg: *mut std::ffi::c_void);\n        fn get_morloc_data_packet_value(\n            data: *const u8, schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut std::ffi::c_char,\n        ) -> *mut u8;\n        fn make_standard_data_packet(\n            relptr: isize,\n            schema: *const morloc_runtime::cschema::CSchema,\n        ) -> *mut u8;\n        fn abs2rel(ptr: *mut std::ffi::c_void, errmsg: *mut *mut std::ffi::c_char) -> isize;\n    }\n\n    // Build expression tree from manifest JSON\n    let expr_json = match &cmd.expr {\n        Some(v) => serde_json::to_string(v).unwrap_or_default(),\n        None => {\n            eprintln!(\"Error: pure command '{}' has no expression\", cmd.name);\n            process::clean_exit(1);\n        }\n    };\n    let expr_c = std::ffi::CString::new(expr_json.as_str()).unwrap();\n    let mut errmsg: *mut std::ffi::c_char = std::ptr::null_mut();\n    let expr = unsafe { build_manifest_expr(expr_c.as_ptr(), &mut errmsg) };\n    if expr.is_null() {\n        let msg = unsafe_errmsg_to_string(errmsg);\n        eprintln!(\"Error: failed to build expression: {}\", msg);\n        process::clean_exit(1);\n    }\n\n    // Parse return schema\n    let return_schema = match parse_schema(&cmd.ret.schema) {\n        Ok(s) => s,\n        Err(e) => {\n            eprintln!(\"Error: failed to parse return schema '{}': {}\", cmd.ret.schema, e);\n            process::clean_exit(1);\n        }\n    };\n    let c_return_schema = morloc_runtime::cschema::CSchema::from_rust(&return_schema);\n\n    // The parsed `args` list and `cmd.args` are index-aligned 1:1 in\n    // declaration order: parse_command_args pushes one ArgValue for\n    // EVERY arg (including flags). The Haskell compiler emits one\n    // schema per arg position too. Walk both lists in lockstep; for\n    // flags, the schema_str() accessor returns None and we fall back\n    // to the bool schema \"b\" so the wire format stays consistent.\n    let mut c_arg_schemas: Vec<*const morloc_runtime::cschema::CSchema> = Vec::new();\n    let mut c_arg_voidstars: Vec<*mut u8> = Vec::new();\n\n    for (i, (arg_val, arg_def)) in args.iter().zip(cmd.args.iter()).enumerate() {\n        let schema_str = arg_def.schema_str().unwrap_or(\"b\");\n        let schema = match parse_schema(schema_str) {\n            Ok(s) => s,\n            Err(e) => {\n                eprintln!(\"Error: failed to parse arg schema #{}: {}\", i, e);\n                process::clean_exit(1);\n            }\n        };\n        let c_schema = morloc_runtime::cschema::CSchema::from_rust(&schema);\n\n        let json_str = match arg_val {\n            ArgValue::Value(s) => s.clone(),\n            ArgValue::Null => \"null\".to_string(),\n            ArgValue::Group { .. } => \"null\".to_string(),\n        };\n\n        // Parse CLI arg to data packet, then extract voidstar\n        let json_c = std::ffi::CString::new(json_str.as_str()).unwrap();\n        let c_arg = unsafe { initialize_positional(json_c.into_raw()) };\n        let c_pkt = unsafe { parse_cli_data_argument(std::ptr::null_mut(), c_arg, c_schema, &mut errmsg) };\n        unsafe { free_argument_t(c_arg) };\n\n        if c_pkt.is_null() {\n            let msg = unsafe_errmsg_to_string(errmsg);\n            eprintln!(\"Error: failed to parse argument #{}: {}\", i, msg);\n            process::clean_exit(1);\n        }\n\n        let voidstar = unsafe { get_morloc_data_packet_value(c_pkt, c_schema, &mut errmsg) };\n        unsafe { libc::free(c_pkt as *mut std::ffi::c_void) };\n        if voidstar.is_null() {\n            let msg = unsafe_errmsg_to_string(errmsg);\n            eprintln!(\"Error: failed to extract argument #{}: {}\", i, msg);\n            process::clean_exit(1);\n        }\n\n        c_arg_schemas.push(c_schema);\n        c_arg_voidstars.push(voidstar);\n    }\n\n    // Call morloc_eval\n    let result = unsafe {\n        morloc_eval(\n            expr,\n            c_return_schema,\n            c_arg_voidstars.as_ptr(),\n            c_arg_schemas.as_ptr(),\n            c_arg_voidstars.len(),\n            &mut errmsg,\n        )\n    };\n\n    if result.is_null() {\n        let msg = unsafe_errmsg_to_string(errmsg);\n        eprintln!(\"Error: evaluation failed: {}\", msg);\n        process::clean_exit(1);\n    }\n\n    // Convert result to relptr and make a data packet for printing\n    let result_relptr = unsafe { abs2rel(result, &mut errmsg) };\n    let result_packet = unsafe { make_standard_data_packet(result_relptr, c_return_schema) };\n\n    if result_packet.is_null() {\n        eprintln!(\"Error: failed to create result packet\");\n        process::clean_exit(1);\n    }\n\n    // Get packet as bytes for print_result_c\n    extern \"C\" {\n        fn morloc_packet_size(packet: *const u8, errmsg: *mut *mut std::ffi::c_char) -> usize;\n    }\n    let pkt_size = unsafe { morloc_packet_size(result_packet, &mut errmsg) };\n    let pkt_bytes = unsafe { std::slice::from_raw_parts(result_packet, pkt_size).to_vec() };\n\n    // Extract voidstar value from the result packet\n    let result_ptr = unsafe { get_morloc_data_packet_value(pkt_bytes.as_ptr(), c_return_schema, &mut errmsg) };\n\n    if return_schema.serial_type != SerialType::Nil {\n        print_result_c(result_ptr, c_return_schema, &pkt_bytes, false, config);\n    }\n\n    // Cleanup\n    for cs in &c_arg_schemas {\n        unsafe { morloc_runtime::cschema::CSchema::free(*cs as *mut morloc_runtime::cschema::CSchema) };\n    }\n    unsafe {\n        morloc_runtime::cschema::CSchema::free(c_return_schema);\n        libc::free(result_packet as *mut std::ffi::c_void);\n    }\n}\n\nfn unsafe_errmsg_to_string(errmsg: *mut std::ffi::c_char) -> String {\n    if errmsg.is_null() {\n        \"unknown error\".into()\n    } else {\n        let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n        unsafe { libc::free(errmsg as *mut std::ffi::c_void) };\n        s\n    }\n}\n\n// -- Helpers for command argument parsing ------------------------------------\n\nfn is_flag_opt(cmd: &Command, long_name: &str) -> bool {\n    cmd.args.iter().any(|a| match a {\n        Arg::Flag { long_opt, .. } => long_opt.as_deref() == Some(long_name),\n        Arg::Group { entries, .. } => entries.iter().any(|e| match &e.arg {\n            Arg::Flag { long_opt, .. } => long_opt.as_deref() == Some(long_name),\n            _ => false,\n        }),\n        _ => false,\n    })\n}\n\nfn is_rev_flag(cmd: &Command, name: &str) -> bool {\n    cmd.args.iter().any(|a| match a {\n        Arg::Flag { long_rev, .. } => long_rev.as_deref() == Some(name),\n        Arg::Group { entries, .. } => entries.iter().any(|e| match &e.arg {\n            Arg::Flag { long_rev, .. } => long_rev.as_deref() == Some(name),\n            _ => false,\n        }),\n        _ => false,\n    })\n}\n\nfn find_flag_by_rev(cmd: &Command, rev_name: &str) -> Option<String> {\n    for a in &cmd.args {\n        match a {\n            Arg::Flag { long_opt, long_rev, .. } => {\n                if long_rev.as_deref() == Some(rev_name) {\n                    return long_opt.clone();\n                }\n            }\n            Arg::Group { entries, .. } => {\n                for e in entries {\n                    if let Arg::Flag { long_opt, long_rev, .. } = &e.arg {\n                        if long_rev.as_deref() == Some(rev_name) {\n                            return long_opt.clone();\n                        }\n                    }\n                }\n            }\n            _ => {}\n        }\n    }\n    None\n}\n\nfn flag_forward_value(cmd: &Command, long_name: &str) -> String {\n    for a in &cmd.args {\n        if let Arg::Flag {\n            long_opt,\n            default_val,\n            ..\n        } = a\n        {\n            if long_opt.as_deref() == Some(long_name) {\n                let def = default_val.as_deref().unwrap_or(\"false\");\n                return if def == \"true\" {\n                    \"false\".into()\n                } else {\n                    \"true\".into()\n                };\n            }\n        }\n    }\n    \"true\".into()\n}\n\nfn flag_forward_value_by_short(cmd: &Command, ch: char) -> String {\n    for a in &cmd.args {\n        if let Arg::Flag {\n            short_opt,\n            default_val,\n            ..\n        } = a\n        {\n            if short_opt.as_deref().and_then(|s| s.chars().next()) == Some(ch) {\n                let def = default_val.as_deref().unwrap_or(\"false\");\n                return if def == \"true\" {\n                    \"false\".into()\n                } else {\n                    \"true\".into()\n                };\n            }\n        }\n    }\n    \"true\".into()\n}\n\nfn flag_reverse_value_by_rev(cmd: &Command, rev_name: &str) -> String {\n    // Search top-level and group entries\n    let check = |long_rev: &Option<String>, default_val: &Option<String>| -> Option<String> {\n        if long_rev.as_deref() == Some(rev_name) {\n            let def = default_val.as_deref().unwrap_or(\"false\");\n            Some(if def == \"true\" { \"true\".into() } else { \"false\".into() })\n        } else {\n            None\n        }\n    };\n    for a in &cmd.args {\n        match a {\n            Arg::Flag { long_rev, default_val, .. } => {\n                if let Some(v) = check(long_rev, default_val) { return v; }\n            }\n            Arg::Group { entries, .. } => {\n                for e in entries {\n                    if let Arg::Flag { long_rev, default_val, .. } = &e.arg {\n                        if let Some(v) = check(long_rev, default_val) { return v; }\n                    }\n                }\n            }\n            _ => {}\n        }\n    }\n    \"false\".into()\n}\n\nfn is_short_flag(cmd: &Command, ch: char) -> bool {\n    cmd.args.iter().any(|a| match a {\n        Arg::Flag { short_opt, .. } => {\n            short_opt.as_deref().and_then(|s| s.chars().next()) == Some(ch)\n        }\n        _ => false,\n    })\n}\n\nfn short_to_long(cmd: &Command, ch: char) -> Option<String> {\n    for a in &cmd.args {\n        let (s, l) = match a {\n            Arg::Optional {\n                short_opt,\n                long_opt,\n                ..\n            } => (short_opt.as_deref(), long_opt.clone()),\n            Arg::Flag {\n                short_opt,\n                long_opt,\n                ..\n            } => (short_opt.as_deref(), long_opt.clone()),\n            Arg::Group { entries, .. } => {\n                // Search inside group entries\n                for entry in entries {\n                    let (es, el) = match &entry.arg {\n                        Arg::Optional { short_opt, long_opt, .. } => (short_opt.as_deref(), long_opt.clone()),\n                        Arg::Flag { short_opt, long_opt, .. } => (short_opt.as_deref(), long_opt.clone()),\n                        _ => (None, None),\n                    };\n                    if es.and_then(|s| s.chars().next()) == Some(ch) {\n                        return el.or_else(|| Some(ch.to_string()));\n                    }\n                }\n                (None, None)\n            }\n            _ => (None, None),\n        };\n        if s.and_then(|s| s.chars().next()) == Some(ch) {\n            return l;\n        }\n    }\n    None\n}\n"
  },
  {
    "path": "data/rust/morloc-nexus/src/help.rs",
    "content": "//! Help text generation matching the C nexus output format.\n\nuse crate::manifest::{Arg, Command, GroupEntry, Manifest};\n\n/// Print nexus-level usage (no manifest loaded).\npub fn print_nexus_usage(prog_name: &str) -> ! {\n    eprintln!(\"Usage: {} [OPTION...] COMMAND [ARG...]\", prog_name);\n    eprintln!();\n    eprintln!(\"morloc-nexus is the morloc program dispatcher.\");\n    eprintln!();\n    eprintln!(\"Arguments:\");\n    eprintln!(\"  <manifest>           Path to a .manifest file or wrapper script\");\n    eprintln!();\n    eprintln!(\"Nexus options:\");\n    eprintln!(\"  -h, --help           Print this help message\");\n    eprintln!(\"  -p, --print          Pretty-print output for human consumption\");\n    eprintln!(\"  -o, --output-file    Print to this file instead of STDOUT\");\n    eprintln!(\"  -f, --output-format  Output format [json|mpk|voidstar]\");\n    eprintln!();\n    eprintln!(\"Daemon mode:\");\n    eprintln!(\"  --daemon             Run as a long-lived daemon\");\n    eprintln!(\"  --http-port PORT     Listen on HTTP port\");\n    eprintln!(\"  --port PORT          Listen on TCP port\");\n    eprintln!(\"  --socket PATH        Listen on Unix socket\");\n    eprintln!(\"  --eval-timeout SECS  Timeout for /eval requests (default: 30)\");\n    eprintln!();\n    eprintln!(\"Router mode:\");\n    eprintln!(\"  --router             Run as a multi-program router\");\n    eprintln!(\"  --fdb <path>         Path to fdb manifest directory\");\n    std::process::exit(0);\n}\n\n/// Print usage for a multi-command program.\npub fn print_usage(prog_name: &str, manifest: &Manifest) -> ! {\n    eprintln!(\"Usage: {} [OPTION...] COMMAND [ARG...]\", prog_name);\n\n    // Module-level description\n    if !manifest.desc.is_empty() {\n        eprintln!();\n        for line in &manifest.desc {\n            eprintln!(\"{}\", line);\n        }\n    }\n\n    eprintln!();\n    eprintln!(\"Nexus options:\");\n    eprintln!(\"  -h, --help           Print this help message\");\n    eprintln!(\"  -p, --print          Pretty-print output for human consumption\");\n    eprintln!(\"  -o, --output-file    Print to this file instead of STDOUT\");\n    eprintln!(\"  -f, --output-format  Output format [json|mpk|voidstar]\");\n    eprintln!();\n    eprintln!(\"Daemon mode:\");\n    eprintln!(\"  --daemon             Run as a long-lived daemon\");\n    eprintln!(\"  --http-port PORT     Listen on HTTP port\");\n    eprintln!(\"  --port PORT          Listen on TCP port\");\n    eprintln!(\"  --socket PATH        Listen on Unix socket\");\n    eprintln!();\n\n    // Ungrouped commands\n    let ungrouped: Vec<&Command> = manifest\n        .commands\n        .iter()\n        .filter(|c| c.group.is_none())\n        .collect();\n\n    if !ungrouped.is_empty() {\n        eprintln!(\"Commands (call with -h/--help for more info):\");\n        let longest = ungrouped.iter().map(|c| c.name.len()).max().unwrap_or(0);\n        for cmd in &ungrouped {\n            eprint!(\"  {}\", cmd.name);\n            if let Some(first) = cmd.desc.first() {\n                let pad = longest - cmd.name.len() + 2;\n                eprint!(\"{:pad$}{}\", \"\", first, pad = pad);\n            }\n            eprintln!();\n        }\n    }\n\n    if !manifest.groups.is_empty() {\n        if !ungrouped.is_empty() {\n            eprintln!();\n        }\n        eprintln!(\"Command groups (call with -h/--help for more info):\");\n        let longest = manifest.groups.iter().map(|g| g.name.len()).max().unwrap_or(0);\n        for grp in &manifest.groups {\n            eprint!(\"  {}\", grp.name);\n            if let Some(first) = grp.desc.first() {\n                let pad = longest - grp.name.len() + 2;\n                eprint!(\"{:pad$}{}\", \"\", first, pad = pad);\n            }\n            eprintln!();\n        }\n    }\n\n    // Epilogues\n    for epilogue in &manifest.epilogues {\n        eprintln!();\n        for line in epilogue {\n            eprintln!(\"{}\", line);\n        }\n    }\n\n    std::process::exit(0);\n}\n\n/// Print usage for a command group.\npub fn print_group_usage(prog_name: &str, manifest: &Manifest, group_name: &str) -> ! {\n    let grp = manifest.groups.iter().find(|g| g.name == group_name);\n\n    eprintln!(\"Usage: {} {} COMMAND [ARG...]\", prog_name, group_name);\n    if let Some(g) = grp {\n        if !g.desc.is_empty() {\n            eprintln!();\n            for line in &g.desc {\n                eprintln!(\"{}\", line);\n            }\n        }\n    }\n    eprintln!(\"\\nCommands:\");\n\n    let cmds: Vec<&Command> = manifest\n        .commands\n        .iter()\n        .filter(|c| c.group.as_deref() == Some(group_name))\n        .collect();\n\n    let longest = cmds.iter().map(|c| c.name.len()).max().unwrap_or(0);\n    for cmd in &cmds {\n        eprint!(\"  {}\", cmd.name);\n        if let Some(first) = cmd.desc.first() {\n            let pad = longest - cmd.name.len() + 2;\n            eprint!(\"{:pad$}{}\", \"\", first, pad = pad);\n        }\n        eprintln!();\n    }\n\n    std::process::exit(0);\n}\n\n/// Print help for a specific subcommand.\npub fn print_command_help(prog_name: &str, cmd: &Command) -> ! {\n    // Usage line\n    if let Some(ref group) = cmd.group {\n        eprint!(\"Usage: {} {} {}\", prog_name, group, cmd.name);\n    } else {\n        eprint!(\"Usage: {} {}\", prog_name, cmd.name);\n    }\n    print_usage_suffix(cmd);\n    eprintln!();\n    if !cmd.desc.is_empty() {\n        eprintln!();\n    }\n\n    print_command_body(cmd);\n    std::process::exit(0);\n}\n\n/// Print help for a single-command program.\npub fn print_command_help_single(prog_name: &str, cmd: &Command) -> ! {\n    eprint!(\"Usage: {}\", prog_name);\n    print_usage_suffix(cmd);\n    eprintln!();\n\n    // Description\n    if !cmd.desc.is_empty() {\n        eprintln!();\n        for (i, line) in cmd.desc.iter().enumerate() {\n            if i == 0 && line.is_empty() {\n                continue;\n            }\n            eprintln!(\"{}\", line);\n        }\n    }\n\n    // Nexus options\n    eprintln!(\"\\nNexus options:\");\n    eprintln!(\"  --print          Pretty-print output for human consumption\");\n    eprintln!(\"  --output-file    Print to this file instead of STDOUT\");\n    eprintln!(\"  --output-form    Output format [json|mpk|voidstar]\");\n    eprintln!(\"\\nDaemon mode:\");\n    eprintln!(\"  --daemon         Run as a long-lived daemon\");\n    eprintln!(\"  --http-port PORT Listen on HTTP port\");\n    eprintln!(\"  --port PORT      Listen on TCP port\");\n    eprintln!(\"  --socket PATH    Listen on UNIX socket\");\n\n    print_args_body(cmd);\n    print_type_definitions(cmd);\n    print_return_info(cmd);\n    std::process::exit(0);\n}\n\n// -- Helpers ----------------------------------------------------------------\n\nfn print_usage_suffix(cmd: &Command) {\n    let has_opts = cmd.args.iter().any(|a| !matches!(a, Arg::Positional { .. }));\n    if has_opts {\n        eprint!(\" [OPTION...]\");\n    }\n    for arg in &cmd.args {\n        if let Arg::Positional { metavar, .. } = arg {\n            eprint!(\" {}\", metavar.as_deref().unwrap_or(\"ARG\"));\n        }\n    }\n}\n\nfn print_command_body(cmd: &Command) {\n    // Description\n    if !cmd.desc.is_empty() {\n        for (i, line) in cmd.desc.iter().enumerate() {\n            if i == 0 && line.is_empty() {\n                continue;\n            }\n            eprintln!(\"{}\", line);\n        }\n    }\n\n    print_args_body(cmd);\n    print_type_definitions(cmd);\n    print_return_info(cmd);\n}\n\nfn print_args_body(cmd: &Command) {\n    // Positional arguments\n    let has_pos = cmd.args.iter().any(|a| matches!(a, Arg::Positional { .. }));\n    if has_pos {\n        eprintln!(\"\\nPositional arguments:\");\n        for arg in &cmd.args {\n            if let Arg::Positional {\n                metavar,\n                type_desc,\n                desc,\n                ..\n            } = arg\n            {\n                eprint!(\"  {}\", metavar.as_deref().unwrap_or(\"ARG\"));\n                if let Some(first) = desc.first() {\n                    eprint!(\"  {}\", first);\n                }\n                eprintln!();\n                if let Some(td) = type_desc {\n                    eprintln!(\"      type: {}\", td);\n                }\n            }\n        }\n    }\n\n    // Optional arguments (opts and flags)\n    let has_opt = cmd\n        .args\n        .iter()\n        .any(|a| matches!(a, Arg::Optional { .. } | Arg::Flag { .. }));\n    if has_opt {\n        eprintln!(\"\\nOptional arguments:\");\n        for arg in &cmd.args {\n            print_opt_or_flag(arg);\n        }\n    }\n\n    // Group arguments\n    for arg in &cmd.args {\n        if let Arg::Group {\n            metavar,\n            desc,\n            group_opt,\n            entries,\n            ..\n        } = arg\n        {\n            eprintln!(\"\\nGroup arguments:\");\n            eprint!(\"  {}\", metavar.as_deref().unwrap_or(\"\"));\n            if let Some(first) = desc.first() {\n                eprint!(\": {}\", first);\n            }\n            eprintln!();\n\n            if let Some(go) = group_opt {\n                eprint!(\"    \");\n                if let Some(ref s) = go.short_opt {\n                    eprint!(\"-{}, \", s);\n                }\n                if let Some(ref l) = go.long_opt {\n                    eprint!(\"--{} {}\", l, metavar.as_deref().unwrap_or(\"\"));\n                }\n                eprintln!();\n                eprintln!(\"        provide record as file or JSON string\");\n            }\n\n            for entry in entries {\n                print_group_entry(entry);\n            }\n        }\n    }\n}\n\nfn print_opt_or_flag(arg: &Arg) {\n    match arg {\n        Arg::Optional {\n            short_opt,\n            long_opt,\n            metavar,\n            default_val,\n            desc,\n            type_desc,\n            ..\n        } => {\n            eprint!(\"    \");\n            match (short_opt.as_deref(), long_opt.as_deref()) {\n                (Some(s), Some(l)) => eprint!(\n                    \"-{}, --{} {}\",\n                    s,\n                    l,\n                    metavar.as_deref().unwrap_or(\"\")\n                ),\n                (Some(s), None) => {\n                    eprint!(\"-{} {}\", s, metavar.as_deref().unwrap_or(\"\"))\n                }\n                (None, Some(l)) => eprint!(\n                    \"--{} {}\",\n                    l,\n                    metavar.as_deref().unwrap_or(\"\")\n                ),\n                _ => {}\n            }\n            eprintln!();\n            if let Some(d) = default_val {\n                eprintln!(\"        default: {}\", d);\n            }\n            for d in desc {\n                eprintln!(\"        {}\", d);\n            }\n            if let Some(td) = type_desc {\n                eprintln!(\"        type: {}\", td);\n            }\n        }\n        Arg::Flag {\n            short_opt,\n            long_opt,\n            long_rev,\n            default_val,\n            desc,\n            ..\n        } => {\n            eprint!(\"    \");\n            match (short_opt.as_deref(), long_opt.as_deref()) {\n                (Some(s), Some(l)) => eprint!(\"-{}, --{}\", s, l),\n                (Some(s), None) => eprint!(\"-{}\", s),\n                (None, Some(l)) => eprint!(\"--{}\", l),\n                _ => {}\n            }\n            eprintln!();\n            if let Some(rev) = long_rev {\n                eprintln!(\"    --{}\", rev);\n            }\n            if let Some(d) = default_val {\n                eprintln!(\"        default: {}\", d);\n            }\n            for d in desc {\n                eprintln!(\"        {}\", d);\n            }\n        }\n        _ => {}\n    }\n}\n\nfn print_group_entry(entry: &GroupEntry) {\n    let ea = &entry.arg;\n    eprint!(\"    \");\n    match ea {\n        Arg::Optional {\n            short_opt,\n            long_opt,\n            metavar,\n            default_val,\n            desc,\n            ..\n        } => {\n            match (short_opt.as_deref(), long_opt.as_deref()) {\n                (Some(s), Some(l)) => {\n                    eprint!(\"-{}, --{}\", s, l);\n                    if let Some(m) = metavar {\n                        eprint!(\" {}\", m);\n                    }\n                }\n                (Some(s), None) => {\n                    eprint!(\"-{}\", s);\n                    if let Some(m) = metavar {\n                        eprint!(\" {}\", m);\n                    }\n                }\n                (None, Some(l)) => {\n                    eprint!(\"--{}\", l);\n                    if let Some(m) = metavar {\n                        eprint!(\" {}\", m);\n                    }\n                }\n                _ => {}\n            }\n            eprintln!();\n            if let Some(d) = default_val {\n                eprintln!(\"        default: {}\", d);\n            }\n            for d in desc {\n                eprintln!(\"        {}\", d);\n            }\n        }\n        Arg::Flag {\n            short_opt,\n            long_opt,\n            default_val,\n            desc,\n            ..\n        } => {\n            match (short_opt.as_deref(), long_opt.as_deref()) {\n                (Some(s), Some(l)) => eprint!(\"-{}, --{}\", s, l),\n                (Some(s), None) => eprint!(\"-{}\", s),\n                (None, Some(l)) => eprint!(\"--{}\", l),\n                _ => {}\n            }\n            eprintln!();\n            if let Some(d) = default_val {\n                eprintln!(\"        default: {}\", d);\n            }\n            for d in desc {\n                eprintln!(\"        {}\", d);\n            }\n        }\n        _ => {}\n    }\n}\n\nfn print_return_info(cmd: &Command) {\n    eprintln!(\"\\nReturn: {}\", cmd.ret.type_desc);\n    for line in &cmd.ret.desc {\n        eprintln!(\"  {}\", line);\n    }\n}\n\n// -- Schema-walking renderer for the Record / Table Schemas sections ---------\n//\n// In v2 the manifest no longer carries a parallel `type_definitions` list.\n// The same information is reconstructed at help-render time by walking\n// each command's args + return value: for every typed entry whose schema\n// parses to a Map at the top level, we treat the entry's `type` name as\n// the named-type label and its schema's keys + parameter schemas as the\n// field list. The record-vs-table distinction comes from the entry's\n// `kind` constraint.\n\n/// A rendered named-type layout for the help output, sourced from a\n/// parsed schema.\nstruct TypeLayout<'a> {\n    name: &'a str,\n    /// \"record\" | \"object\" | \"table\" -- comes from the `kind` constraint.\n    kind: &'a str,\n    /// (field_name, rendered_type)\n    fields: Vec<(String, String)>,\n}\n\n/// Pretty-render a parsed `Schema` as a morloc-flavored type string,\n/// suitable for the field-type column in the schemas block.\nfn render_schema_type(s: &morloc_runtime::schema::Schema) -> String {\n    use morloc_runtime::schema::SerialType::*;\n    match s.serial_type {\n        Nil => \"()\".into(),\n        Bool => \"Bool\".into(),\n        Sint8 => \"Int8\".into(),\n        Sint16 => \"Int16\".into(),\n        Sint32 => \"Int\".into(),\n        Sint64 => \"Int64\".into(),\n        Uint8 => \"UInt8\".into(),\n        Uint16 => \"UInt16\".into(),\n        Uint32 => \"UInt32\".into(),\n        Uint64 => \"UInt64\".into(),\n        Float32 => \"Float32\".into(),\n        Float64 => \"Real\".into(),\n        String => \"Str\".into(),\n        Array => format!(\n            \"[{}]\",\n            s.parameters\n                .first()\n                .map(render_schema_type)\n                .unwrap_or_else(|| \"?\".into())\n        ),\n        Tuple => {\n            // `String` here is fully qualified because the surrounding\n            // match brings `SerialType::String` into scope as a variant,\n            // shadowing the std `String` type.\n            let inner: Vec<std::string::String> =\n                s.parameters.iter().map(render_schema_type).collect();\n            format!(\"({})\", inner.join(\", \"))\n        }\n        Map => {\n            // A nested record-ish thing. Use the hint string when present\n            // (which carries the language-specific concrete type name);\n            // otherwise show an inline placeholder. Either way, the\n            // nested record will be listed separately in the same\n            // schema block if its name appears as another arg's type.\n            s.hint.clone().unwrap_or_else(|| \"{..}\".into())\n        }\n        Optional => format!(\n            \"?{}\",\n            s.parameters\n                .first()\n                .map(render_schema_type)\n                .unwrap_or_else(|| \"?\".into())\n        ),\n        Tensor => format!(\n            \"Tensor<{}>\",\n            s.parameters\n                .first()\n                .map(render_schema_type)\n                .unwrap_or_else(|| \"?\".into())\n        ),\n    }\n}\n\n/// Try to extract a `TypeLayout` from a (name, schema_string, kind)\n/// triple. Returns None if any input is missing or the schema does not\n/// parse to a top-level Map. Tables (whose fields are arrays in the wire\n/// schema) render their fields by the array's element type, mirroring\n/// how the user wrote them in the source.\nfn extract_named_layout<'a>(\n    type_name: Option<&'a str>,\n    schema_str: Option<&str>,\n    kind: Option<&'a str>,\n) -> Option<TypeLayout<'a>> {\n    use morloc_runtime::schema::SerialType;\n    let name = type_name?;\n    let schema = schema_str?;\n    let kind = kind?;\n    let parsed = morloc_runtime::schema::parse_schema(schema).ok()?;\n    if parsed.serial_type != SerialType::Map {\n        return None;\n    }\n    // For a table, every field's wire schema is an Array -- peel one layer\n    // off so the user sees `name :: Str` instead of `name :: [Str]`.\n    let strip_array = kind == \"table\";\n    let fields = parsed\n        .keys\n        .iter()\n        .zip(parsed.parameters.iter())\n        .map(|(k, p)| {\n            let inner = if strip_array && p.serial_type == SerialType::Array {\n                p.parameters.first().unwrap_or(p)\n            } else {\n                p\n            };\n            (k.clone(), render_schema_type(inner))\n        })\n        .collect();\n    Some(TypeLayout { name, kind, fields })\n}\n\n/// Walk every arg + the return of a command. For each typed entry, try\n/// to build a layout. Deduplicate by type name, preserving discovery\n/// order so the rendering matches the order types appear in the\n/// signature.\nfn collect_command_layouts<'a>(cmd: &'a Command) -> Vec<TypeLayout<'a>> {\n    use std::collections::HashSet;\n    let mut seen: HashSet<&str> = HashSet::new();\n    let mut out: Vec<TypeLayout<'a>> = Vec::new();\n\n    for arg in &cmd.args {\n        // Skip unrolled groups without a group_opt: each field already\n        // appears as its own flag in the usage, so the schema is redundant.\n        // Keep the schema when group_opt is present (the user can pass the\n        // entire record as JSON and needs the full field spec).\n        if let Arg::Group { group_opt: None, .. } = arg {\n            continue;\n        }\n        if let Some(layout) =\n            extract_named_layout(arg.type_desc_str(), arg.schema_str(), arg.kind_constraint())\n        {\n            if seen.insert(layout.name) {\n                out.push(layout);\n            }\n        }\n    }\n\n    let ret_kind = cmd\n        .ret\n        .constraints\n        .iter()\n        .find(|c| c.ctype == \"kind\")\n        .and_then(|c| c.value.as_ref().and_then(|v| v.as_str()));\n    if let Some(layout) =\n        extract_named_layout(Some(&cmd.ret.type_desc), Some(&cmd.ret.schema), ret_kind)\n    {\n        if seen.insert(layout.name) {\n            out.push(layout);\n        }\n    }\n\n    out\n}\n\n/// Print the Record Schemas / Table Schemas sections for any named\n/// types referenced in this command's signature. The whole block is\n/// skipped when there are none.\nfn print_type_definitions(cmd: &Command) {\n    let layouts = collect_command_layouts(cmd);\n    if layouts.is_empty() {\n        return;\n    }\n\n    let records: Vec<&TypeLayout> = layouts.iter().filter(|l| l.kind != \"table\").collect();\n    let tables: Vec<&TypeLayout> = layouts.iter().filter(|l| l.kind == \"table\").collect();\n\n    if !records.is_empty() {\n        eprintln!(\"\\nRecord Schemas:\");\n        print_layouts(&records);\n    }\n    if !tables.is_empty() {\n        eprintln!(\"\\nTable Schemas:\");\n        print_layouts(&tables);\n    }\n}\n\n/// Render a list of layouts. Each layout shows its type name on its own\n/// line followed by the field list with `::`-aligned column widths.\n/// Definitions are separated by blank lines.\nfn print_layouts(defs: &[&TypeLayout]) {\n    for (i, def) in defs.iter().enumerate() {\n        if i > 0 {\n            eprintln!();\n        }\n        eprintln!(\"  {}\", def.name);\n\n        let name_width = def\n            .fields\n            .iter()\n            .map(|(k, _)| k.len())\n            .max()\n            .unwrap_or(0);\n        for (k, v) in &def.fields {\n            eprintln!(\"    {:width$} :: {}\", k, v, width = name_width);\n        }\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-nexus/src/main.rs",
    "content": "//! Morloc Nexus: CLI dispatcher for multi-language pool orchestration.\n//!\n//! Replaces data/nexus.c. Entry point for all morloc programs.\n//! Reads a .manifest JSON, spawns language pool daemons, and routes\n//! function calls to them over Unix sockets.\n\nmod dispatch;\nmod help;\nmod manifest;\nmod process;\n\nuse dispatch::NexusConfig;\n\n/// Resolve the morloc data directory: MORLOC_HOME if set, else ~/.local/share/morloc.\nfn morloc_home() -> String {\n    std::env::var(\"MORLOC_HOME\").unwrap_or_else(|_| {\n        format!(\n            \"{}/.local/share/morloc\",\n            std::env::var(\"HOME\").unwrap_or_else(|_| \"/root\".into())\n        )\n    })\n}\n\nfn main() {\n    let args: Vec<String> = std::env::args().collect();\n\n    let mut config = NexusConfig::default();\n\n    // First pass: parse nexus-level options\n    let opt_end = dispatch::parse_nexus_options(&args, &mut config);\n\n    // Handle --router mode (no manifest needed)\n    if config.router_flag {\n        run_router(&config);\n        std::process::exit(0);\n    }\n\n    // If -h with no manifest argument, show nexus help\n    let prog_name = args.first().map(|s| s.as_str()).unwrap_or(\"morloc-nexus\");\n    if config.help_flag && opt_end >= args.len() {\n        help::print_nexus_usage(prog_name);\n    }\n\n    // Manifest path: either an explicit argument or derived from argv[0].\n    // In daemon mode (`./test --daemon`), the manifest is at `<argv[0]>.manifest`.\n    // In normal mode (`./test add 1 2`), argv[0] is also the manifest source.\n    // An explicit path argument is only needed for multi-command mode.\n    let manifest_path = if opt_end < args.len() {\n        args[opt_end].clone()\n    } else if config.daemon_flag || config.router_flag {\n        // Daemon/router: derive from argv[0]\n        args[0].clone()\n    } else {\n        help::print_nexus_usage(prog_name)\n    };\n    let prog_name = std::path::Path::new(&manifest_path)\n        .file_name()\n        .and_then(|n| n.to_str())\n        .unwrap_or(&manifest_path)\n        .to_string();\n    let mut arg_cursor = if opt_end < args.len() { opt_end + 1 } else { args.len() };\n\n    // Read and parse manifest\n    let payload = match manifest::read_manifest_payload(&manifest_path) {\n        Ok(p) => p,\n        Err(e) => {\n            eprintln!(\"Failed to load manifest '{}': {}\", manifest_path, e);\n            std::process::exit(1);\n        }\n    };\n\n    let manifest = match manifest::parse_manifest(&payload) {\n        Ok(m) => m,\n        Err(e) => {\n            eprintln!(\"Failed to parse manifest '{}': {}\", manifest_path, e);\n            std::process::exit(1);\n        }\n    };\n\n    let single_command = manifest.commands.len() == 1 && manifest.groups.is_empty();\n\n    // Second pass: parse options after manifest path (skip in single-command mode)\n    let mut remaining_args = args.clone();\n    if !single_command {\n        arg_cursor = dispatch::parse_nexus_options(&args[opt_end..], &mut config) + opt_end;\n    } else {\n        // In single-command mode, extract daemon/server long options manually\n        dispatch::extract_global_options(&mut remaining_args, &mut config);\n    }\n\n    // Pool paths in the manifest are absolute, so no chdir is needed.\n    // This lets user programs resolve file paths relative to the caller's CWD.\n    // Source imports in pools resolve via __file__-relative paths (Python sys.path)\n    // or script-relative paths (R .morloc.source) rather than depending on CWD.\n\n    // Validate pool executables exist\n    if let Err(e) = process::validate_pools(&manifest.pools) {\n        eprintln!(\"Error: {}\", e);\n        std::process::exit(1);\n    }\n\n    // Handle help flag with manifest loaded\n    if config.help_flag {\n        if single_command {\n            help::print_command_help_single(&prog_name, &manifest.commands[0]);\n        } else {\n            help::print_usage(&prog_name, &manifest);\n        }\n    }\n\n    // Setup tmpdir and SHM\n    let tmpdir = match process::make_tmpdir() {\n        Ok(t) => t,\n        Err(e) => {\n            eprintln!(\"Error: {}\", e);\n            std::process::exit(1);\n        }\n    };\n    process::set_tmpdir(tmpdir.clone());\n\n    let job_hash = process::make_job_hash(42);\n    let shm_basename = format!(\"morloc-{}\", job_hash);\n\n    // Initialize shared memory via libmorloc.so using dlsym.\n    // CRITICAL: We must use dlsym to call the CDYLIB's shinit, not the rlib's.\n    // The rlib and cdylib have separate static globals (VOLUMES, ALLOC_MUTEX, etc.).\n    // All SHM operations in pool-facing C code go through the cdylib's globals.\n    // If we call the rlib's shinit, the cdylib's globals stay uninitialized.\n    {\n        let _lib = unsafe { libc::dlopen(std::ptr::null(), libc::RTLD_NOW) };\n        // RTLD_DEFAULT (NULL handle) searches in order: executable, then loaded libs\n        // But the rlib symbols come first. Use RTLD_NEXT-style lookup via the .so path.\n        let lib_path = std::ffi::CString::new(\n            format!(\"{}/lib/libmorloc.so\", morloc_home())\n        ).unwrap();\n        let lib = unsafe { libc::dlopen(lib_path.as_ptr(), libc::RTLD_NOW | libc::RTLD_GLOBAL) };\n        if lib.is_null() {\n            let err = unsafe { libc::dlerror() };\n            let err_msg = if err.is_null() {\n                \"unknown error\".to_string()\n            } else {\n                unsafe { std::ffi::CStr::from_ptr(err) }.to_string_lossy().into_owned()\n            };\n            eprintln!(\"Error: failed to load libmorloc.so: {}\", err_msg);\n            process::clean_exit(1);\n        }\n\n        type ShmSetFallbackFn = unsafe extern \"C\" fn(*const std::ffi::c_char);\n        type ShinitFn = unsafe extern \"C\" fn(*const std::ffi::c_char, usize, usize, *mut *mut std::ffi::c_char) -> *mut std::ffi::c_void;\n\n        let set_fb_sym = std::ffi::CString::new(\"shm_set_fallback_dir\").unwrap();\n        let shinit_sym = std::ffi::CString::new(\"shinit\").unwrap();\n\n        let set_fb: ShmSetFallbackFn = unsafe { std::mem::transmute(libc::dlsym(lib, set_fb_sym.as_ptr())) };\n        let do_shinit: ShinitFn = unsafe { std::mem::transmute(libc::dlsym(lib, shinit_sym.as_ptr())) };\n\n        let tmpdir_c = std::ffi::CString::new(tmpdir.as_str()).unwrap();\n        let basename_c = std::ffi::CString::new(shm_basename.as_str()).unwrap();\n        let mut errmsg: *mut std::ffi::c_char = std::ptr::null_mut();\n        unsafe {\n            set_fb(tmpdir_c.as_ptr());\n            let shm = do_shinit(basename_c.as_ptr(), 0, 0xffff, &mut errmsg);\n            if shm.is_null() {\n                let msg = if !errmsg.is_null() {\n                    let s = std::ffi::CStr::from_ptr(errmsg).to_string_lossy().into_owned();\n                    libc::free(errmsg as *mut std::ffi::c_void);\n                    s\n                } else {\n                    \"unknown error\".into()\n                };\n                eprintln!(\"Error: failed to initialize shared memory: {}\", msg);\n                process::clean_exit(1);\n            }\n        }\n        unsafe { libc::dlclose(lib) };\n    }\n\n    // Become subreaper for orphaned grandchildren\n    process::set_child_subreaper();\n\n    // Install signal handlers\n    process::install_signal_handlers();\n\n    // Setup sockets\n    let mut sockets = process::setup_sockets(&manifest.pools, &tmpdir, &shm_basename);\n\n    // Daemon mode\n    if config.daemon_flag {\n        let all_indices: Vec<usize> = (0..manifest.pools.len()).collect();\n        if let Err(e) = process::start_daemons(&mut sockets, &all_indices) {\n            eprintln!(\"Error: {}\", e);\n            process::clean_exit(1);\n        }\n\n        // Build DaemonConfig and call daemon_run in libmorloc.so\n        run_daemon(&config, &mut sockets, &shm_basename, &payload);\n        process::clean_exit(0);\n    }\n\n    // Normal CLI mode\n    if config.packet_path.is_none() {\n        if single_command {\n            // Single-command: dispatch directly to the command, no subcommand lookup\n            // Allow optional command name prefix for backward compatibility\n            let mut cmd_arg_start = arg_cursor;\n            if cmd_arg_start < remaining_args.len()\n                && remaining_args[cmd_arg_start] == manifest.commands[0].name\n            {\n                cmd_arg_start += 1;\n            }\n            dispatch::dispatch_command(\n                &remaining_args,\n                cmd_arg_start,\n                &config,\n                &manifest,\n                &manifest.commands[0],\n                &mut sockets,\n                &prog_name,\n            );\n        } else {\n            if arg_cursor >= remaining_args.len() {\n                help::print_usage(&prog_name, &manifest);\n            }\n            dispatch::dispatch(\n                &remaining_args,\n                arg_cursor,\n                &shm_basename,\n                &config,\n                &manifest,\n                &mut sockets,\n                &prog_name,\n            );\n        }\n    } else {\n        // Call-packet mode: read a pre-built call packet from file,\n        // send to the appropriate pool, write result as MessagePack.\n        // Used by SLURM workers on remote compute nodes.\n        run_call_packet(&config, &tmpdir);\n    }\n\n    process::clean_exit(0);\n}\n\n/// Run the daemon event loop by calling daemon_run in libmorloc.so.\nfn run_daemon(\n    config: &dispatch::NexusConfig,\n    sockets: &mut [process::PoolSocket],\n    shm_basename: &str,\n    manifest_payload: &str,\n) {\n    use std::ffi::{c_char, c_void, CString};\n    use std::ptr;\n\n    // daemon_run and parse_manifest signatures from libmorloc.so\n    extern \"C\" {\n        fn daemon_run(\n            config: *mut c_void,        // *mut DaemonConfig\n            manifest: *mut c_void,       // *mut Manifest (opaque)\n            sockets: *mut c_void,        // *mut MorlocSocket\n            n_pools: usize,\n            shm_basename: *const c_char,\n        );\n        fn parse_manifest(text: *const c_char, errmsg: *mut *mut c_char) -> *mut c_void;\n    }\n\n    // Build C MorlocSocket array (matches daemon_ffi::MorlocSocket layout)\n    #[repr(C)]\n    struct CMorlocSocket {\n        lang: *mut c_char,\n        syscmd: *mut *mut c_char,\n        socket_filename: *mut c_char,\n        pid: i32,\n    }\n\n    let n_pools = sockets.len();\n    let mut c_sockets: Vec<CMorlocSocket> = Vec::with_capacity(n_pools);\n    // Keep CStrings alive for the duration\n    let mut _keepalive: Vec<Vec<CString>> = Vec::new();\n\n    for sock in sockets.iter() {\n        let lang_c = CString::new(sock.lang.as_str()).unwrap();\n        let socket_c = CString::new(sock.socket_path.as_str()).unwrap();\n\n        // Build NULL-terminated syscmd array\n        let mut cmd_ptrs: Vec<*mut c_char> = Vec::new();\n        let mut cmd_strs: Vec<CString> = Vec::new();\n        for arg in &sock.syscmd {\n            let c = CString::new(arg.to_bytes()).unwrap();\n            cmd_ptrs.push(c.as_ptr() as *mut c_char);\n            cmd_strs.push(c);\n        }\n        cmd_ptrs.push(ptr::null_mut());\n\n        c_sockets.push(CMorlocSocket {\n            lang: lang_c.as_ptr() as *mut c_char,\n            syscmd: cmd_ptrs.as_ptr() as *mut *mut c_char,\n            socket_filename: socket_c.as_ptr() as *mut c_char,\n            pid: sock.pid,\n        });\n\n        // Keep everything alive\n        _keepalive.push(cmd_strs);\n        _keepalive.push(vec![lang_c, socket_c]);\n    }\n\n    // Build C DaemonConfig (matches daemon_ffi::DaemonConfig layout)\n    #[repr(C)]\n    struct CDaemonConfig {\n        unix_socket_path: *const c_char,\n        tcp_port: i32,\n        http_port: i32,\n        pool_check_fn: *const c_void,   // Option<fn> as null\n        pool_alive_fn: *const c_void,   // Option<fn> as null\n        n_pools: usize,\n        eval_timeout: i32,\n    }\n\n    let unix_socket_cstr = config.unix_socket_path.as_ref()\n        .map(|p| CString::new(p.as_str()).unwrap());\n\n    let mut daemon_config = CDaemonConfig {\n        unix_socket_path: unix_socket_cstr.as_ref()\n            .map_or(ptr::null(), |c| c.as_ptr()),\n        tcp_port: config.tcp_port.unwrap_or(0),\n        http_port: config.http_port.unwrap_or(0),\n        pool_check_fn: ptr::null(),\n        pool_alive_fn: process::pool_is_alive_ptr(),\n        n_pools,\n        eval_timeout: config.eval_timeout,\n    };\n\n    // Parse manifest via the C FFI (so daemon_run gets the C-layout manifest).\n    // The payload was already extracted from the wrapper script by the main flow.\n    let manifest_c_str = CString::new(manifest_payload).unwrap();\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let c_manifest = unsafe { parse_manifest(manifest_c_str.as_ptr(), &mut errmsg) };\n    if c_manifest.is_null() {\n        let msg = if !errmsg.is_null() {\n            let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n            unsafe { libc::free(errmsg as *mut c_void) };\n            s\n        } else {\n            \"unknown error\".into()\n        };\n        eprintln!(\"Error: failed to parse manifest for daemon: {}\", msg);\n        process::clean_exit(1);\n    }\n\n    let shm_c = CString::new(shm_basename).unwrap();\n\n    unsafe {\n        daemon_run(\n            &mut daemon_config as *mut CDaemonConfig as *mut c_void,\n            c_manifest,\n            c_sockets.as_mut_ptr() as *mut c_void,\n            n_pools,\n            shm_c.as_ptr(),\n        );\n    }\n}\n\n/// Run the multi-program router daemon.\n/// Scans the fdb directory for .manifest files and serves them all via HTTP/TCP/Unix.\nfn run_router(config: &dispatch::NexusConfig) {\n    use std::ffi::{c_char, c_void, CString};\n    use std::ptr;\n\n    extern \"C\" {\n        fn router_init(fdb_path: *const c_char, errmsg: *mut *mut c_char) -> *mut c_void;\n        fn router_run(config: *mut c_void, router: *mut c_void);\n        fn router_free(router: *mut c_void);\n    }\n\n    let fdb_path = config.fdb_path.clone().unwrap_or_else(|| {\n        format!(\"{}/fdb\", morloc_home())\n    });\n    let fdb_c = CString::new(fdb_path.as_str()).unwrap();\n\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let router = unsafe { router_init(fdb_c.as_ptr(), &mut errmsg) };\n    if router.is_null() {\n        let msg = if !errmsg.is_null() {\n            let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n            unsafe { libc::free(errmsg as *mut c_void) };\n            s\n        } else {\n            \"unknown error\".into()\n        };\n        eprintln!(\"Error: failed to initialize router: {}\", msg);\n        std::process::exit(1);\n    }\n\n    // Build DaemonConfig for the router\n    #[repr(C)]\n    struct CDaemonConfig {\n        unix_socket_path: *const c_char,\n        tcp_port: i32,\n        http_port: i32,\n        pool_check_fn: *const c_void,\n        pool_alive_fn: *const c_void,\n        n_pools: usize,\n        eval_timeout: i32,\n    }\n\n    let unix_cstr = config.unix_socket_path.as_ref()\n        .map(|p| CString::new(p.as_str()).unwrap());\n\n    let mut dc = CDaemonConfig {\n        unix_socket_path: unix_cstr.as_ref().map_or(ptr::null(), |c| c.as_ptr()),\n        tcp_port: config.tcp_port.unwrap_or(0),\n        http_port: config.http_port.unwrap_or(0),\n        pool_check_fn: ptr::null(),\n        pool_alive_fn: ptr::null(),\n        n_pools: 0,\n        eval_timeout: if config.eval_timeout > 0 { config.eval_timeout } else { 30 },\n    };\n\n    unsafe {\n        router_run(&mut dc as *mut CDaemonConfig as *mut c_void, router);\n        router_free(router);\n    }\n}\n\n/// Run a pre-built call packet on a remote worker node (SLURM mode).\n/// Reads a call packet from file, sends it to the pool, writes result as MessagePack.\nfn run_call_packet(config: &dispatch::NexusConfig, tmpdir: &str) {\n    use std::ffi::{c_char, c_void, CString};\n    use std::ptr;\n\n    extern \"C\" {\n        fn read_binary_file(\n            filename: *const c_char, file_size: *mut usize, errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn send_and_receive_over_socket(\n            socket_path: *const c_char, packet: *const u8, errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn get_morloc_data_packet_error_message(\n            data: *const u8, errmsg: *mut *mut c_char,\n        ) -> *mut c_char;\n        fn read_schema_from_packet_meta(\n            packet: *const u8, errmsg: *mut *mut c_char,\n        ) -> *mut c_char;\n        fn parse_schema(\n            schema_str: *const c_char, errmsg: *mut *mut c_char,\n        ) -> *mut morloc_runtime::cschema::CSchema;\n        fn get_morloc_data_packet_value(\n            data: *const u8, schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn pack_with_schema(\n            mlc: *const c_void, schema: *const morloc_runtime::cschema::CSchema,\n            mpkptr: *mut *mut c_char, mpk_size: *mut usize, errmsg: *mut *mut c_char,\n        ) -> i32;\n        fn write_atomic(\n            filename: *const c_char, data: *const u8, size: usize, errmsg: *mut *mut c_char,\n        ) -> i32;\n        fn print_morloc_data_packet(\n            packet: *const u8, schema: *const morloc_runtime::cschema::CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    let packet_path = config.packet_path.as_ref().unwrap();\n    let socket_base = match &config.socket_base {\n        Some(s) => s.clone(),\n        None => {\n            eprintln!(\"Error: --socket-base required for call-packet mode\");\n            process::clean_exit(1);\n        }\n    };\n    let socket_path = format!(\"{}/{}\", tmpdir, socket_base);\n\n    let packet_c = CString::new(packet_path.as_str()).unwrap();\n    let socket_c = CString::new(socket_path.as_str()).unwrap();\n    let mut errmsg: *mut c_char = ptr::null_mut();\n\n    // Read call packet from file\n    let mut packet_size: usize = 0;\n    let call_packet = unsafe { read_binary_file(packet_c.as_ptr(), &mut packet_size, &mut errmsg) };\n    if call_packet.is_null() || !errmsg.is_null() {\n        let msg = if !errmsg.is_null() {\n            let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n            unsafe { libc::free(errmsg as *mut c_void) };\n            s\n        } else {\n            \"unknown error\".into()\n        };\n        eprintln!(\"Error: failed to read call packet '{}': {}\", packet_path, msg);\n        process::clean_exit(1);\n    }\n\n    // Send to pool and receive response\n    let result_packet = unsafe {\n        send_and_receive_over_socket(socket_c.as_ptr(), call_packet, &mut errmsg)\n    };\n    unsafe { libc::free(call_packet as *mut c_void) };\n    if result_packet.is_null() || !errmsg.is_null() {\n        let msg = if !errmsg.is_null() {\n            let s = unsafe { std::ffi::CStr::from_ptr(errmsg) }.to_string_lossy().into_owned();\n            unsafe { libc::free(errmsg as *mut c_void) };\n            s\n        } else {\n            \"unknown error\".into()\n        };\n        eprintln!(\"Error: run failed: {}\", msg);\n        process::clean_exit(1);\n    }\n\n    // Check for error in response\n    let run_err = unsafe { get_morloc_data_packet_error_message(result_packet, &mut errmsg) };\n    if !run_err.is_null() {\n        let s = unsafe { std::ffi::CStr::from_ptr(run_err) }.to_string_lossy().into_owned();\n        unsafe { libc::free(run_err as *mut c_void) };\n        eprintln!(\"Error: run failed: {}\", s);\n        process::clean_exit(1);\n    }\n\n    // If output-form is \"packet\", write raw packet to output file\n    if config.output_format == dispatch::OutputFormat::Packet {\n        if let Some(ref output_path) = config.output_path {\n            let schema_str = unsafe { read_schema_from_packet_meta(result_packet, &mut errmsg) };\n            let schema = if !schema_str.is_null() {\n                unsafe { parse_schema(schema_str, &mut errmsg) }\n            } else {\n                ptr::null_mut()\n            };\n            unsafe {\n                print_morloc_data_packet(result_packet, schema, &mut errmsg);\n            };\n            // Also write as msgpack file\n            if !schema.is_null() {\n                let mlc = unsafe { get_morloc_data_packet_value(result_packet, schema, &mut errmsg) };\n                if !mlc.is_null() && errmsg.is_null() {\n                    let mut mpk_data: *mut c_char = ptr::null_mut();\n                    let mut mpk_size: usize = 0;\n                    unsafe { pack_with_schema(mlc as *const c_void, schema, &mut mpk_data, &mut mpk_size, &mut errmsg) };\n                    if !mpk_data.is_null() && errmsg.is_null() {\n                        let mpk_filename = format!(\"{}.mpk\", output_path);\n                        let mpk_c = CString::new(mpk_filename.as_str()).unwrap();\n                        unsafe { write_atomic(mpk_c.as_ptr(), mpk_data as *const u8, mpk_size, &mut errmsg) };\n                        unsafe { libc::free(mpk_data as *mut c_void) };\n                    }\n                }\n            }\n        }\n    }\n\n    unsafe { libc::free(result_packet as *mut c_void) };\n}\n"
  },
  {
    "path": "data/rust/morloc-nexus/src/manifest.rs",
    "content": "//! Manifest types -- thin re-export shim over the canonical schema\n//! definitions in the `morloc-manifest` crate.\n//!\n//! This module exists only so that existing code in this crate (and\n//! its consumers) can keep saying `crate::manifest::Manifest` without\n//! caring whether the types live here or in a sibling crate. The\n//! actual schema, with full doc comments and parsing logic, lives in\n//! `data/rust/morloc-manifest/src/lib.rs`.\n\npub use morloc_manifest::{\n    parse_manifest, read_manifest_payload, Arg, Command,\n    GroupEntry, Manifest, Pool,\n};\n"
  },
  {
    "path": "data/rust/morloc-nexus/src/process.rs",
    "content": "//! Pool daemon process management, signal handling, and lifecycle.\n//!\n//! Replaces the fork/exec, SIGCHLD, SIGTERM, clean_exit logic from nexus.c.\n\nuse std::ffi::CString;\nuse std::path::Path;\nuse std::sync::atomic::{AtomicBool, AtomicI32, Ordering};\nuse std::time::Duration;\n\nuse crate::manifest::Pool;\n\npub const MAX_DAEMONS: usize = 32;\n\nconst INITIAL_PING_TIMEOUT: Duration = Duration::from_millis(10);\nconst INITIAL_RETRY_DELAY: Duration = Duration::from_millis(1);\nconst RETRY_MULTIPLIER: f64 = 1.25;\nconst MAX_RETRIES: usize = 16;\n\n// ── Global state for signal handlers ───────────────────────────────────────\n\n/// PIDs of spawned pool daemons. 0 = unused, -1 = reaped.\nstatic PIDS: [AtomicI32; MAX_DAEMONS] = {\n    const INIT: AtomicI32 = AtomicI32::new(0);\n    [INIT; MAX_DAEMONS]\n};\n\n/// Process group IDs for cleanup.\nstatic PGIDS: [AtomicI32; MAX_DAEMONS] = {\n    const INIT: AtomicI32 = AtomicI32::new(0);\n    [INIT; MAX_DAEMONS]\n};\n\n/// Exit statuses saved by SIGCHLD handler.\nstatic EXIT_STATUSES: [AtomicI32; MAX_DAEMONS] = {\n    const INIT: AtomicI32 = AtomicI32::new(0);\n    [INIT; MAX_DAEMONS]\n};\n\n/// Re-entrancy guard for clean_exit.\nstatic CLEANING_UP: AtomicBool = AtomicBool::new(false);\n\n/// Global tmpdir path (set once in main, read during cleanup).\nstatic TMPDIR: std::sync::OnceLock<String> = std::sync::OnceLock::new();\n\n/// Socket info for each pool.\n///\n/// Pool stderr and stdout are intentionally NOT captured or intercepted by\n/// the nexus: a core morloc guarantee is that anything a sourced function\n/// prints to stderr/stdout is passed through unchanged. Raised exceptions\n/// are caught inside each pool's dispatch wrapper (see pool.py/pool.cpp/\n/// pool.R/pool.jl) and returned as morloc error packets, which the nexus\n/// then annotates with call-site context when bubbling them up.\npub struct PoolSocket {\n    pub lang: String,\n    pub socket_path: String,\n    pub syscmd: Vec<CString>,\n    pub pid: i32,\n}\n\n// ── Signal handlers (async-signal-safe) ────────────────────────────────────\n\n/// SIGCHLD handler: reap terminated children.\nextern \"C\" fn sigchld_handler(_sig: libc::c_int) {\n    #[cfg(target_os = \"linux\")]\n    let saved_errno = unsafe { *libc::__errno_location() };\n    #[cfg(target_os = \"macos\")]\n    let saved_errno = unsafe { *libc::__error() };\n    loop {\n        let mut status: libc::c_int = 0;\n        let pid = unsafe { libc::waitpid(-1, &mut status, libc::WNOHANG) };\n        if pid <= 0 {\n            break;\n        }\n        for i in 0..MAX_DAEMONS {\n            if PIDS[i].load(Ordering::Relaxed) == pid {\n                EXIT_STATUSES[i].store(status, Ordering::Relaxed);\n                PIDS[i].store(-1, Ordering::Relaxed);\n                break;\n            }\n        }\n    }\n    #[cfg(target_os = \"linux\")]\n    unsafe { *libc::__errno_location() = saved_errno };\n    #[cfg(target_os = \"macos\")]\n    unsafe { *libc::__error() = saved_errno };\n}\n\n/// SIGTERM/SIGINT handler: clean shutdown.\nextern \"C\" fn signal_exit_handler(sig: libc::c_int) {\n    if CLEANING_UP.load(Ordering::Relaxed) {\n        unsafe { libc::_exit(128 + sig) };\n    }\n    clean_exit(128 + sig);\n}\n\n/// Install signal handlers.\npub fn install_signal_handlers() {\n    unsafe {\n        // SIGCHLD\n        let mut sa: libc::sigaction = std::mem::zeroed();\n        sa.sa_sigaction = sigchld_handler as *const () as usize;\n        libc::sigemptyset(&mut sa.sa_mask);\n        sa.sa_flags = libc::SA_RESTART | libc::SA_NOCLDSTOP;\n        libc::sigaction(libc::SIGCHLD, &sa, std::ptr::null_mut());\n\n        // SIGTERM and SIGINT\n        let mut sa_exit: libc::sigaction = std::mem::zeroed();\n        sa_exit.sa_sigaction = signal_exit_handler as *const () as usize;\n        libc::sigemptyset(&mut sa_exit.sa_mask);\n        sa_exit.sa_flags = 0;\n        libc::sigaction(libc::SIGTERM, &sa_exit, std::ptr::null_mut());\n        libc::sigaction(libc::SIGINT, &sa_exit, std::ptr::null_mut());\n    }\n}\n\n/// Set the global tmpdir for cleanup.\npub fn set_tmpdir(path: String) {\n    let _ = TMPDIR.set(path);\n}\n\n/// Get the tmpdir path.\npub fn get_tmpdir() -> Option<&'static str> {\n    TMPDIR.get().map(|s| s.as_str())\n}\n\n// ── Clean exit ─────────────────────────────────────────────────────────────\n\n/// Terminate all pool daemons and clean up resources.\n///\n/// Race condition with stderr output: when a pool process is dying (e.g.,\n/// Python printing a traceback), its stderr writes may still be in a pipe\n/// buffer or mid-syscall when we send SIGTERM. The pool's signal handler\n/// (or SIG_DFL) may kill the process before its output reaches the\n/// terminal. We mitigate this by:\n/// 1. Flushing the nexus's own stderr first (so our error message is out)\n/// 2. Giving pools 200ms after SIGTERM before escalating to SIGKILL\n///    (up from the previous 50ms, which was too short for Python's\n///    atexit handlers and multiprocessing cleanup to flush buffers)\npub fn clean_exit(exit_code: i32) -> ! {\n    CLEANING_UP.store(true, Ordering::SeqCst);\n\n    // Flush nexus stderr so our error messages are visible even if\n    // the process is killed by a parent (e.g., shell pipeline).\n    unsafe { libc::fsync(2) };\n\n    // Block SIGCHLD during cleanup\n    unsafe {\n        let mut block_chld: libc::sigset_t = std::mem::zeroed();\n        libc::sigemptyset(&mut block_chld);\n        libc::sigaddset(&mut block_chld, libc::SIGCHLD);\n        libc::sigprocmask(libc::SIG_BLOCK, &block_chld, std::ptr::null_mut());\n    }\n\n    // Send SIGTERM to all pool process groups\n    for i in 0..MAX_DAEMONS {\n        let pgid = PGIDS[i].load(Ordering::Relaxed);\n        if pgid > 0 {\n            unsafe { libc::kill(-pgid, libc::SIGTERM) };\n        }\n    }\n\n    // Wait for groups to exit (up to 200ms per group, then SIGKILL).\n    // The 200ms window serves two purposes:\n    // - Lets pool signal handlers run (Python's signal_handler in pool.py\n    //   calls close_daemon and cleans up shared memory)\n    // - Lets any pending stderr writes (tracebacks, error messages) drain\n    //   to the terminal before the process is force-killed\n    for i in 0..MAX_DAEMONS {\n        let pgid = PGIDS[i].load(Ordering::Relaxed);\n        if pgid <= 0 {\n            continue;\n        }\n\n        // Reap any available children\n        while unsafe { libc::waitpid(-1, std::ptr::null_mut(), libc::WNOHANG) } > 0 {}\n        if unsafe { libc::kill(-pgid, 0) } == -1 {\n            continue;\n        }\n\n        let mut group_dead = false;\n        for _ in 0..100 {\n            while unsafe { libc::waitpid(-1, std::ptr::null_mut(), libc::WNOHANG) } > 0 {}\n            if unsafe { libc::kill(-pgid, 0) } == -1 {\n                group_dead = true;\n                break;\n            }\n            std::thread::sleep(Duration::from_millis(2));\n        }\n\n        if !group_dead {\n            unsafe { libc::kill(-pgid, libc::SIGKILL) };\n            std::thread::sleep(Duration::from_millis(50));\n        }\n    }\n\n    // Final reap\n    while unsafe { libc::waitpid(-1, std::ptr::null_mut(), libc::WNOHANG) } > 0 {}\n\n    // Clean up shared memory segments\n    extern \"C\" {\n        fn shclose(errmsg: *mut *mut std::ffi::c_char) -> bool;\n    }\n    unsafe {\n        let mut err: *mut std::ffi::c_char = std::ptr::null_mut();\n        shclose(&mut err);\n        if !err.is_null() {\n            libc::free(err as *mut libc::c_void);\n        }\n    }\n\n    // Clean up tmpdir\n    if let Some(dir) = get_tmpdir() {\n        let _ = std::fs::remove_dir_all(dir);\n    }\n\n    std::process::exit(exit_code);\n}\n\n// ── Pool daemon spawning ───────────────────────────────────────────────────\n\n/// Setup socket descriptors for all pools from the manifest.\npub fn setup_sockets(pools: &[Pool], tmpdir: &str, shm_basename: &str) -> Vec<PoolSocket> {\n    pools\n        .iter()\n        .map(|pool| {\n            let socket_path = format!(\"{}/{}\", tmpdir, pool.socket);\n\n            // Build syscmd: exec_args... socket_path tmpdir shm_basename\n            let mut syscmd: Vec<CString> = pool\n                .exec\n                .iter()\n                .map(|s| CString::new(s.as_str()).unwrap())\n                .collect();\n            syscmd.push(CString::new(socket_path.as_str()).unwrap());\n            syscmd.push(CString::new(tmpdir).unwrap());\n            syscmd.push(CString::new(shm_basename).unwrap());\n\n            PoolSocket {\n                lang: pool.lang.clone(),\n                socket_path,\n                syscmd,\n                pid: 0,\n            }\n        })\n        .collect()\n}\n\n/// Fork and exec a language pool daemon. Returns child PID.\n///\n/// The child inherits the nexus's stdin/stdout/stderr unchanged: anything a\n/// sourced function prints must reach the terminal byte-for-byte without\n/// morloc interposing. Runtime errors raised inside the pool are caught by\n/// the pool's own dispatch wrapper and returned as morloc error packets.\nfn start_language_server(socket: &PoolSocket) -> Result<i32, String> {\n    let pid = unsafe { libc::fork() };\n\n    if pid == 0 {\n        // Child process\n        unsafe { libc::setpgid(0, 0) };\n\n        let argv: Vec<*const libc::c_char> = socket\n            .syscmd\n            .iter()\n            .map(|s| s.as_ptr())\n            .chain(std::iter::once(std::ptr::null()))\n            .collect();\n\n        unsafe {\n            libc::execvp(argv[0], argv.as_ptr());\n        }\n        // Only reached if exec fails.\n        eprintln!(\n            \"execvp failed for {}: {}\",\n            socket.lang,\n            std::io::Error::last_os_error()\n        );\n        unsafe { libc::_exit(127) };\n    } else if pid > 0 {\n        // Parent: ensure child is in its own process group\n        unsafe { libc::setpgid(pid, pid) };\n        Ok(pid)\n    } else {\n        Err(format!(\"fork failed: {}\", std::io::Error::last_os_error()))\n    }\n}\n\n/// Start pool daemons for the given socket indices and wait for them to respond to pings.\npub fn start_daemons(sockets: &mut [PoolSocket], indices: &[usize]) -> Result<(), String> {\n    for &idx in indices {\n        let pid = start_language_server(&sockets[idx])?;\n        sockets[idx].pid = pid;\n        PIDS[idx].store(pid, Ordering::Relaxed);\n        PGIDS[idx].store(pid, Ordering::Relaxed);\n    }\n\n    // Wait for each daemon to respond to pings\n    for &idx in indices {\n        wait_for_daemon(&sockets[idx], idx)?;\n    }\n\n    Ok(())\n}\n\n/// Ping a daemon with exponential backoff until it responds.\n/// Matches the C nexus behavior: initial delay 1ms, multiplier 1.25,\n/// plus socket timeout that doubles from 10ms to ~10s.\nfn wait_for_daemon(socket: &PoolSocket, pool_index: usize) -> Result<(), String> {\n    use morloc_runtime::packet::PacketHeader;\n    use std::os::unix::net::UnixStream;\n    use std::io::{Read, Write};\n\n    let ping = PacketHeader::ping();\n    let ping_bytes = ping.to_bytes();\n    let mut retry_delay = INITIAL_RETRY_DELAY.as_secs_f64();\n    let mut ping_timeout = INITIAL_PING_TIMEOUT;\n\n    for attempt in 0..=MAX_RETRIES {\n        // Check if child already died. The pool's stderr was inherited\n        // directly, so any traceback it printed is already on the user's\n        // terminal; the nexus just reports the exit status here.\n        if PIDS[pool_index].load(Ordering::Relaxed) == -1 {\n            let status = EXIT_STATUSES[pool_index].load(Ordering::Relaxed);\n            return Err(format!(\n                \"Pool process for '{}' died unexpectedly (status: {})\",\n                socket.lang, status\n            ));\n        }\n\n        // Try to connect and ping\n        match UnixStream::connect(&socket.socket_path) {\n            Ok(mut stream) => {\n                let _ = stream.set_read_timeout(Some(ping_timeout));\n                let _ = stream.set_write_timeout(Some(ping_timeout));\n\n                if stream.write_all(&ping_bytes).is_ok() {\n                    let mut resp = [0u8; 32];\n                    if stream.read_exact(&mut resp).is_ok() {\n                        if let Ok(hdr) = PacketHeader::from_bytes(&resp) {\n                            if hdr.is_ping() {\n                                return Ok(());\n                            }\n                        }\n                    }\n                }\n            }\n            Err(_) => {}\n        }\n\n        if attempt == MAX_RETRIES {\n            return Err(format!(\n                \"Failed to ping pool '{}' at {} after {} retries\",\n                socket.lang, socket.socket_path, MAX_RETRIES\n            ));\n        }\n\n        // Sleep with exponential backoff\n        // Use the larger of retry_delay or ping_timeout to ensure we wait\n        // long enough for slow-starting pools (R, Python)\n        let wait = retry_delay.max(ping_timeout.as_secs_f64());\n        let secs = wait as u64;\n        let nanos = ((wait - secs as f64) * 1e9) as u32;\n        std::thread::sleep(Duration::new(secs, nanos));\n        retry_delay *= RETRY_MULTIPLIER;\n        ping_timeout = ping_timeout * 2;\n    }\n\n    unreachable!()\n}\n\n/// Return a C-compatible function pointer for pool_is_alive.\npub fn pool_is_alive_ptr() -> *const std::ffi::c_void {\n    extern \"C\" fn pool_alive_c(pool_index: usize) -> bool {\n        pool_is_alive(pool_index)\n    }\n    pool_alive_c as *const std::ffi::c_void\n}\n\n/// Check if a pool at given index is alive.\npub fn pool_is_alive(pool_index: usize) -> bool {\n    if pool_index >= MAX_DAEMONS {\n        return false;\n    }\n    let pid = PIDS[pool_index].load(Ordering::Relaxed);\n    if pid <= 0 {\n        return false;\n    }\n    unsafe { libc::kill(pid, 0) == 0 }\n}\n\n/// Get the exit status of a reaped pool, returning signal/exit info.\npub fn pool_death_info(pool_index: usize) -> Option<String> {\n    if PIDS[pool_index].load(Ordering::Relaxed) != -1 {\n        return None;\n    }\n    let st = EXIT_STATUSES[pool_index].load(Ordering::Relaxed);\n    if libc::WIFSIGNALED(st) {\n        let sig = libc::WTERMSIG(st);\n        Some(format!(\"Pool process crashed with signal {sig}\"))\n    } else if libc::WIFEXITED(st) {\n        let code = libc::WEXITSTATUS(st);\n        Some(format!(\"Pool process exited with status {code}\"))\n    } else {\n        Some(\"Pool process died unexpectedly\".into())\n    }\n}\n\n/// Validate that all pool executables exist.\npub fn validate_pools(pools: &[Pool]) -> Result<(), String> {\n    for pool in pools {\n        if let Some(exec) = pool.exec.last() {\n            if !Path::new(exec).exists() {\n                return Err(format!(\n                    \"Build artifacts missing or stale. Pool file '{}' not found. Re-run `morloc make`.\",\n                    exec\n                ));\n            }\n        }\n    }\n    Ok(())\n}\n\n/// Create a temporary directory for this nexus session.\npub fn make_tmpdir() -> Result<String, String> {\n    let template = CString::new(\"/tmp/morloc.XXXXXX\").unwrap();\n    let mut buf = template.into_bytes_with_nul();\n    let ptr = buf.as_mut_ptr() as *mut libc::c_char;\n    let result = unsafe { libc::mkdtemp(ptr) };\n    if result.is_null() {\n        return Err(format!(\n            \"Failed to create temporary directory: {}\",\n            std::io::Error::last_os_error()\n        ));\n    }\n    let cstr = unsafe { std::ffi::CStr::from_ptr(result) };\n    Ok(cstr.to_string_lossy().into_owned())\n}\n\n/// Generate a job hash from seed, pid, and timestamps.\npub fn make_job_hash(seed: u64) -> u64 {\n    use morloc_runtime::hash::xxh64;\n\n    let pid = std::process::id() as u64;\n    let now = std::time::SystemTime::now()\n        .duration_since(std::time::UNIX_EPOCH)\n        .unwrap_or_default();\n    let epoch_ns = now.as_nanos() as u64;\n\n    let data = format!(\"{}:{}:{}\", pid, epoch_ns, seed);\n    xxh64(data.as_bytes())\n}\n\n/// Become a subreaper so orphaned grandchildren get reparented to us.\n/// Only available on Linux; no-op on other platforms.\npub fn set_child_subreaper() {\n    #[cfg(target_os = \"linux\")]\n    unsafe {\n        libc::prctl(libc::PR_SET_CHILD_SUBREAPER, 1, 0, 0, 0);\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/Cargo.toml",
    "content": "[package]\nname = \"morloc-runtime\"\nversion = \"0.81.0\"\nedition = \"2021\"\ndescription = \"Morloc runtime library: shared memory, serialization, IPC, packet protocol\"\n# Version is intentionally synchronized with the morloc compiler version\n# (see ../../../package.yaml). The C-FFI manifest reader rejects\n# manifests whose recorded morloc_version differs from this crate's\n# CARGO_PKG_VERSION at parse time. Bumping the morloc compiler version\n# requires bumping this version in lockstep.\n\n[lib]\ncrate-type = [\"cdylib\", \"staticlib\", \"rlib\"]\n\n[dependencies]\nmorloc-manifest = { path = \"../morloc-manifest\" }\nlibc = { workspace = true }\nserde = { workspace = true }\nserde_json = { workspace = true }\nrmp = \"0.8\"\nrmp-serde = { workspace = true }\ntwox-hash = { workspace = true }\nnix = { workspace = true }\nthiserror = { workspace = true }\n\n[build-dependencies]\ncbindgen = \"0.28\"\n# cc crate no longer needed -- all C files ported to Rust\n"
  },
  {
    "path": "data/rust/morloc-runtime/build.rs",
    "content": "fn main() {\n    let target = std::env::var(\"TARGET\").unwrap_or_default();\n    println!(\"cargo:rustc-link-lib=pthread\");\n    if target.contains(\"linux\") {\n        println!(\"cargo:rustc-link-lib=rt\");\n    }\n    // The morloc compiler version is sourced from CARGO_PKG_VERSION\n    // (this crate's Cargo.toml), which is intentionally kept in\n    // lockstep with the morloc Haskell package.yaml.\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/cbindgen.toml",
    "content": "language = \"C\"\nheader = \"/* Generated by cbindgen - do not edit manually */\"\nautogen_warning = \"\"\ninclude_version = false\ndocumentation_style = \"c99\"\nno_includes = true\n\n[export]\nprefix = \"\"\n\n[fn]\nprefix = \"\"\n\n[parse]\nparse_deps = false\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/arrow_ffi.rs",
    "content": "//! Arrow C Data Interface implementation.\n//! Replaces arrow.c. Provides SHM ↔ Arrow conversion.\n\nuse std::ffi::{c_char, c_void};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::shm::{self, RelPtr};\n\nconst ARROW_SHM_MAGIC: u32 = 0xA770DA7A;\nconst ARROW_BUFFER_ALIGN: usize = 64;\n\nfn arrow_align_up(x: usize) -> usize {\n    (x + ARROW_BUFFER_ALIGN - 1) & !(ARROW_BUFFER_ALIGN - 1)\n}\n\n// ── Arrow C Data Interface structs (matching Apache spec) ────────────────────\n\n#[repr(C)]\npub struct ArrowSchema {\n    pub format: *const c_char,\n    pub name: *const c_char,\n    pub metadata: *const c_char,\n    pub flags: i64,\n    pub n_children: i64,\n    pub children: *mut *mut ArrowSchema,\n    pub dictionary: *mut ArrowSchema,\n    pub release: Option<unsafe extern \"C\" fn(*mut ArrowSchema)>,\n    pub private_data: *mut c_void,\n}\n\n#[repr(C)]\npub struct ArrowArray {\n    pub length: i64,\n    pub null_count: i64,\n    pub offset: i64,\n    pub n_buffers: i64,\n    pub n_children: i64,\n    pub buffers: *mut *const c_void,\n    pub children: *mut *mut ArrowArray,\n    pub dictionary: *mut ArrowArray,\n    pub release: Option<unsafe extern \"C\" fn(*mut ArrowArray)>,\n    pub private_data: *mut c_void,\n}\n\n// ── SHM header types ─────────────────────────────────────────────────────────\n\n#[repr(C)]\npub struct ArrowColumnDesc {\n    pub col_type: u32,       // morloc_serial_type\n    pub length: u64,\n    pub null_count: u64,\n    pub name_offset: u32,\n    pub name_length: u16,\n    pub data_offset: u64,\n    pub data_size: u64,\n}\n\n#[repr(C)]\npub struct ArrowShmHeader {\n    pub magic: u32,\n    pub n_columns: u32,\n    pub n_rows: u64,\n    pub total_size: u64,\n}\n\n// ── Type mapping ─────────────────────────────────────────────────────────────\n\n// Serial type constants matching C enum\nconst MORLOC_NIL: u32 = 0;\nconst MORLOC_BOOL: u32 = 1;\nconst MORLOC_SINT8: u32 = 2;\nconst MORLOC_SINT16: u32 = 3;\nconst MORLOC_SINT32: u32 = 4;\nconst MORLOC_SINT64: u32 = 5;\nconst MORLOC_UINT8: u32 = 6;\nconst MORLOC_UINT16: u32 = 7;\nconst MORLOC_UINT32: u32 = 8;\nconst MORLOC_UINT64: u32 = 9;\nconst MORLOC_FLOAT32: u32 = 10;\nconst MORLOC_FLOAT64: u32 = 11;\nconst MORLOC_STRING: u32 = 12;\n\n#[no_mangle]\npub extern \"C\" fn arrow_element_size(serial_type: u32) -> usize {\n    match serial_type {\n        MORLOC_BOOL | MORLOC_SINT8 | MORLOC_UINT8 => 1,\n        MORLOC_SINT16 | MORLOC_UINT16 => 2,\n        MORLOC_SINT32 | MORLOC_UINT32 | MORLOC_FLOAT32 => 4,\n        MORLOC_SINT64 | MORLOC_UINT64 | MORLOC_FLOAT64 => 8,\n        _ => 0,\n    }\n}\n\n#[no_mangle]\npub extern \"C\" fn arrow_format_string(serial_type: u32) -> *const c_char {\n    match serial_type {\n        MORLOC_BOOL => b\"b\\0\".as_ptr() as *const c_char,\n        MORLOC_SINT8 => b\"c\\0\".as_ptr() as *const c_char,\n        MORLOC_UINT8 => b\"C\\0\".as_ptr() as *const c_char,\n        MORLOC_SINT16 => b\"s\\0\".as_ptr() as *const c_char,\n        MORLOC_UINT16 => b\"S\\0\".as_ptr() as *const c_char,\n        MORLOC_SINT32 => b\"i\\0\".as_ptr() as *const c_char,\n        MORLOC_UINT32 => b\"I\\0\".as_ptr() as *const c_char,\n        MORLOC_SINT64 => b\"l\\0\".as_ptr() as *const c_char,\n        MORLOC_UINT64 => b\"L\\0\".as_ptr() as *const c_char,\n        MORLOC_FLOAT32 => b\"f\\0\".as_ptr() as *const c_char,\n        MORLOC_FLOAT64 => b\"g\\0\".as_ptr() as *const c_char,\n        MORLOC_STRING => b\"u\\0\".as_ptr() as *const c_char,\n        _ => ptr::null(),\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_format_to_type(format: *const c_char) -> u32 {\n    if format.is_null() || *format == 0 || *format.add(1) != 0 {\n        return MORLOC_NIL;\n    }\n    match *format as u8 {\n        b'b' => MORLOC_BOOL,\n        b'c' => MORLOC_SINT8,\n        b'C' => MORLOC_UINT8,\n        b's' => MORLOC_SINT16,\n        b'S' => MORLOC_UINT16,\n        b'i' => MORLOC_SINT32,\n        b'I' => MORLOC_UINT32,\n        b'l' => MORLOC_SINT64,\n        b'L' => MORLOC_UINT64,\n        b'f' => MORLOC_FLOAT32,\n        b'g' => MORLOC_FLOAT64,\n        b'u' => MORLOC_STRING,\n        _ => MORLOC_NIL,\n    }\n}\n\n// ── Column accessors (used by arrow_json.c) ──────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_column_desc(\n    header: *const ArrowShmHeader,\n    col_index: u32,\n) -> *const ArrowColumnDesc {\n    if header.is_null() || col_index >= (*header).n_columns {\n        return ptr::null();\n    }\n    let descs = (header as *const u8).add(std::mem::size_of::<ArrowShmHeader>()) as *const ArrowColumnDesc;\n    descs.add(col_index as usize)\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_column_data(\n    header: *const ArrowShmHeader,\n    col_index: u32,\n) -> *const c_void {\n    let desc = arrow_column_desc(header, col_index);\n    if desc.is_null() { return ptr::null(); }\n    (header as *const u8).add((*desc).data_offset as usize) as *const c_void\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_column_name(\n    header: *const ArrowShmHeader,\n    col_index: u32,\n) -> *const c_char {\n    let desc = arrow_column_desc(header, col_index);\n    if desc.is_null() { return ptr::null(); }\n    (header as *const u8).add((*desc).name_offset as usize) as *const c_char\n}\n\n// ── arrow_to_shm ─────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_to_shm(\n    array: *const ArrowArray,\n    schema: *const ArrowSchema,\n    errmsg: *mut *mut c_char,\n) -> RelPtr {\n    clear_errmsg(errmsg);\n\n    if array.is_null() || schema.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL array or schema\".into()));\n        return shm::RELNULL;\n    }\n\n    // Verify struct type\n    if (*schema).format.is_null() || libc::strcmp((*schema).format, b\"+s\\0\".as_ptr() as *const c_char) != 0 {\n        set_errmsg(errmsg, &MorlocError::Other(\"Expected struct schema (format '+s')\".into()));\n        return shm::RELNULL;\n    }\n\n    let n_cols = (*schema).n_children as usize;\n    let n_rows = (*array).length as usize;\n\n    if n_cols == 0 {\n        set_errmsg(errmsg, &MorlocError::Other(\"Arrow struct must have at least one column\".into()));\n        return shm::RELNULL;\n    }\n\n    let header_size = std::mem::size_of::<ArrowShmHeader>();\n    let descs_size = n_cols * std::mem::size_of::<ArrowColumnDesc>();\n\n    let mut names_size: usize = 0;\n    for i in 0..n_cols {\n        let child_schema = *(*schema).children.add(i);\n        let name = (*child_schema).name;\n        if !name.is_null() { names_size += libc::strlen(name); }\n    }\n\n    let data_start = arrow_align_up(header_size + descs_size + names_size);\n    let mut total_size = data_start;\n\n    for i in 0..n_cols {\n        let child_schema = *(*schema).children.add(i);\n        let col_type = arrow_format_to_type((*child_schema).format);\n        let elem_size = arrow_element_size(col_type);\n        if col_type == MORLOC_STRING {\n            let child = *(*array).children.add(i);\n            let offsets = if (*child).n_buffers >= 2 { *(*child).buffers.add(1) as *const i32 } else { ptr::null() };\n            let str_data_size = if !offsets.is_null() {\n                let off = (*child).offset as usize;\n                (*offsets.add(off + n_rows) - *offsets.add(off)) as usize\n            } else { 0 };\n            total_size = arrow_align_up(total_size)\n                + (n_rows + 1) * std::mem::size_of::<i32>()\n                + str_data_size;\n        } else {\n            if elem_size == 0 {\n                set_errmsg(errmsg, &MorlocError::Other(format!(\"Unsupported Arrow column type for column {}\", i)));\n                return shm::RELNULL;\n            }\n            total_size = arrow_align_up(total_size) + elem_size * n_rows;\n        }\n    }\n\n    let shm_ptr = match shm::shmalloc(total_size) {\n        Ok(p) => p,\n        Err(e) => { set_errmsg(errmsg, &e); return shm::RELNULL; }\n    };\n    ptr::write_bytes(shm_ptr, 0, total_size);\n\n    // Write header\n    let header = &mut *(shm_ptr as *mut ArrowShmHeader);\n    header.magic = ARROW_SHM_MAGIC;\n    header.n_columns = n_cols as u32;\n    header.n_rows = n_rows as u64;\n    header.total_size = total_size as u64;\n\n    let descs = shm_ptr.add(header_size) as *mut ArrowColumnDesc;\n    let mut name_cursor = header_size + descs_size;\n    let mut data_cursor = data_start;\n\n    for i in 0..n_cols {\n        let child_schema = *(*schema).children.add(i);\n        let child_array = *(*array).children.add(i);\n        let col_type = arrow_format_to_type((*child_schema).format);\n\n        data_cursor = arrow_align_up(data_cursor);\n\n        let name = if (*child_schema).name.is_null() { b\"\\0\".as_ptr() as *const c_char } else { (*child_schema).name };\n        let name_len = libc::strlen(name);\n\n        let desc = &mut *descs.add(i);\n        desc.col_type = col_type;\n        desc.length = n_rows as u64;\n        desc.null_count = (*child_array).null_count as u64;\n        desc.name_offset = name_cursor as u32;\n        desc.name_length = name_len as u16;\n        desc.data_offset = data_cursor as u64;\n\n        if name_len > 0 {\n            ptr::copy_nonoverlapping(name as *const u8, shm_ptr.add(name_cursor), name_len);\n        }\n        name_cursor += name_len;\n\n        if col_type == MORLOC_STRING {\n            let src_offsets = if (*child_array).n_buffers >= 2 { *(*child_array).buffers.add(1) as *const i32 } else { ptr::null() };\n            let src_data = if (*child_array).n_buffers >= 3 { *(*child_array).buffers.add(2) as *const u8 } else { ptr::null() };\n            let arr_offset = (*child_array).offset as usize;\n\n            let dst_offsets = shm_ptr.add(data_cursor) as *mut i32;\n            let base = if !src_offsets.is_null() { *src_offsets.add(arr_offset) } else { 0 };\n            for r in 0..=n_rows {\n                *dst_offsets.add(r) = if !src_offsets.is_null() { *src_offsets.add(arr_offset + r) - base } else { 0 };\n            }\n            let offsets_size = (n_rows + 1) * std::mem::size_of::<i32>();\n\n            let str_data_size = if !src_offsets.is_null() {\n                (*src_offsets.add(arr_offset + n_rows) - base) as usize\n            } else { 0 };\n            if str_data_size > 0 && !src_data.is_null() {\n                ptr::copy_nonoverlapping(src_data.add(base as usize), shm_ptr.add(data_cursor + offsets_size), str_data_size);\n            }\n\n            let buf_size = offsets_size + str_data_size;\n            desc.data_size = buf_size as u64;\n            data_cursor += buf_size;\n        } else {\n            let elem_size = arrow_element_size(col_type);\n            let buf_size = elem_size * n_rows;\n            desc.data_size = buf_size as u64;\n\n            if (*child_array).n_buffers >= 2 && !(*(*child_array).buffers.add(1)).is_null() {\n                let src = (*(*child_array).buffers.add(1) as *const u8).add((*child_array).offset as usize * elem_size);\n                if buf_size > 0 {\n                    ptr::copy_nonoverlapping(src, shm_ptr.add(data_cursor), buf_size);\n                }\n            }\n            data_cursor += buf_size;\n        }\n    }\n\n    match shm::abs2rel(shm_ptr) {\n        Ok(r) => r,\n        Err(e) => { set_errmsg(errmsg, &e); shm::RELNULL }\n    }\n}\n\n// ── arrow_validate ───────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_validate(\n    header: *const ArrowShmHeader,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    if header.is_null() { set_errmsg(errmsg, &MorlocError::Other(\"NULL arrow header\".into())); return 1; }\n    if (*header).magic != ARROW_SHM_MAGIC { set_errmsg(errmsg, &MorlocError::Other(\"Invalid arrow SHM magic\".into())); return 1; }\n    if schema.is_null() { set_errmsg(errmsg, &MorlocError::Other(\"NULL schema for arrow validation\".into())); return 1; }\n    // MORLOC_MAP = 16\n    if (*schema).serial_type != crate::schema::SerialType::Map as u32 { set_errmsg(errmsg, &MorlocError::Other(\"Expected MORLOC_MAP schema for arrow table\".into())); return 1; }\n\n    let n_cols = (*header).n_columns as usize;\n    if n_cols != (*schema).size {\n        set_errmsg(errmsg, &MorlocError::Other(format!(\"Column count mismatch: arrow has {}, schema has {}\", n_cols, (*schema).size)));\n        return 1;\n    }\n\n    for i in 0..n_cols {\n        let desc = arrow_column_desc(header, i as u32);\n        if desc.is_null() {\n            set_errmsg(errmsg, &MorlocError::Other(format!(\"NULL column descriptor at index {}\", i)));\n            return 1;\n        }\n        let param_schema = *(*schema).parameters.add(i);\n        if (*desc).col_type != (*param_schema).serial_type as u32 {\n            set_errmsg(errmsg, &MorlocError::Other(format!(\"Column {} type mismatch\", i)));\n            return 1;\n        }\n    }\n\n    0\n}\n\n// ── Release callbacks for arrow_from_shm ─────────────────────────────────────\n\nunsafe extern \"C\" fn arrow_shm_child_schema_release(schema: *mut ArrowSchema) {\n    if schema.is_null() { return; }\n    if !(*schema).name.is_null() { libc::free((*schema).name as *mut c_void); }\n    (*schema).name = ptr::null();\n    (*schema).release = None;\n}\n\nunsafe extern \"C\" fn arrow_shm_child_array_release(array: *mut ArrowArray) {\n    if array.is_null() { return; }\n    if !(*array).buffers.is_null() { libc::free((*array).buffers as *mut c_void); }\n    (*array).buffers = ptr::null_mut();\n    (*array).release = None;\n}\n\nunsafe extern \"C\" fn arrow_shm_schema_release(schema: *mut ArrowSchema) {\n    if schema.is_null() { return; }\n    for i in 0..(*schema).n_children as usize {\n        let child = *(*schema).children.add(i);\n        if !child.is_null() {\n            if let Some(release) = (*child).release { release(child); }\n            libc::free(child as *mut c_void);\n        }\n    }\n    libc::free((*schema).children as *mut c_void);\n    (*schema).children = ptr::null_mut();\n    (*schema).release = None;\n}\n\nunsafe extern \"C\" fn arrow_shm_array_release(array: *mut ArrowArray) {\n    if array.is_null() { return; }\n    for i in 0..(*array).n_children as usize {\n        let child = *(*array).children.add(i);\n        if !child.is_null() {\n            if let Some(release) = (*child).release { release(child); }\n            libc::free(child as *mut c_void);\n        }\n    }\n    libc::free((*array).children as *mut c_void);\n    (*array).children = ptr::null_mut();\n    if !(*array).buffers.is_null() { libc::free((*array).buffers as *mut c_void); }\n    (*array).buffers = ptr::null_mut();\n    (*array).release = None;\n}\n\n// ── arrow_from_shm ───────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn arrow_from_shm(\n    header: *const ArrowShmHeader,\n    out_schema: *mut ArrowSchema,\n    out_array: *mut ArrowArray,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    if header.is_null() { set_errmsg(errmsg, &MorlocError::Other(\"NULL arrow header\".into())); return 1; }\n    if (*header).magic != ARROW_SHM_MAGIC { set_errmsg(errmsg, &MorlocError::Other(\"Invalid arrow SHM magic\".into())); return 1; }\n\n    let n_cols = (*header).n_columns as usize;\n    let n_rows = (*header).n_rows as i64;\n\n    // Parent schema (struct type)\n    ptr::write_bytes(out_schema, 0, 1);\n    (*out_schema).format = b\"+s\\0\".as_ptr() as *const c_char;\n    (*out_schema).n_children = n_cols as i64;\n    (*out_schema).children = libc::calloc(n_cols, std::mem::size_of::<*mut ArrowSchema>()) as *mut *mut ArrowSchema;\n    (*out_schema).release = Some(arrow_shm_schema_release);\n\n    // Parent array\n    ptr::write_bytes(out_array, 0, 1);\n    (*out_array).length = n_rows;\n    (*out_array).n_buffers = 1;\n    (*out_array).buffers = libc::calloc(1, std::mem::size_of::<*const c_void>()) as *mut *const c_void;\n    (*out_array).n_children = n_cols as i64;\n    (*out_array).children = libc::calloc(n_cols, std::mem::size_of::<*mut ArrowArray>()) as *mut *mut ArrowArray;\n    (*out_array).release = Some(arrow_shm_array_release);\n\n    for i in 0..n_cols {\n        let desc = arrow_column_desc(header, i as u32);\n\n        // Child schema\n        let child_s = libc::calloc(1, std::mem::size_of::<ArrowSchema>()) as *mut ArrowSchema;\n        (*child_s).release = Some(arrow_shm_child_schema_release);\n        *(*out_schema).children.add(i) = child_s;\n\n        (*child_s).format = arrow_format_string((*desc).col_type);\n        let raw_name = arrow_column_name(header, i as u32);\n        let name_len = (*desc).name_length as usize;\n        let name_copy = libc::calloc(name_len + 1, 1) as *mut c_char;\n        if !raw_name.is_null() && name_len > 0 {\n            ptr::copy_nonoverlapping(raw_name as *const u8, name_copy as *mut u8, name_len);\n        }\n        (*child_s).name = name_copy;\n\n        // Child array\n        let child_a = libc::calloc(1, std::mem::size_of::<ArrowArray>()) as *mut ArrowArray;\n        (*child_a).release = Some(arrow_shm_child_array_release);\n        *(*out_array).children.add(i) = child_a;\n\n        (*child_a).length = n_rows;\n        (*child_a).null_count = (*desc).null_count as i64;\n\n        let col_buf = arrow_column_data(header, i as u32);\n        if (*desc).col_type == MORLOC_STRING {\n            (*child_a).n_buffers = 3;\n            (*child_a).buffers = libc::calloc(3, std::mem::size_of::<*const c_void>()) as *mut *const c_void;\n            *(*child_a).buffers.add(1) = col_buf; // offsets\n            *(*child_a).buffers.add(2) = (col_buf as *const u8).add((n_rows as usize + 1) * std::mem::size_of::<i32>()) as *const c_void;\n        } else {\n            (*child_a).n_buffers = 2;\n            (*child_a).buffers = libc::calloc(2, std::mem::size_of::<*const c_void>()) as *mut *const c_void;\n            *(*child_a).buffers.add(1) = col_buf; // zero-copy data\n        }\n    }\n\n    0\n}\n\n// ── Arrow JSON/Table output (replaces arrow_json.c) ──────────────────────────\n\nunsafe fn print_arrow_value(desc: *const ArrowColumnDesc, col_data: *const u8, row: u64) {\n    let row = row as usize;\n    match (*desc).col_type {\n        MORLOC_BOOL => {\n            let v = *col_data.add(row);\n            if v != 0 { libc::printf(b\"true\\0\".as_ptr() as *const c_char); }\n            else { libc::printf(b\"false\\0\".as_ptr() as *const c_char); }\n        }\n        MORLOC_SINT8 => { libc::printf(b\"%d\\0\".as_ptr() as *const c_char, *(col_data as *const i8).add(row) as i32); }\n        MORLOC_SINT16 => { libc::printf(b\"%d\\0\".as_ptr() as *const c_char, *(col_data as *const i16).add(row) as i32); }\n        MORLOC_SINT32 => { libc::printf(b\"%d\\0\".as_ptr() as *const c_char, *(col_data as *const i32).add(row)); }\n        MORLOC_SINT64 => { libc::printf(b\"%ld\\0\".as_ptr() as *const c_char, *(col_data as *const i64).add(row)); }\n        MORLOC_UINT8 => { libc::printf(b\"%u\\0\".as_ptr() as *const c_char, *col_data.add(row) as u32); }\n        MORLOC_UINT16 => { libc::printf(b\"%u\\0\".as_ptr() as *const c_char, *(col_data as *const u16).add(row) as u32); }\n        MORLOC_UINT32 => { libc::printf(b\"%u\\0\".as_ptr() as *const c_char, *(col_data as *const u32).add(row)); }\n        MORLOC_UINT64 => { libc::printf(b\"%lu\\0\".as_ptr() as *const c_char, *(col_data as *const u64).add(row)); }\n        MORLOC_FLOAT32 => { libc::printf(b\"%.7g\\0\".as_ptr() as *const c_char, *(col_data as *const f32).add(row) as f64); }\n        MORLOC_FLOAT64 => { libc::printf(b\"%.15g\\0\".as_ptr() as *const c_char, *(col_data as *const f64).add(row)); }\n        MORLOC_STRING => {\n            let offsets = col_data as *const i32;\n            let str_data = offsets.add((*desc).length as usize + 1) as *const u8;\n            let start = *offsets.add(row) as usize;\n            let end = *offsets.add(row + 1) as usize;\n            libc::putchar(b'\"' as i32);\n            for i in start..end {\n                let c = *str_data.add(i);\n                match c {\n                    b'\"' => { libc::printf(b\"\\\\\\\"\\0\".as_ptr() as *const c_char); }\n                    b'\\\\' => { libc::printf(b\"\\\\\\\\\\0\".as_ptr() as *const c_char); }\n                    b'\\n' => { libc::printf(b\"\\\\n\\0\".as_ptr() as *const c_char); }\n                    b'\\r' => { libc::printf(b\"\\\\r\\0\".as_ptr() as *const c_char); }\n                    b'\\t' => { libc::printf(b\"\\\\t\\0\".as_ptr() as *const c_char); }\n                    _ if c < 32 => { libc::printf(b\"\\\\u%04x\\0\".as_ptr() as *const c_char, c as u32); }\n                    _ => { libc::putchar(c as i32); }\n                }\n            }\n            libc::putchar(b'\"' as i32);\n        }\n        _ => { libc::printf(b\"null\\0\".as_ptr() as *const c_char); }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn print_arrow_as_json(\n    data: *const c_void,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    use crate::error::{clear_errmsg, set_errmsg, MorlocError};\n    clear_errmsg(errmsg);\n\n    let header = data as *const ArrowShmHeader;\n    if header.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL arrow data\".into()));\n        return false;\n    }\n    if (*header).magic != ARROW_SHM_MAGIC {\n        set_errmsg(errmsg, &MorlocError::Other(format!(\"Invalid arrow SHM magic: 0x{:08x}\", (*header).magic)));\n        return false;\n    }\n\n    let n_cols = (*header).n_columns;\n    let n_rows = (*header).n_rows;\n\n    libc::putchar(b'[' as i32);\n    for r in 0..n_rows {\n        if r > 0 { libc::putchar(b',' as i32); }\n        libc::putchar(b'{' as i32);\n        for c in 0..n_cols {\n            if c > 0 { libc::putchar(b',' as i32); }\n            let desc = arrow_column_desc(header, c);\n            let name = arrow_column_name(header, c);\n            let col_data = arrow_column_data(header, c);\n            if !name.is_null() {\n                libc::printf(b\"\\\"%.*s\\\":\\0\".as_ptr() as *const c_char, (*desc).name_length as i32, name);\n            }\n            if !desc.is_null() && !col_data.is_null() {\n                print_arrow_value(desc, col_data as *const u8, r);\n            } else {\n                libc::printf(b\"null\\0\".as_ptr() as *const c_char);\n            }\n        }\n        libc::putchar(b'}' as i32);\n    }\n    libc::printf(b\"]\\n\\0\".as_ptr() as *const c_char);\n    true\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn print_arrow_as_table(\n    data: *const c_void,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    use crate::error::{clear_errmsg, set_errmsg, MorlocError};\n    clear_errmsg(errmsg);\n\n    let header = data as *const ArrowShmHeader;\n    if header.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL arrow data\".into()));\n        return false;\n    }\n    if (*header).magic != ARROW_SHM_MAGIC {\n        set_errmsg(errmsg, &MorlocError::Other(format!(\"Invalid arrow SHM magic: 0x{:08x}\", (*header).magic)));\n        return false;\n    }\n\n    let n_cols = (*header).n_columns;\n    let n_rows = (*header).n_rows;\n\n    for c in 0..n_cols {\n        if c > 0 { libc::putchar(b'\\t' as i32); }\n        let desc = arrow_column_desc(header, c);\n        let name = arrow_column_name(header, c);\n        if !name.is_null() && !desc.is_null() {\n            libc::printf(b\"%.*s\\0\".as_ptr() as *const c_char, (*desc).name_length as i32, name);\n        }\n    }\n    libc::putchar(b'\\n' as i32);\n\n    for r in 0..n_rows {\n        for c in 0..n_cols {\n            if c > 0 { libc::putchar(b'\\t' as i32); }\n            let desc = arrow_column_desc(header, c);\n            let col_data = arrow_column_data(header, c);\n            if !desc.is_null() && !col_data.is_null() {\n                print_arrow_value(desc, col_data as *const u8, r);\n            }\n        }\n        libc::putchar(b'\\n' as i32);\n    }\n    true\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/cache.rs",
    "content": "//! File-based packet caching with xxHash keys.\n//! Replaces cache.c.\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::hash;\nuse crate::shm;\n\n// ── hash_voidstar ──────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn hash_voidstar(\n    data: *const c_void,\n    schema: *const CSchema,\n    seed: u64,\n    errmsg: *mut *mut c_char,\n) -> u64 {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    match hash_voidstar_inner(data as *const u8, &rs, seed) {\n        Ok(h) => h,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            0\n        }\n    }\n}\n\nfn hash_voidstar_inner(\n    data: *const u8,\n    schema: &crate::schema::Schema,\n    seed: u64,\n) -> Result<u64, MorlocError> {\n    use crate::schema::SerialType;\n\n    // SAFETY: data points to voidstar data in SHM with layout described by schema.\n    // All reads (Array headers, element data) are within schema-defined bounds.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let arr = &*(data as *const shm::Array);\n                let elem_width = if schema.parameters.is_empty() {\n                    1 // string bytes\n                } else {\n                    schema.parameters[0].width\n                };\n                let elem_data = shm::rel2abs(arr.data)?;\n\n                if schema.is_fixed_width() || schema.serial_type == SerialType::String {\n                    let total = elem_width * arr.size;\n                    let bytes = std::slice::from_raw_parts(elem_data, total);\n                    Ok(hash::xxh64_with_seed(bytes, seed))\n                } else {\n                    let mut h = seed;\n                    for i in 0..arr.size {\n                        h = hash_voidstar_inner(\n                            elem_data.add(i * elem_width),\n                            &schema.parameters[0],\n                            h,\n                        )?;\n                    }\n                    Ok(h)\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                if schema.is_fixed_width() {\n                    let bytes = std::slice::from_raw_parts(data, schema.width);\n                    Ok(hash::xxh64_with_seed(bytes, seed))\n                } else {\n                    let mut h = seed;\n                    for i in 0..schema.parameters.len() {\n                        h = hash_voidstar_inner(\n                            data.add(schema.offsets[i]),\n                            &schema.parameters[i],\n                            h,\n                        )?;\n                    }\n                    Ok(h)\n                }\n            }\n            _ => {\n                let bytes = std::slice::from_raw_parts(data, schema.width);\n                Ok(hash::xxh64_with_seed(bytes, seed))\n            }\n        }\n    }\n}\n\n// ── hash_morloc_packet ─────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn hash_morloc_packet(\n    packet: *const u8,\n    schema: *const CSchema,\n    seed: u64,\n    hash_out: *mut u64,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    *hash_out = 0;\n\n    extern \"C\" {\n        fn read_morloc_packet_header(\n            msg: *const u8,\n            errmsg: *mut *mut c_char,\n        ) -> *const crate::packet::PacketHeader;\n        fn morloc_packet_size(packet: *const u8, errmsg: *mut *mut c_char) -> usize;\n        fn get_morloc_data_packet_value(\n            data: *const u8,\n            schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let header = read_morloc_packet_header(packet, &mut err);\n    if header.is_null() {\n        if !err.is_null() {\n            *errmsg = err;\n        }\n        return false;\n    }\n\n    let cmd_type = (*header).command_type();\n    if cmd_type == crate::packet::PACKET_TYPE_CALL {\n        let midx = { (*header).command.call.midx };\n        *hash_out = crate::utility::mix(seed, midx as u64);\n        let offset = { (*header).offset } as usize;\n        let length = { (*header).length } as usize;\n        let arg_data = packet.add(32 + offset);\n        let mut arg_start = 0usize;\n        while arg_start < length {\n            let arg_size = morloc_packet_size(arg_data.add(arg_start), &mut err);\n            if !err.is_null() {\n                *errmsg = err;\n                return false;\n            }\n            let arg_bytes = std::slice::from_raw_parts(arg_data.add(arg_start), arg_size);\n            *hash_out = crate::utility::mix(*hash_out, hash::xxh64_with_seed(arg_bytes, *hash_out));\n            arg_start += arg_size;\n        }\n    } else if cmd_type == crate::packet::PACKET_TYPE_DATA {\n        let voidstar = get_morloc_data_packet_value(packet, schema, &mut err);\n        if voidstar.is_null() {\n            if !err.is_null() {\n                *errmsg = err;\n            }\n            return false;\n        }\n        let rs = CSchema::to_rust(schema);\n        match hash_voidstar_inner(voidstar, &rs, seed) {\n            Ok(h) => *hash_out = h,\n            Err(e) => {\n                set_errmsg(errmsg, &e);\n                return false;\n            }\n        }\n    } else {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\"Cannot hash packet with command 0x{:02x}\", cmd_type)),\n        );\n        return false;\n    }\n\n    true\n}\n\n// ── Cache filename generation ──────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_cache_filename_ext(\n    key: u64,\n    cache_path: *const c_char,\n    ext: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n    let path = CStr::from_ptr(cache_path).to_string_lossy();\n    let extension = CStr::from_ptr(ext).to_string_lossy();\n    let filename = format!(\"{}/{:016x}{}\", path, key, extension);\n    match CString::new(filename) {\n        Ok(cs) => cs.into_raw(),\n        Err(_) => {\n            set_errmsg(errmsg, &MorlocError::Other(\"CString conversion failed\".into()));\n            ptr::null_mut()\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_cache_filename(\n    key: u64,\n    cache_path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    let ext = CString::new(\".packet\").unwrap();\n    make_cache_filename_ext(key, cache_path, ext.as_ptr(), errmsg)\n}\n\n// ── Cache operations ───────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn put_cache_packet(\n    voidstar: *const u8,\n    schema: *const CSchema,\n    key: u64,\n    cache_path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn make_mpk_data_packet(filename: *const c_char, schema: *const CSchema) -> *mut u8;\n        fn morloc_packet_size(packet: *const u8, errmsg: *mut *mut c_char) -> usize;\n        fn pack_with_schema(\n            mlc: *const c_void, schema: *const CSchema,\n            mpk: *mut *mut c_char, mpk_size: *mut usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n        fn write_atomic(\n            filename: *const c_char, data: *const u8, size: usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n\n    // Generate filenames\n    let pkt_filename = make_cache_filename(key, cache_path, &mut err);\n    if pkt_filename.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    let dat_ext = CString::new(\".dat\").unwrap();\n    let dat_filename = make_cache_filename_ext(key, cache_path, dat_ext.as_ptr(), &mut err);\n    if dat_filename.is_null() {\n        libc::free(pkt_filename as *mut c_void);\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Create data packet pointing to the .dat file\n    let data_packet = make_mpk_data_packet(dat_filename, schema);\n    if data_packet.is_null() {\n        libc::free(pkt_filename as *mut c_void);\n        libc::free(dat_filename as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to create data packet\".into()));\n        return ptr::null_mut();\n    }\n\n    let pkt_size = morloc_packet_size(data_packet, &mut err);\n\n    // Pack voidstar to msgpack\n    let mut mpk_data: *mut c_char = ptr::null_mut();\n    let mut mpk_size: usize = 0;\n    let rc = pack_with_schema(voidstar as *const c_void, schema, &mut mpk_data, &mut mpk_size, &mut err);\n    if rc != 0 {\n        libc::free(data_packet as *mut c_void);\n        libc::free(pkt_filename as *mut c_void);\n        libc::free(dat_filename as *mut c_void);\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Write packet file\n    write_atomic(pkt_filename, data_packet, pkt_size, &mut err);\n    libc::free(data_packet as *mut c_void);\n    if !err.is_null() {\n        libc::free(mpk_data as *mut c_void);\n        libc::free(pkt_filename as *mut c_void);\n        libc::free(dat_filename as *mut c_void);\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Write data file\n    write_atomic(dat_filename, mpk_data as *const u8, mpk_size, &mut err);\n    libc::free(mpk_data as *mut c_void);\n    libc::free(dat_filename as *mut c_void);\n    if !err.is_null() {\n        libc::free(pkt_filename as *mut c_void);\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Return the packet filename\n    let result = libc::strdup(pkt_filename);\n    libc::free(pkt_filename as *mut c_void);\n    result\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn get_cache_packet(\n    key: u64,\n    cache_path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n\n    let filename = make_cache_filename(key, cache_path, &mut err);\n    if filename.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    extern \"C\" {\n        fn read_binary_file(\n            filename: *const c_char, file_size: *mut usize,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n    }\n\n    let mut file_size: usize = 0;\n    let data = read_binary_file(filename, &mut file_size, &mut err);\n    libc::free(filename as *mut c_void);\n    if data.is_null() {\n        *errmsg = err;\n    }\n    data\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn del_cache_packet(\n    key: u64,\n    cache_path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n\n    let filename = make_cache_filename(key, cache_path, &mut err);\n    if filename.is_null() {\n        *errmsg = err;\n        return false;\n    }\n\n    let rc = libc::unlink(filename);\n    if rc != 0 {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\n                \"Failed to delete cache file '{}'\",\n                CStr::from_ptr(filename).to_string_lossy()\n            )),\n        );\n        libc::free(filename as *mut c_void);\n        return false;\n    }\n    libc::free(filename as *mut c_void);\n    true\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn check_cache_packet(\n    key: u64,\n    cache_path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n\n    let filename = make_cache_filename(key, cache_path, &mut err);\n    if filename.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    let mut sb: libc::stat = std::mem::zeroed();\n    if libc::stat(filename, &mut sb) == 0 {\n        let result = libc::strdup(filename);\n        libc::free(filename as *mut c_void);\n        return result;\n    }\n    libc::free(filename as *mut c_void);\n    ptr::null_mut() // Not an error — cache miss\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/cli.rs",
    "content": "//! CLI argument handling and voidstar utility functions.\n//! Replaces cli.c.\n\nuse std::ffi::{c_char, c_void, CStr};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::packet;\nuse crate::shm;\n\n// ── argument_t lifecycle ───────────────────────────────────────────────────\n\n// argument_t is defined in eval.h (C). We use it opaquely via libc pointers.\n// The struct: { value: *mut c_char, fields: *mut *mut c_char, default_fields: *mut *mut c_char, size: usize }\n#[repr(C)]\npub struct ArgumentT {\n    pub value: *mut c_char,\n    pub fields: *mut *mut c_char,\n    pub default_fields: *mut *mut c_char,\n    pub size: usize,\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn initialize_positional(value: *mut c_char) -> *mut ArgumentT {\n    let arg = libc::calloc(1, std::mem::size_of::<ArgumentT>()) as *mut ArgumentT;\n    if arg.is_null() {\n        return ptr::null_mut();\n    }\n    (*arg).value = if value.is_null() {\n        ptr::null_mut()\n    } else {\n        libc::strdup(value)\n    };\n    (*arg).size = 0;\n    arg\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn initialize_unrolled(\n    size: usize,\n    default_value: *mut c_char,\n    fields: *mut *mut c_char,\n    default_fields: *mut *mut c_char,\n) -> *mut ArgumentT {\n    let arg = libc::calloc(1, std::mem::size_of::<ArgumentT>()) as *mut ArgumentT;\n    if arg.is_null() {\n        return ptr::null_mut();\n    }\n    (*arg).value = if default_value.is_null() {\n        ptr::null_mut()\n    } else {\n        libc::strdup(default_value)\n    };\n    (*arg).size = size;\n\n    (*arg).fields = libc::calloc(size, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n    for i in 0..size {\n        let f = *fields.add(i);\n        if !f.is_null() {\n            *(*arg).fields.add(i) = libc::strdup(f);\n        }\n    }\n\n    (*arg).default_fields =\n        libc::calloc(size, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n    for i in 0..size {\n        let d = *default_fields.add(i);\n        if !d.is_null() {\n            *(*arg).default_fields.add(i) = libc::strdup(d);\n        }\n    }\n\n    arg\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn free_argument_t(arg: *mut ArgumentT) {\n    if arg.is_null() {\n        return;\n    }\n    if !(*arg).value.is_null() {\n        libc::free((*arg).value as *mut c_void);\n    }\n    if !(*arg).fields.is_null() {\n        for i in 0..(*arg).size {\n            let f = *(*arg).fields.add(i);\n            if !f.is_null() {\n                libc::free(f as *mut c_void);\n            }\n        }\n        libc::free((*arg).fields as *mut c_void);\n    }\n    if !(*arg).default_fields.is_null() {\n        for i in 0..(*arg).size {\n            let d = *(*arg).default_fields.add(i);\n            if !d.is_null() {\n                libc::free(d as *mut c_void);\n            }\n        }\n        libc::free((*arg).default_fields as *mut c_void);\n    }\n    libc::free(arg as *mut c_void);\n}\n\n// ── shfree_by_schema ───────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn shfree_by_schema(\n    ptr: *mut c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    if ptr.is_null() || schema.is_null() {\n        return true;\n    }\n    let rs = CSchema::to_rust(schema);\n    match shfree_by_schema_inner(ptr as *mut u8, &rs) {\n        Ok(_) => true,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            false\n        }\n    }\n}\n\nfn shfree_by_schema_inner(\n    ptr: *mut u8,\n    schema: &crate::schema::Schema,\n) -> Result<(), MorlocError> {\n    use crate::schema::SerialType;\n\n    // SAFETY: ptr points to voidstar data in SHM with layout described by schema.\n    // We recursively visit sub-structures and zero metadata before the parent shfree.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let arr = &*(ptr as *const shm::Array);\n                if arr.data > 0 {\n                    if !schema.parameters.is_empty() && !schema.parameters[0].is_fixed_width() {\n                        let arr_data = shm::rel2abs(arr.data)?;\n                        let elem_width = schema.parameters[0].width;\n                        for i in 0..arr.size {\n                            shfree_by_schema_inner(\n                                arr_data.add(i * elem_width),\n                                &schema.parameters[0],\n                            )?;\n                        }\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                for i in 0..schema.parameters.len() {\n                    let child = ptr.add(schema.offsets[i]);\n                    shfree_by_schema_inner(child, &schema.parameters[i])?;\n                }\n            }\n            SerialType::Tensor => {\n                // shape and data are inline, freed by parent shfree\n            }\n            _ => {\n                // fixed-size: no sub-data\n            }\n        }\n        // Zero this node's metadata\n        std::ptr::write_bytes(ptr, 0, schema.width);\n    }\n    Ok(())\n}\n\n// ── adjust_voidstar_relptrs ────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn adjust_voidstar_relptrs(\n    data: *mut c_void,\n    schema: *const CSchema,\n    base_rel: shm::RelPtr,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    match adjust_relptrs_inner(data as *mut u8, &rs, base_rel) {\n        Ok(_) => 0,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            1\n        }\n    }\n}\n\nfn adjust_relptrs_inner(\n    data: *mut u8,\n    schema: &crate::schema::Schema,\n    base_rel: shm::RelPtr,\n) -> Result<(), MorlocError> {\n    use crate::schema::SerialType;\n\n    // SAFETY: data points to a voidstar blob in SHM. We adjust relptrs in-place;\n    // all pointer arithmetic stays within the blob's bounds as defined by schema.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let arr = &mut *(data as *mut shm::Array);\n                arr.data += base_rel;\n                if !schema.parameters.is_empty() && !schema.parameters[0].is_fixed_width() {\n                    let arr_data = shm::rel2abs(arr.data)?;\n                    let elem_width = schema.parameters[0].width;\n                    for i in 0..arr.size {\n                        adjust_relptrs_inner(\n                            arr_data.add(i * elem_width),\n                            &schema.parameters[0],\n                            base_rel,\n                        )?;\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                for i in 0..schema.parameters.len() {\n                    adjust_relptrs_inner(\n                        data.add(schema.offsets[i]),\n                        &schema.parameters[i],\n                        base_rel,\n                    )?;\n                }\n            }\n            SerialType::Optional => {\n                let tag = *data;\n                if tag != 0 && !schema.parameters.is_empty() {\n                    let inner_offset = schema.offsets.first().copied().unwrap_or(\n                        shm::align_up(1, schema.parameters[0].alignment().max(1)),\n                    );\n                    adjust_relptrs_inner(\n                        data.add(inner_offset),\n                        &schema.parameters[0],\n                        base_rel,\n                    )?;\n                }\n            }\n            SerialType::Tensor => {\n                let tensor = &mut *(data as *mut shm::Tensor);\n                if tensor.total_elements > 0 {\n                    tensor.shape += base_rel;\n                    tensor.data += base_rel;\n                }\n            }\n            _ => {}\n        }\n    }\n    Ok(())\n}\n\n// ── read_voidstar_binary ───────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_voidstar_binary(\n    blob: *const u8,\n    blob_size: usize,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n\n    let base = match shm::shmalloc(blob_size) {\n        Ok(p) => p,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            return ptr::null_mut();\n        }\n    };\n    std::ptr::copy_nonoverlapping(blob, base, blob_size);\n\n    let base_rel = match shm::abs2rel(base) {\n        Ok(r) => r,\n        Err(e) => {\n            let _ = shm::shfree(base);\n            set_errmsg(errmsg, &e);\n            return ptr::null_mut();\n        }\n    };\n\n    if let Err(e) = adjust_relptrs_inner(base, &rs, base_rel) {\n        let _ = shm::shfree(base);\n        set_errmsg(errmsg, &e);\n        return ptr::null_mut();\n    }\n\n    base as *mut c_void\n}\n\n// ── load_morloc_data_file ──────────────────────────────────────────────────\n// This function is complex and calls many C functions (read_json_with_schema,\n// unpack_with_schema). Keep delegating to C for now via extern declarations.\n\n#[no_mangle]\npub unsafe extern \"C\" fn load_morloc_data_file(\n    path: *const c_char,\n    data: *mut u8,\n    data_size: usize,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn read_json_with_schema(\n            dest: *mut u8, json: *mut c_char, schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn unpack_with_schema(\n            mpk: *const c_char, mpk_size: usize, schema: *const CSchema,\n            mlcptr: *mut *mut c_void, errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    if data_size == 0 {\n        set_errmsg(errmsg, &MorlocError::Other(\"Cannot parse 0-length data\".into()));\n        return ptr::null_mut();\n    }\n\n    let path_str = CStr::from_ptr(path).to_string_lossy();\n    let mut err: *mut c_char = ptr::null_mut();\n\n    // 1. Extension-based dispatch\n    if path_str.ends_with(\".json\") {\n        let json_buf = libc::realloc(data as *mut c_void, data_size + 1) as *mut u8;\n        if json_buf.is_null() {\n            libc::free(data as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"realloc failed\".into()));\n            return ptr::null_mut();\n        }\n        *json_buf.add(data_size) = 0;\n        let result = read_json_with_schema(ptr::null_mut(), json_buf as *mut c_char, schema, &mut err);\n        if !err.is_null() {\n            libc::free(json_buf as *mut c_void);\n            *errmsg = err;\n            return ptr::null_mut();\n        }\n        libc::free(json_buf as *mut c_void);\n        return result as *mut c_void;\n    }\n\n    if path_str.ends_with(\".mpk\") || path_str.ends_with(\".msgpack\") {\n        let mut result: *mut c_void = ptr::null_mut();\n        unpack_with_schema(data as *const c_char, data_size, schema, &mut result, &mut err);\n        libc::free(data as *mut c_void);\n        if !err.is_null() {\n            *errmsg = err;\n            return ptr::null_mut();\n        }\n        return result;\n    }\n\n    // 2. Check for morloc packet header\n    if data_size >= 32 {\n        let magic = *(data as *const u32);\n        if magic == packet::PACKET_MAGIC {\n            let header_bytes: &[u8; 32] = &*(data as *const [u8; 32]);\n            if let Ok(header) = packet::PacketHeader::from_bytes(header_bytes) {\n                if !header.is_data() {\n                    libc::free(data as *mut c_void);\n                    set_errmsg(errmsg, &MorlocError::Other(format!(\"Expected data packet in '{}'\", path_str)));\n                    return ptr::null_mut();\n                }\n                let offset = { header.offset } as usize;\n                let length = { header.length } as usize;\n                let payload = data.add(32 + offset);\n                let format = { header.command.data.format };\n\n                if format == packet::PACKET_FORMAT_VOIDSTAR {\n                    let result = read_voidstar_binary(payload, length, schema, &mut err);\n                    libc::free(data as *mut c_void);\n                    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n                    return result;\n                } else if format == packet::PACKET_FORMAT_MSGPACK {\n                    let mut result: *mut c_void = ptr::null_mut();\n                    unpack_with_schema(payload as *const c_char, length, schema, &mut result, &mut err);\n                    libc::free(data as *mut c_void);\n                    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n                    return result;\n                } else {\n                    libc::free(data as *mut c_void);\n                    set_errmsg(errmsg, &MorlocError::Other(format!(\"Unsupported format 0x{:02x} in '{}'\", format, path_str)));\n                    return ptr::null_mut();\n                }\n            }\n        }\n    }\n\n    // 3. Try JSON\n    let first_byte = *data;\n    let may_be_json = matches!(first_byte,\n        b'\\'' | b'\"' | b'[' | b'{' | b't' | b'f' | b'n' |\n        b'\\t' | b'\\n' | b'\\r' | b' ' |\n        b'0'..=b'9' | b'-'\n    );\n\n    if (data_size > 1 && may_be_json) || (data_size == 1 && first_byte >= b'0' && first_byte <= b'9') {\n        let json_buf = libc::realloc(data as *mut c_void, data_size + 1) as *mut u8;\n        if !json_buf.is_null() {\n            *json_buf.add(data_size) = 0;\n            let result = read_json_with_schema(ptr::null_mut(), json_buf as *mut c_char, schema, &mut err);\n            if err.is_null() && !result.is_null() {\n                libc::free(json_buf as *mut c_void);\n                return result as *mut c_void;\n            }\n            if !err.is_null() { libc::free(err as *mut c_void); err = ptr::null_mut(); }\n            // Fall through to try msgpack\n            // Note: data pointer may have been invalidated by realloc\n            // Use json_buf as the data pointer going forward\n            let mut result: *mut c_void = ptr::null_mut();\n            unpack_with_schema(json_buf as *const c_char, data_size, schema, &mut result, &mut err);\n            libc::free(json_buf as *mut c_void);\n            if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n            return result;\n        }\n    }\n\n    // 4. Try msgpack\n    let mut result: *mut c_void = ptr::null_mut();\n    unpack_with_schema(data as *const c_char, data_size, schema, &mut result, &mut err);\n    libc::free(data as *mut c_void);\n    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n    result\n}\n\n// ── upload_packet (static helper) ────────────────────────────────────────────\n\n/// Copy a voidstar packet into SHM, adjusting relptrs.\n///\n/// # Safety\n/// `dest` must point to schema.width writable bytes in SHM.\n/// `data` must point to a valid voidstar blob within [data, data_end].\nunsafe fn upload_packet(\n    dest: *mut u8,\n    data: *const u8,\n    data_end: usize,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n\n    match upload_packet_inner(dest, data, data_end, schema, &rs) {\n        Ok(_) => 0,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            1\n        }\n    }\n}\n\nunsafe fn upload_packet_inner(\n    dest: *mut u8,\n    data: *const u8,\n    data_end: usize,\n    schema: *const CSchema,\n    rs: &crate::schema::Schema,\n) -> Result<(), MorlocError> {\n    use crate::schema::SerialType;\n\n    match rs.serial_type {\n        SerialType::String | SerialType::Array => {\n            if (data as usize + rs.width - 1) <= data_end {\n                return Err(MorlocError::Packet(\"Data is too small to store an array header\".into()));\n            }\n            ptr::copy_nonoverlapping(data, dest, rs.width);\n            let arr = &mut *(dest as *mut shm::Array);\n            let arr_data_offset = arr.data as usize;\n            let arr_data = data.add(arr_data_offset);\n            let elem_width = rs.parameters[0].width;\n            let arr_size = arr.size * elem_width;\n\n            if (arr_data as usize + arr_size - 1) > data_end {\n                return Err(MorlocError::Packet(\"Data is too small to contain array values\".into()));\n            }\n\n            let data_ptr = shm::shmemcpy(arr_data, arr_size)?;\n\n            if !rs.is_fixed_width() {\n                let elem_schema = &rs.parameters[0];\n                // Need the C schema for each element\n                let elem_c_schema = (*schema).parameters;\n                if !elem_c_schema.is_null() {\n                    let elem_cs = *elem_c_schema;\n                    for i in 0..arr.size {\n                        upload_packet_inner(\n                            data_ptr.add(i * elem_width),\n                            arr_data.add(i * elem_width),\n                            data_end,\n                            elem_cs,\n                            elem_schema,\n                        )?;\n                    }\n                }\n            }\n\n            arr.data = shm::abs2rel(data_ptr)?;\n        }\n        SerialType::Tuple | SerialType::Map => {\n            for i in 0..rs.parameters.len() {\n                let elem_cs = if (*schema).parameters.is_null() {\n                    return Err(MorlocError::Packet(\"NULL parameters in schema\".into()));\n                } else {\n                    *(*schema).parameters.add(i)\n                };\n                upload_packet_inner(\n                    dest.add(rs.offsets[i]),\n                    data.add(rs.offsets[i]),\n                    data_end,\n                    elem_cs,\n                    &rs.parameters[i],\n                )?;\n            }\n        }\n        _ => {\n            if (data as usize + rs.width - 1) > data_end {\n                return Err(MorlocError::Packet(\"Given data packet is too small\".into()));\n            }\n            ptr::copy_nonoverlapping(data, dest, rs.width);\n        }\n    }\n    Ok(())\n}\n\n// ── parse_cli_data_argument_singular ─────────────────────────────────────────\n\nunsafe fn parse_cli_data_argument_singular(\n    mut dest: *mut u8,\n    arg: *mut c_char,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn read_json_with_schema(\n            dest: *mut u8, json: *mut c_char, schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn file_exists(filename: *const c_char) -> bool;\n        fn read_binary_fd(file: *mut libc::FILE, file_size: *mut usize, errmsg: *mut *mut c_char) -> *mut u8;\n    }\n\n    let rs = CSchema::to_rust(schema);\n    let mut err: *mut c_char = ptr::null_mut();\n    let mut fd: *mut libc::FILE = ptr::null_mut();\n\n    // handle STDIN\n    let stdin_path = b\"/dev/stdin\\0\";\n    let dash_path = b\"-\\0\";\n    if libc::strcmp(arg, stdin_path.as_ptr() as *const c_char) == 0\n        || libc::strcmp(arg, dash_path.as_ptr() as *const c_char) == 0\n    {\n        fd = libc::fdopen(libc::STDIN_FILENO, b\"rb\\0\".as_ptr() as *const c_char);\n    } else if file_exists(arg) {\n        fd = libc::fopen(arg, b\"rb\\0\".as_ptr() as *const c_char);\n        if fd.is_null() {\n            set_errmsg(errmsg, &MorlocError::Other(\n                format!(\"The argument '{}' is a filename, but it can't be read\",\n                    CStr::from_ptr(arg).to_string_lossy())\n            ));\n            return ptr::null_mut();\n        }\n    }\n\n    if fd.is_null() {\n        // Literal JSON data\n        if dest.is_null() {\n            match shm::shcalloc(1, rs.width) {\n                Ok(p) => dest = p,\n                Err(e) => { set_errmsg(errmsg, &e); return ptr::null_mut(); }\n            }\n        }\n        dest = read_json_with_schema(dest, arg, schema, &mut err);\n        if !err.is_null() {\n            *errmsg = err;\n            return ptr::null_mut();\n        }\n        return dest;\n    }\n\n    // File or stdin\n    let mut data_size: usize = 0;\n    let data = read_binary_fd(fd, &mut data_size, &mut err);\n    // Don't close stdin\n    if fd != libc::fdopen(libc::STDIN_FILENO, b\"rb\\0\".as_ptr() as *const c_char) {\n        libc::fclose(fd);\n    }\n    if !err.is_null() {\n        if !data.is_null() { libc::free(data as *mut c_void); }\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Special case: RPTR packets\n    if data_size >= 32 {\n        let magic = *(data as *const u32);\n        if magic == packet::PACKET_MAGIC {\n            let header = &*(data as *const packet::PacketHeader);\n            let source = header.command.data.source;\n            let format = header.command.data.format;\n            if source == packet::PACKET_SOURCE_RPTR && format == packet::PACKET_FORMAT_VOIDSTAR {\n                if dest.is_null() {\n                    match shm::shcalloc(1, rs.width) {\n                        Ok(p) => dest = p,\n                        Err(e) => {\n                            libc::free(data as *mut c_void);\n                            set_errmsg(errmsg, &e);\n                            return ptr::null_mut();\n                        }\n                    }\n                }\n                let voidstar_ptr = data.add(32 + header.offset as usize);\n                if upload_packet(dest, voidstar_ptr, voidstar_ptr as usize + data_size - 1, schema, &mut err) != 0 {\n                    libc::free(data as *mut c_void);\n                    *errmsg = err;\n                    return ptr::null_mut();\n                }\n                libc::free(data as *mut c_void);\n                return dest;\n            }\n        }\n    }\n\n    // All other formats: canonical file loader (takes ownership of data)\n    dest = load_morloc_data_file(arg, data, data_size, schema, &mut err) as *mut u8;\n    if !err.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n    dest\n}\n\n// ── parse_cli_data_argument_unrolled ─────────────────────────────────────────\n\nunsafe fn parse_cli_data_argument_unrolled(\n    mut dest: *mut u8,\n    default_value: *mut c_char,\n    fields: *mut *mut c_char,\n    default_fields: *mut *mut c_char,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    let mut err: *mut c_char = ptr::null_mut();\n    let mut using_record_default = false;\n\n    if dest.is_null() {\n        match shm::shcalloc(1, rs.width) {\n            Ok(p) => dest = p,\n            Err(e) => { set_errmsg(errmsg, &e); return ptr::null_mut(); }\n        }\n    }\n\n    if !default_value.is_null() {\n        dest = parse_cli_data_argument_singular(dest, default_value, schema, &mut err);\n        if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n        using_record_default = true;\n    }\n\n    use crate::schema::SerialType;\n    match rs.serial_type {\n        SerialType::Tuple | SerialType::Map => {\n            for i in 0..rs.parameters.len() {\n                let element_dest = dest.add(rs.offsets[i]);\n                let field_val = *fields.add(i);\n                let elem_cs = *(*schema).parameters.add(i);\n\n                if !field_val.is_null() {\n                    // Free memory from default for this field\n                    shfree_by_schema(element_dest as *mut c_void, elem_cs, &mut err);\n                    if !err.is_null() { libc::free(err as *mut c_void); err = ptr::null_mut(); }\n\n                    let result = parse_cli_data_argument_singular(\n                        element_dest, field_val, elem_cs, &mut err,\n                    );\n                    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n                    let _ = result; // result writes into element_dest\n                } else if using_record_default {\n                    continue;\n                } else {\n                    let default_field = *default_fields.add(i);\n                    if !default_field.is_null() {\n                        let result = parse_cli_data_argument_singular(\n                            element_dest, default_field, elem_cs, &mut err,\n                        );\n                        if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n                        let _ = result;\n                    } else {\n                        set_errmsg(errmsg, &MorlocError::Other(\n                            format!(\"Field {} missing with no default or default record\", i)\n                        ));\n                        return ptr::null_mut();\n                    }\n                }\n            }\n        }\n        _ => {\n            set_errmsg(errmsg, &MorlocError::Other(\"Only record and tuple types may be unrolled\".into()));\n            return ptr::null_mut();\n        }\n    }\n    dest\n}\n\n// ── parse_cli_data_argument ──────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn parse_cli_data_argument(\n    dest: *mut u8,\n    arg: *const ArgumentT,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n\n    let result = if (*arg).fields.is_null() {\n        parse_cli_data_argument_singular(dest, (*arg).value, schema, &mut err)\n    } else {\n        parse_cli_data_argument_unrolled(\n            dest, (*arg).value, (*arg).fields, (*arg).default_fields, schema, &mut err,\n        )\n    };\n\n    if !err.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n    if result.is_null() {\n        return ptr::null_mut();\n    }\n\n    let relptr = match shm::abs2rel(result) {\n        Ok(r) => r,\n        Err(e) => { set_errmsg(errmsg, &e); return ptr::null_mut(); }\n    };\n\n    // Call the Rust make_standard_data_packet FFI\n    crate::packet_ffi::make_standard_data_packet(relptr, schema)\n}\n\n// ── make_call_packet_from_cli ────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_call_packet_from_cli(\n    dest: *mut u8,\n    mid: u32,\n    args: *mut *mut ArgumentT,   // NULL-terminated\n    arg_schema_strs: *mut *mut c_char, // NULL-terminated\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n\n    // Count and parse schemas\n    let mut nschemas: usize = 0;\n    while !(*arg_schema_strs.add(nschemas)).is_null() {\n        nschemas += 1;\n    }\n\n    let mut schemas: Vec<*mut CSchema> = Vec::with_capacity(nschemas);\n    for i in 0..nschemas {\n        let schema = crate::ffi::parse_schema(*arg_schema_strs.add(i), &mut err);\n        if !err.is_null() {\n            for s in &schemas { CSchema::free(*s); }\n            *errmsg = err;\n            return ptr::null_mut();\n        }\n        schemas.push(schema);\n    }\n\n    // Count args\n    let mut nargs: usize = 0;\n    while !(*args.add(nargs)).is_null() {\n        nargs += 1;\n    }\n\n    // Parse each argument into a data packet\n    let mut packet_args: Vec<*const u8> = Vec::with_capacity(nargs);\n    for i in 0..nargs {\n        let packet = parse_cli_data_argument(dest, *args.add(i), schemas[i], &mut err);\n        if !err.is_null() {\n            for p in &packet_args { libc::free(*p as *mut c_void); }\n            for s in &schemas { CSchema::free(*s); }\n            *errmsg = err;\n            return ptr::null_mut();\n        }\n        packet_args.push(packet as *const u8);\n    }\n\n    // Build call packet\n    let call_packet = crate::packet_ffi::make_morloc_local_call_packet(\n        mid, packet_args.as_ptr(), nargs, &mut err,\n    );\n\n    for p in &packet_args { libc::free(*p as *mut c_void); }\n    for s in &schemas { CSchema::free(*s); }\n\n    if !err.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n    call_packet\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/cschema.rs",
    "content": "//! C-compatible Schema type for FFI.\n//! This module is always compiled (even with no-ffi-exports feature).\n\nuse std::ffi::{c_char, CStr, CString};\nuse std::ptr;\n\nuse crate::schema::{Schema, SerialType};\n\n/// C-compatible Schema struct matching the C `Schema` layout.\n#[repr(C)]\npub struct CSchema {\n    pub serial_type: u32,\n    pub size: usize,\n    pub width: usize,\n    pub offsets: *mut usize,\n    pub hint: *mut c_char,\n    pub parameters: *mut *mut CSchema,\n    pub keys: *mut *mut c_char,\n}\n\nimpl CSchema {\n    pub fn from_rust(schema: &Schema) -> *mut CSchema {\n        let cs = Box::new(CSchema {\n            serial_type: schema.serial_type as u32,\n            size: schema.size,\n            width: schema.width,\n            offsets: if schema.offsets.is_empty() {\n                ptr::null_mut()\n            } else {\n                let mut v = schema.offsets.clone().into_boxed_slice();\n                let p = v.as_mut_ptr();\n                std::mem::forget(v);\n                p\n            },\n            hint: match &schema.hint {\n                Some(s) => CString::new(s.as_str()).unwrap_or_default().into_raw(),\n                None => ptr::null_mut(),\n            },\n            parameters: if schema.parameters.is_empty() {\n                ptr::null_mut()\n            } else {\n                let mut ptrs: Vec<*mut CSchema> = schema\n                    .parameters\n                    .iter()\n                    .map(|p| CSchema::from_rust(p))\n                    .collect();\n                let p = ptrs.as_mut_ptr();\n                std::mem::forget(ptrs);\n                p\n            },\n            keys: if schema.keys.is_empty() {\n                ptr::null_mut()\n            } else {\n                let mut ptrs: Vec<*mut c_char> = schema\n                    .keys\n                    .iter()\n                    .map(|k| CString::new(k.as_str()).unwrap_or_default().into_raw())\n                    .collect();\n                let p = ptrs.as_mut_ptr();\n                std::mem::forget(ptrs);\n                p\n            },\n        });\n        Box::into_raw(cs)\n    }\n\n    /// Convert a C-allocated CSchema to a Rust Schema by deep-copying all data.\n    ///\n    /// # Safety\n    /// `cs` must be null or a valid pointer to a CSchema allocated by `from_rust`\n    /// or equivalent C code. All child pointers must be valid for `cs.size` entries.\n    pub unsafe fn to_rust(cs: *const CSchema) -> Schema {\n        if cs.is_null() {\n            return Schema::primitive(SerialType::Nil);\n        }\n        let cs = &*cs;\n        // SAFETY: SerialType is #[repr(u32)] and cs.serial_type was set from a valid SerialType.\n        let serial_type = std::mem::transmute::<u32, SerialType>(cs.serial_type);\n\n        let offsets = if cs.offsets.is_null() || cs.size == 0 {\n            Vec::new()\n        } else {\n            let n = match serial_type {\n                SerialType::Tuple | SerialType::Map => cs.size,\n                SerialType::Optional | SerialType::Tensor => 1,\n                _ => 0,\n            };\n            if n > 0 {\n                std::slice::from_raw_parts(cs.offsets, n).to_vec()\n            } else {\n                Vec::new()\n            }\n        };\n\n        let parameters = if cs.parameters.is_null() || cs.size == 0 {\n            Vec::new()\n        } else {\n            (0..cs.size)\n                .map(|i| CSchema::to_rust(*cs.parameters.add(i)))\n                .collect()\n        };\n\n        let keys = if cs.keys.is_null() || cs.size == 0 {\n            Vec::new()\n        } else {\n            (0..cs.size)\n                .filter_map(|i| {\n                    let p = *cs.keys.add(i);\n                    if p.is_null() { None }\n                    else { Some(CStr::from_ptr(p).to_string_lossy().into_owned()) }\n                })\n                .collect()\n        };\n\n        let hint = if cs.hint.is_null() {\n            None\n        } else {\n            Some(CStr::from_ptr(cs.hint).to_string_lossy().into_owned())\n        };\n\n        Schema {\n            serial_type,\n            size: cs.size,\n            width: cs.width,\n            offsets,\n            hint,\n            parameters,\n            keys,\n        }\n    }\n\n    /// Free a CSchema and all its children (same logic as ffi::free_schema).\n    ///\n    /// # Safety\n    /// `schema` must be null or a valid pointer previously returned by `from_rust`.\n    pub unsafe fn free(schema: *mut CSchema) {\n        if schema.is_null() { return; }\n        let cs = Box::from_raw(schema);\n        // SAFETY: cs.serial_type was set from a valid SerialType in from_rust.\n        let st = std::mem::transmute::<u32, SerialType>(cs.serial_type);\n        if !cs.offsets.is_null() {\n            let n = match st {\n                SerialType::Tuple | SerialType::Map => cs.size,\n                SerialType::Optional | SerialType::Tensor => 1,\n                _ => 0,\n            };\n            if n > 0 { let _ = Vec::from_raw_parts(cs.offsets, n, n); }\n        }\n        if !cs.hint.is_null() { let _ = CString::from_raw(cs.hint); }\n        if !cs.parameters.is_null() && cs.size > 0 {\n            let ptrs = Vec::from_raw_parts(cs.parameters, cs.size, cs.size);\n            for p in ptrs { CSchema::free(p); }\n        }\n        if !cs.keys.is_null() && cs.size > 0 {\n            let ptrs = Vec::from_raw_parts(cs.keys, cs.size, cs.size);\n            for p in ptrs { if !p.is_null() { let _ = CString::from_raw(p); } }\n        }\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/daemon_ffi.rs",
    "content": "//! C ABI wrappers for daemon subsystems.\n//! Replaces daemon.c. Uses serde_json, HashMap, VecDeque, and std::thread.\n\nuse std::collections::HashMap;\nuse std::collections::VecDeque;\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\nuse std::sync::atomic::{AtomicBool, AtomicI32, Ordering};\nuse std::sync::{Arc, Condvar, Mutex};\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::hash;\nuse crate::http_ffi::{DaemonMethod, DaemonRequest, HttpRequest};\n\n// -- Constants ----------------------------------------------------------------\n\nconst DEFAULT_XXHASH_SEED: u64 = 0;\nconst MAX_LP_MESSAGE: u32 = 64 * 1024 * 1024;\n\n// -- Global state -------------------------------------------------------------\n\nstatic SHUTDOWN_REQUESTED: AtomicBool = AtomicBool::new(false);\nstatic G_EVAL_TIMEOUT: AtomicI32 = AtomicI32::new(30);\n\n// SAFETY: These globals are set once during daemon_run initialization (single-threaded)\n// and only read afterwards. The daemon is single-threaded for request dispatch.\nstatic mut G_POOL_ALIVE_FN: Option<unsafe extern \"C\" fn(usize) -> bool> = None;\nstatic mut G_N_POOLS: usize = 0;\nstatic mut G_BINDING_STORE: *mut BindingStore = ptr::null_mut();\n\n// -- C-compatible types -------------------------------------------------------\n\n/// Matches morloc_socket_t from call.h\n#[repr(C)]\npub struct MorlocSocket {\n    pub lang: *mut c_char,\n    pub syscmd: *mut *mut c_char,\n    pub socket_filename: *mut c_char,\n    pub pid: i32,\n}\n\n/// Matches daemon_config_t from daemon.h\n#[repr(C)]\npub struct DaemonConfig {\n    pub unix_socket_path: *const c_char,\n    pub tcp_port: i32,\n    pub http_port: i32,\n    pub pool_check_fn: Option<unsafe extern \"C\" fn(*mut MorlocSocket, usize)>,\n    pub pool_alive_fn: Option<unsafe extern \"C\" fn(usize) -> bool>,\n    pub n_pools: usize,\n    pub eval_timeout: i32,\n}\n\n/// Matches daemon_response_t from daemon.h\n#[repr(C)]\npub struct DaemonResponse {\n    pub id: *mut c_char,\n    pub success: bool,\n    pub result_json: *mut c_char,\n    pub error: *mut c_char,\n}\n\n// -- Binding store (replaces linear-probe hash table with HashMap) ------------\n\nstruct BindingEntry {\n    hash: u64,\n    expr: String,\n    #[allow(dead_code)]\n    artifact_dir: String,\n    type_sig: Option<String>,\n    names: Vec<String>,\n}\n\nstruct BindingStore {\n    entries: HashMap<u64, BindingEntry>,\n    /// Index from name -> hash for name-based lookup\n    name_index: HashMap<String, u64>,\n    base_dir: String,\n}\n\nimpl BindingStore {\n    fn new(base_dir: &str) -> Self {\n        let _ = std::fs::create_dir_all(base_dir);\n        BindingStore {\n            entries: HashMap::new(),\n            name_index: HashMap::new(),\n            base_dir: base_dir.to_string(),\n        }\n    }\n\n    fn lookup_hash(&self, hash: u64) -> Option<&BindingEntry> {\n        self.entries.get(&hash)\n    }\n\n    fn lookup_name(&self, name: &str) -> Option<&BindingEntry> {\n        let hash = self.name_index.get(name)?;\n        self.entries.get(hash)\n    }\n\n    fn add_name(&mut self, hash: u64, name: &str) {\n        if let Some(entry) = self.entries.get_mut(&hash) {\n            if !entry.names.contains(&name.to_string()) {\n                entry.names.push(name.to_string());\n            }\n        }\n        self.name_index.insert(name.to_string(), hash);\n    }\n\n    fn bind(&mut self, expr: &str, name: Option<&str>, eval_timeout: i32) -> Option<u64> {\n        let hv = hash::xxh64_with_seed(expr.as_bytes(), DEFAULT_XXHASH_SEED);\n\n        if self.entries.contains_key(&hv) {\n            if let Some(n) = name {\n                self.add_name(hv, n);\n            }\n            return Some(hv);\n        }\n\n        let hash_hex = format!(\"{:016x}\", hv);\n        let artifact_dir = format!(\"{}/{}\", self.base_dir, hash_hex);\n\n        // Fork morloc eval --save\n        unsafe {\n            let mut stdout_pipe = [0i32; 2];\n            let mut stderr_pipe = [0i32; 2];\n            if libc::pipe(stdout_pipe.as_mut_ptr()) != 0\n                || libc::pipe(stderr_pipe.as_mut_ptr()) != 0\n            {\n                return None;\n            }\n\n            let pid = libc::fork();\n            if pid < 0 {\n                libc::close(stdout_pipe[0]);\n                libc::close(stdout_pipe[1]);\n                libc::close(stderr_pipe[0]);\n                libc::close(stderr_pipe[1]);\n                return None;\n            }\n\n            if pid == 0 {\n                // Child\n                libc::close(stdout_pipe[0]);\n                libc::close(stderr_pipe[0]);\n                libc::dup2(stdout_pipe[1], libc::STDOUT_FILENO);\n                libc::dup2(stderr_pipe[1], libc::STDERR_FILENO);\n                libc::close(stdout_pipe[1]);\n                libc::close(stderr_pipe[1]);\n\n                if eval_timeout > 0 {\n                    let cpu_limit = libc::rlimit {\n                        rlim_cur: eval_timeout as libc::rlim_t,\n                        rlim_max: (eval_timeout + 5) as libc::rlim_t,\n                    };\n                    libc::setrlimit(libc::RLIMIT_CPU, &cpu_limit);\n                    let as_limit = libc::rlimit {\n                        rlim_cur: 2 * 1024 * 1024 * 1024,\n                        rlim_max: 2 * 1024 * 1024 * 1024,\n                    };\n                    libc::setrlimit(libc::RLIMIT_AS, &as_limit);\n                }\n\n                let cmd = CString::new(\"morloc\").unwrap();\n                let arg_eval = CString::new(\"eval\").unwrap();\n                let arg_save = CString::new(\"--save\").unwrap();\n                let arg_hex = CString::new(hash_hex.as_str()).unwrap();\n                let arg_expr = CString::new(expr).unwrap();\n                libc::execlp(\n                    cmd.as_ptr(),\n                    cmd.as_ptr(),\n                    arg_eval.as_ptr(),\n                    arg_save.as_ptr(),\n                    arg_hex.as_ptr(),\n                    arg_expr.as_ptr(),\n                    ptr::null::<c_char>(),\n                );\n                libc::_exit(127);\n            }\n\n            // Parent\n            libc::close(stdout_pipe[1]);\n            libc::close(stderr_pipe[1]);\n\n            let mut stderr_buf = vec![0u8; 4096];\n            let mut stderr_len: usize = 0;\n            loop {\n                let n = libc::read(\n                    stderr_pipe[0],\n                    stderr_buf.as_mut_ptr().add(stderr_len) as *mut c_void,\n                    stderr_buf.len() - stderr_len - 1,\n                );\n                if n <= 0 {\n                    break;\n                }\n                stderr_len += n as usize;\n            }\n            libc::close(stdout_pipe[0]);\n            libc::close(stderr_pipe[0]);\n\n            let mut status: i32 = 0;\n            libc::waitpid(pid, &mut status, 0);\n\n            if !libc::WIFEXITED(status) || libc::WEXITSTATUS(status) != 0 {\n                stderr_buf.truncate(stderr_len);\n                let msg = String::from_utf8_lossy(&stderr_buf);\n                eprintln!(\"binding_store_bind: morloc eval --save failed: {}\", msg);\n                return None;\n            }\n        }\n\n        let entry = BindingEntry {\n            hash: hv,\n            expr: expr.to_string(),\n            artifact_dir,\n            type_sig: None,\n            names: Vec::new(),\n        };\n        self.entries.insert(hv, entry);\n        if let Some(n) = name {\n            self.add_name(hv, n);\n        }\n\n        Some(hv)\n    }\n\n    fn list_json(&self) -> String {\n        #[derive(serde::Serialize)]\n        struct BindingInfo {\n            hash: String,\n            expr: String,\n            #[serde(skip_serializing_if = \"Option::is_none\")]\n            r#type: Option<String>,\n            names: Vec<String>,\n        }\n        #[derive(serde::Serialize)]\n        struct BindingsList {\n            bindings: Vec<BindingInfo>,\n        }\n        let bindings: Vec<BindingInfo> = self\n            .entries\n            .values()\n            .map(|e| BindingInfo {\n                hash: format!(\"{:016x}\", e.hash),\n                expr: e.expr.clone(),\n                r#type: e.type_sig.clone(),\n                names: e.names.clone(),\n            })\n            .collect();\n        serde_json::to_string(&BindingsList { bindings }).unwrap_or_default()\n    }\n\n    fn unbind(&mut self, name: &str) -> bool {\n        let hash = match self.name_index.remove(name) {\n            Some(h) => h,\n            None => return false,\n        };\n        if let Some(entry) = self.entries.get_mut(&hash) {\n            entry.names.retain(|n| n != name);\n        }\n        true\n    }\n}\n\n// -- C-exported binding store functions ---------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn binding_store_init(base_dir: *const c_char) -> *mut c_void {\n    let dir = CStr::from_ptr(base_dir).to_string_lossy().into_owned();\n    let store = Box::new(BindingStore::new(&dir));\n    Box::into_raw(store) as *mut c_void\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn binding_store_free(store: *mut c_void) {\n    if !store.is_null() {\n        drop(Box::from_raw(store as *mut BindingStore));\n    }\n}\n\n// -- Request parsing (serde_json) ---------------------------------------------\n\n#[derive(serde::Deserialize)]\nstruct JsonRequest {\n    id: Option<String>,\n    method: Option<String>,\n    command: Option<String>,\n    args: Option<serde_json::Value>,\n    expr: Option<String>,\n    name: Option<String>,\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_parse_request(\n    json: *const c_char,\n    len: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut DaemonRequest {\n    clear_errmsg(errmsg);\n\n    let slice = std::slice::from_raw_parts(json as *const u8, len);\n    let text = match std::str::from_utf8(slice) {\n        Ok(s) => s,\n        Err(_) => {\n            set_errmsg(errmsg, &MorlocError::Other(\"Invalid UTF-8 in request\".into()));\n            return ptr::null_mut();\n        }\n    };\n\n    let parsed: JsonRequest = match serde_json::from_str(text) {\n        Ok(r) => r,\n        Err(e) => {\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(format!(\"Failed to parse request JSON: {}\", e)),\n            );\n            return ptr::null_mut();\n        }\n    };\n\n    let req = libc::calloc(1, std::mem::size_of::<DaemonRequest>()) as *mut DaemonRequest;\n    if req.is_null() {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to allocate daemon_request_t\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    if let Some(id) = &parsed.id {\n        let c = CString::new(id.as_str()).unwrap_or_default();\n        (*req).id = libc::strdup(c.as_ptr());\n    }\n\n    if let Some(method) = &parsed.method {\n        (*req).method = match method.as_str() {\n            \"call\" => DaemonMethod::Call,\n            \"discover\" => DaemonMethod::Discover,\n            \"health\" => DaemonMethod::Health,\n            \"eval\" => DaemonMethod::Eval,\n            \"typecheck\" => DaemonMethod::Typecheck,\n            \"bind\" => DaemonMethod::Bind,\n            \"bindings\" => DaemonMethod::Bindings,\n            \"unbind\" => DaemonMethod::Unbind,\n            _ => {\n                daemon_free_request(req);\n                set_errmsg(\n                    errmsg,\n                    &MorlocError::Other(format!(\"Unknown method: {}\", method)),\n                );\n                return ptr::null_mut();\n            }\n        };\n    }\n\n    if let Some(cmd) = &parsed.command {\n        let c = CString::new(cmd.as_str()).unwrap_or_default();\n        (*req).command = libc::strdup(c.as_ptr());\n    }\n\n    if let Some(args) = &parsed.args {\n        let args_str = serde_json::to_string(args).unwrap_or_default();\n        let c = CString::new(args_str).unwrap_or_default();\n        (*req).args_json = libc::strdup(c.as_ptr());\n    }\n\n    if let Some(expr) = &parsed.expr {\n        let c = CString::new(expr.as_str()).unwrap_or_default();\n        (*req).expr = libc::strdup(c.as_ptr());\n    }\n\n    if let Some(name) = &parsed.name {\n        let c = CString::new(name.as_str()).unwrap_or_default();\n        (*req).name = libc::strdup(c.as_ptr());\n    }\n\n    req\n}\n\n// -- Response parsing (serde_json) --------------------------------------------\n\n#[derive(serde::Deserialize)]\nstruct JsonResponse {\n    id: Option<String>,\n    status: Option<String>,\n    result: Option<serde_json::Value>,\n    error: Option<String>,\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_parse_response(\n    json: *const c_char,\n    len: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut DaemonResponse {\n    clear_errmsg(errmsg);\n\n    let slice = std::slice::from_raw_parts(json as *const u8, len);\n    let text = match std::str::from_utf8(slice) {\n        Ok(s) => s,\n        Err(_) => {\n            set_errmsg(errmsg, &MorlocError::Other(\"Invalid UTF-8 in response\".into()));\n            return ptr::null_mut();\n        }\n    };\n\n    let parsed: JsonResponse = match serde_json::from_str(text) {\n        Ok(r) => r,\n        Err(e) => {\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(format!(\"Failed to parse response JSON: {}\", e)),\n            );\n            return ptr::null_mut();\n        }\n    };\n\n    let resp = libc::calloc(1, std::mem::size_of::<DaemonResponse>()) as *mut DaemonResponse;\n    if resp.is_null() {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to allocate daemon_response_t\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    if let Some(id) = &parsed.id {\n        let c = CString::new(id.as_str()).unwrap_or_default();\n        (*resp).id = libc::strdup(c.as_ptr());\n    }\n\n    (*resp).success = parsed\n        .status\n        .as_deref()\n        .map(|s| s == \"ok\")\n        .unwrap_or(false);\n\n    if let Some(result) = &parsed.result {\n        let s = serde_json::to_string(result).unwrap_or_default();\n        let c = CString::new(s).unwrap_or_default();\n        (*resp).result_json = libc::strdup(c.as_ptr());\n    }\n\n    if let Some(error) = &parsed.error {\n        let c = CString::new(error.as_str()).unwrap_or_default();\n        (*resp).error = libc::strdup(c.as_ptr());\n    }\n\n    resp\n}\n\n// -- Free functions -----------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_free_request(req: *mut DaemonRequest) {\n    if req.is_null() {\n        return;\n    }\n    if !(*req).id.is_null() {\n        libc::free((*req).id as *mut c_void);\n    }\n    if !(*req).command.is_null() {\n        libc::free((*req).command as *mut c_void);\n    }\n    if !(*req).args_json.is_null() {\n        libc::free((*req).args_json as *mut c_void);\n    }\n    if !(*req).expr.is_null() {\n        libc::free((*req).expr as *mut c_void);\n    }\n    if !(*req).name.is_null() {\n        libc::free((*req).name as *mut c_void);\n    }\n    libc::free(req as *mut c_void);\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_free_response(resp: *mut DaemonResponse) {\n    if resp.is_null() {\n        return;\n    }\n    if !(*resp).id.is_null() {\n        libc::free((*resp).id as *mut c_void);\n    }\n    if !(*resp).result_json.is_null() {\n        libc::free((*resp).result_json as *mut c_void);\n    }\n    if !(*resp).error.is_null() {\n        libc::free((*resp).error as *mut c_void);\n    }\n    libc::free(resp as *mut c_void);\n}\n\n// -- Response serialization (serde_json) --------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_serialize_response(\n    response: *mut DaemonResponse,\n    out_len: *mut usize,\n) -> *mut c_char {\n    let mut map = serde_json::Map::new();\n\n    if !(*response).id.is_null() {\n        let id = CStr::from_ptr((*response).id).to_string_lossy();\n        map.insert(\"id\".into(), serde_json::Value::String(id.into_owned()));\n    }\n\n    map.insert(\n        \"status\".into(),\n        serde_json::Value::String(\n            if (*response).success { \"ok\" } else { \"error\" }.into(),\n        ),\n    );\n\n    if (*response).success && !(*response).result_json.is_null() {\n        let raw = CStr::from_ptr((*response).result_json).to_string_lossy();\n        // Try to parse as JSON value; if it fails, store as raw string\n        match serde_json::from_str::<serde_json::Value>(&raw) {\n            Ok(v) => {\n                map.insert(\"result\".into(), v);\n            }\n            Err(_) => {\n                map.insert(\"result\".into(), serde_json::Value::String(raw.into_owned()));\n            }\n        }\n    }\n\n    if !(*response).success && !(*response).error.is_null() {\n        let err = CStr::from_ptr((*response).error).to_string_lossy();\n        map.insert(\"error\".into(), serde_json::Value::String(err.into_owned()));\n    }\n\n    let json_str = serde_json::to_string(&map).unwrap_or_else(|_| \"{}\".into());\n    if !out_len.is_null() {\n        *out_len = json_str.len();\n    }\n    let c = CString::new(json_str).unwrap_or_default();\n    libc::strdup(c.as_ptr())\n}\n\n// -- Discovery ----------------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_build_discovery(manifest: *mut c_void) -> *mut c_char {\n    extern \"C\" {\n        fn manifest_to_discovery_json(manifest: *const c_void) -> *mut c_char;\n    }\n    manifest_to_discovery_json(manifest)\n}\n\n// -- Eval timeout -------------------------------------------------------------\n\n#[no_mangle]\npub extern \"C\" fn daemon_set_eval_timeout(timeout_sec: i32) {\n    let t = if timeout_sec > 0 { timeout_sec } else { 30 };\n    G_EVAL_TIMEOUT.store(t, Ordering::Relaxed);\n}\n\n// -- Fork-based eval/typecheck ------------------------------------------------\n\n/// Fork `morloc <subcmd> <expr>`, capture stdout/stderr, return a DaemonResponse.\nunsafe fn fork_morloc_command(subcmd: &str, expr: *const c_char) -> *mut DaemonResponse {\n    let resp = libc::calloc(1, std::mem::size_of::<DaemonResponse>()) as *mut DaemonResponse;\n\n    let mut stdout_pipe = [0i32; 2];\n    let mut stderr_pipe = [0i32; 2];\n    if libc::pipe(stdout_pipe.as_mut_ptr()) != 0 || libc::pipe(stderr_pipe.as_mut_ptr()) != 0 {\n        (*resp).success = false;\n        let c = CString::new(format!(\"Failed to create pipes for {}\", subcmd)).unwrap_or_default();\n        (*resp).error = libc::strdup(c.as_ptr());\n        return resp;\n    }\n\n    let pid = libc::fork();\n    if pid < 0 {\n        (*resp).success = false;\n        let c = CString::new(format!(\"Failed to fork for {}\", subcmd)).unwrap_or_default();\n        (*resp).error = libc::strdup(c.as_ptr());\n        libc::close(stdout_pipe[0]);\n        libc::close(stdout_pipe[1]);\n        libc::close(stderr_pipe[0]);\n        libc::close(stderr_pipe[1]);\n        return resp;\n    }\n\n    if pid == 0 {\n        // Child\n        libc::close(stdout_pipe[0]);\n        libc::close(stderr_pipe[0]);\n        libc::dup2(stdout_pipe[1], libc::STDOUT_FILENO);\n        libc::dup2(stderr_pipe[1], libc::STDERR_FILENO);\n        libc::close(stdout_pipe[1]);\n        libc::close(stderr_pipe[1]);\n\n        let timeout = G_EVAL_TIMEOUT.load(Ordering::Relaxed);\n        if timeout > 0 {\n            let cpu_limit = libc::rlimit {\n                rlim_cur: timeout as libc::rlim_t,\n                rlim_max: (timeout + 5) as libc::rlim_t,\n            };\n            libc::setrlimit(libc::RLIMIT_CPU, &cpu_limit);\n            let as_limit = libc::rlimit {\n                rlim_cur: 2 * 1024 * 1024 * 1024,\n                rlim_max: 2 * 1024 * 1024 * 1024,\n            };\n            libc::setrlimit(libc::RLIMIT_AS, &as_limit);\n        }\n\n        let cmd = CString::new(\"morloc\").unwrap();\n        let arg_subcmd = CString::new(subcmd).unwrap();\n        libc::execlp(\n            cmd.as_ptr(),\n            cmd.as_ptr(),\n            arg_subcmd.as_ptr(),\n            expr,\n            ptr::null::<c_char>(),\n        );\n        libc::_exit(127);\n    }\n\n    // Parent\n    libc::close(stdout_pipe[1]);\n    libc::close(stderr_pipe[1]);\n\n    let stdout_buf = read_fd_to_vec(stdout_pipe[0]);\n    libc::close(stdout_pipe[0]);\n    let stderr_buf = read_fd_to_vec(stderr_pipe[0]);\n    libc::close(stderr_pipe[0]);\n\n    let mut status: i32 = 0;\n    libc::waitpid(pid, &mut status, 0);\n\n    if libc::WIFEXITED(status) && libc::WEXITSTATUS(status) == 0 {\n        let mut out = String::from_utf8_lossy(&stdout_buf).into_owned();\n        // Trim trailing newlines\n        while out.ends_with('\\n') || out.ends_with('\\r') {\n            out.pop();\n        }\n        (*resp).success = true;\n        let c = CString::new(out).unwrap_or_default();\n        (*resp).result_json = libc::strdup(c.as_ptr());\n    } else {\n        (*resp).success = false;\n        let errmsg = if !stderr_buf.is_empty() {\n            String::from_utf8_lossy(&stderr_buf).into_owned()\n        } else if libc::WIFSIGNALED(status) {\n            format!(\"morloc {} killed by signal {}\", subcmd, libc::WTERMSIG(status))\n        } else {\n            let code = if libc::WIFEXITED(status) {\n                libc::WEXITSTATUS(status)\n            } else {\n                -1\n            };\n            format!(\"morloc {} exited with code {}\", subcmd, code)\n        };\n        let c = CString::new(errmsg).unwrap_or_default();\n        (*resp).error = libc::strdup(c.as_ptr());\n    }\n\n    resp\n}\n\n/// Read an fd to completion into a Vec<u8>.\nunsafe fn read_fd_to_vec(fd: i32) -> Vec<u8> {\n    let mut buf = Vec::with_capacity(65536);\n    let mut tmp = [0u8; 8192];\n    loop {\n        let n = libc::read(fd, tmp.as_mut_ptr() as *mut c_void, tmp.len());\n        if n <= 0 {\n            break;\n        }\n        buf.extend_from_slice(&tmp[..n as usize]);\n    }\n    buf\n}\n\n// -- Dispatch -----------------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_dispatch(\n    manifest: *mut c_void,\n    request: *mut DaemonRequest,\n    sockets: *mut MorlocSocket,\n    _shm_basename: *const c_char,\n) -> *mut DaemonResponse {\n    let resp = libc::calloc(1, std::mem::size_of::<DaemonResponse>()) as *mut DaemonResponse;\n\n    // Echo request id\n    if !(*request).id.is_null() {\n        (*resp).id = libc::strdup((*request).id);\n    }\n\n    match (*request).method {\n        DaemonMethod::Health => {\n            (*resp).success = true;\n            if let Some(alive_fn) = G_POOL_ALIVE_FN {\n                let mut arr = Vec::with_capacity(G_N_POOLS);\n                for i in 0..G_N_POOLS {\n                    arr.push(serde_json::Value::Bool(alive_fn(i)));\n                }\n                let json = serde_json::to_string(&arr).unwrap_or_default();\n                let c = CString::new(json).unwrap_or_default();\n                (*resp).result_json = libc::strdup(c.as_ptr());\n            }\n            return resp;\n        }\n        DaemonMethod::Discover => {\n            (*resp).success = true;\n            (*resp).result_json = daemon_build_discovery(manifest);\n            return resp;\n        }\n        DaemonMethod::Eval => {\n            if (*request).expr.is_null() {\n                (*resp).success = false;\n                let c = CString::new(\"Missing 'expr' field in eval request\").unwrap();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n\n            // Check binding store for cached expression\n            if !G_BINDING_STORE.is_null() {\n                let expr_str = CStr::from_ptr((*request).expr).to_string_lossy();\n                let store = &*G_BINDING_STORE;\n                let hv = hash::xxh64_with_seed(expr_str.as_bytes(), DEFAULT_XXHASH_SEED);\n                let _cached = store\n                    .lookup_hash(hv)\n                    .or_else(|| store.lookup_name(&expr_str));\n                // TODO: direct binary execution for bound functions\n            }\n\n            let eval_resp = fork_morloc_command(\"eval\", (*request).expr);\n            if !(*request).id.is_null() {\n                (*eval_resp).id = libc::strdup((*request).id);\n            }\n            libc::free(resp as *mut c_void);\n            return eval_resp;\n        }\n        DaemonMethod::Typecheck => {\n            if (*request).expr.is_null() {\n                (*resp).success = false;\n                let c = CString::new(\"Missing 'expr' field in typecheck request\").unwrap();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n            let tc_resp = fork_morloc_command(\"typecheck\", (*request).expr);\n            if !(*request).id.is_null() {\n                (*tc_resp).id = libc::strdup((*request).id);\n            }\n            libc::free(resp as *mut c_void);\n            return tc_resp;\n        }\n        DaemonMethod::Bind => {\n            if (*request).expr.is_null() {\n                (*resp).success = false;\n                let c = CString::new(\"Missing 'expr' field in bind request\").unwrap();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n            if G_BINDING_STORE.is_null() {\n                (*resp).success = false;\n                let c = CString::new(\"Binding store not initialized\").unwrap();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n            let store = &mut *G_BINDING_STORE;\n            let expr_str = CStr::from_ptr((*request).expr).to_string_lossy().into_owned();\n            let name = if (*request).name.is_null() {\n                None\n            } else {\n                Some(CStr::from_ptr((*request).name).to_string_lossy().into_owned())\n            };\n            let timeout = G_EVAL_TIMEOUT.load(Ordering::Relaxed);\n            match store.bind(&expr_str, name.as_deref(), timeout) {\n                Some(hv) => {\n                    let mut map = serde_json::Map::new();\n                    map.insert(\n                        \"hash\".into(),\n                        serde_json::Value::String(format!(\"{:016x}\", hv)),\n                    );\n                    map.insert(\"expr\".into(), serde_json::Value::String(expr_str));\n                    if let Some(n) = &name {\n                        map.insert(\"name\".into(), serde_json::Value::String(n.clone()));\n                    }\n                    if let Some(entry) = store.lookup_hash(hv) {\n                        if let Some(ref ts) = entry.type_sig {\n                            map.insert(\"type\".into(), serde_json::Value::String(ts.clone()));\n                        }\n                    }\n                    let json = serde_json::to_string(&map).unwrap_or_default();\n                    (*resp).success = true;\n                    let c = CString::new(json).unwrap_or_default();\n                    (*resp).result_json = libc::strdup(c.as_ptr());\n                }\n                None => {\n                    (*resp).success = false;\n                    let c =\n                        CString::new(\"Failed to compile and bind expression\").unwrap_or_default();\n                    (*resp).error = libc::strdup(c.as_ptr());\n                }\n            }\n            return resp;\n        }\n        DaemonMethod::Bindings => {\n            (*resp).success = true;\n            if G_BINDING_STORE.is_null() {\n                let c = CString::new(\"{\\\"bindings\\\":[]}\").unwrap();\n                (*resp).result_json = libc::strdup(c.as_ptr());\n            } else {\n                let store = &*G_BINDING_STORE;\n                let json = store.list_json();\n                let c = CString::new(json).unwrap_or_default();\n                (*resp).result_json = libc::strdup(c.as_ptr());\n            }\n            return resp;\n        }\n        DaemonMethod::Unbind => {\n            let name_ptr = if !(*request).command.is_null() {\n                (*request).command\n            } else {\n                (*request).name\n            };\n            if name_ptr.is_null() {\n                (*resp).success = false;\n                let c = CString::new(\"Missing binding name\").unwrap();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n            if G_BINDING_STORE.is_null() {\n                (*resp).success = false;\n                let c = CString::new(\"Binding store not initialized\").unwrap();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n            let store = &mut *G_BINDING_STORE;\n            let name = CStr::from_ptr(name_ptr).to_string_lossy();\n            if store.unbind(&name) {\n                (*resp).success = true;\n                let c = CString::new(\"{\\\"removed\\\":true}\").unwrap();\n                (*resp).result_json = libc::strdup(c.as_ptr());\n            } else {\n                (*resp).success = false;\n                let c = CString::new(format!(\"Binding not found: {}\", name)).unwrap_or_default();\n                (*resp).error = libc::strdup(c.as_ptr());\n            }\n            return resp;\n        }\n        DaemonMethod::Call => {\n            // Fall through to call dispatch below\n        }\n    }\n\n    // DAEMON_CALL\n    if (*request).command.is_null() {\n        (*resp).success = false;\n        let c = CString::new(\"Missing 'command' field in call request\").unwrap();\n        (*resp).error = libc::strdup(c.as_ptr());\n        return resp;\n    }\n\n    // Delegate to the C functions that handle manifest lookup, arg parsing,\n    // schema handling, and pool communication. These are all already ported\n    // to Rust in other _ffi modules, so we declare them as extern \"C\".\n    extern \"C\" {\n        fn parse_schema(schema: *const c_char, errmsg: *mut *mut c_char) -> *mut CSchema;\n        fn free_schema(schema: *mut CSchema);\n        fn initialize_positional(value: *mut c_char) -> *mut c_void;\n        fn free_argument_t(arg: *mut c_void);\n        fn parse_cli_data_argument(\n            dest: *mut u8,\n            arg: *const c_void,\n            schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn make_call_packet_from_cli(\n            dest: *mut u8,\n            mid: u32,\n            args: *mut *mut c_void,\n            arg_schema_strs: *mut *mut c_char,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn send_and_receive_over_socket(\n            socket_path: *const c_char,\n            packet: *const u8,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn get_morloc_data_packet_error_message(\n            data: *const u8,\n            errmsg: *mut *mut c_char,\n        ) -> *mut c_char;\n        fn get_morloc_data_packet_value(\n            data: *const u8,\n            schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn voidstar_to_json_string(\n            data: *const c_void,\n            schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut c_char;\n        fn morloc_eval(\n            expr: *mut c_void,  // actually *mut MorlocExpression\n            return_schema: *mut CSchema,\n            arg_voidstar: *mut *mut u8,\n            arg_schemas: *mut *mut CSchema,\n            nargs: usize,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n    }\n\n    // The manifest is the canonical v2 C struct from manifest_ffi.rs.\n    // No local mirror needed -- import the real type and walk it.\n    use crate::manifest_ffi::{Manifest as ManifestC, ManifestArgKind, ManifestCommand};\n\n    let mv = manifest as *const ManifestC;\n    let command_name = CStr::from_ptr((*request).command);\n    let mut cmd: *const ManifestCommand = ptr::null();\n    for i in 0..(*mv).n_commands {\n        let c = &*(*mv).commands.add(i);\n        if CStr::from_ptr(c.name) == command_name {\n            cmd = c;\n            break;\n        }\n    }\n\n    if cmd.is_null() {\n        (*resp).success = false;\n        let msg = format!(\n            \"Unknown command: {}\",\n            command_name.to_string_lossy()\n        );\n        let c = CString::new(msg).unwrap_or_default();\n        (*resp).error = libc::strdup(c.as_ptr());\n        return resp;\n    }\n\n    let cmd = &*cmd;\n    let expected_nargs = cmd.n_args;\n\n    // Parse JSON args into argument_t** array\n    let mut err: *mut c_char = ptr::null_mut();\n    let args: *mut *mut c_void;\n\n    if !(*request).args_json.is_null() {\n        // Parse the JSON array\n        let args_str = CStr::from_ptr((*request).args_json).to_string_lossy();\n        let parsed_args: Vec<serde_json::Value> = match serde_json::from_str(&args_str) {\n            Ok(v) => v,\n            Err(e) => {\n                (*resp).success = false;\n                let c = CString::new(format!(\"Failed to parse args: {}\", e)).unwrap_or_default();\n                (*resp).error = libc::strdup(c.as_ptr());\n                return resp;\n            }\n        };\n\n        if parsed_args.len() != expected_nargs {\n            (*resp).success = false;\n            let c = CString::new(format!(\n                \"Expected {} arguments, got {}\",\n                expected_nargs,\n                parsed_args.len()\n            ))\n            .unwrap_or_default();\n            (*resp).error = libc::strdup(c.as_ptr());\n            return resp;\n        }\n\n        args = libc::calloc(expected_nargs + 1, std::mem::size_of::<*mut c_void>())\n            as *mut *mut c_void;\n        for (i, val) in parsed_args.iter().enumerate() {\n            let val_str = match val {\n                serde_json::Value::String(s) => format!(\"\\\"{}\\\"\", s),\n                other => other.to_string(),\n            };\n            let c = CString::new(val_str).unwrap_or_default();\n            let dup = libc::strdup(c.as_ptr());\n            *args.add(i) = initialize_positional(dup);\n            libc::free(dup as *mut c_void);\n        }\n        *args.add(expected_nargs) = ptr::null_mut();\n    } else {\n        if expected_nargs > 0 {\n            // Check if any are positional (required)\n            // For simplicity, match the C behavior: require args if n_args > 0\n            (*resp).success = false;\n            let c = CString::new(\"Missing 'args' field in call request\").unwrap();\n            (*resp).error = libc::strdup(c.as_ptr());\n            return resp;\n        }\n        args =\n            libc::calloc(1, std::mem::size_of::<*mut c_void>()) as *mut *mut c_void;\n        *args = ptr::null_mut();\n    }\n\n    if cmd.is_pure {\n        // Pure command: evaluate expression tree\n        let mut nargs: usize = 0;\n        while !(*args.add(nargs)).is_null() {\n            nargs += 1;\n        }\n\n        // v2: schemas live on each ManifestArg. Walk cmd.args in\n        // declaration order, INCLUDING flags (they consume an arg\n        // slot in the parsed list and need a corresponding schema\n        // entry to keep alignment). For flags, fall back to the\n        // boolean schema \"b\".\n        static FLAG_SCHEMA: &[u8] = b\"b\\0\";\n        let mut arg_schema_strs: Vec<*mut c_char> = Vec::with_capacity(nargs);\n        for i in 0..cmd.n_args {\n            let a = &*cmd.args.add(i);\n            let s = if a.kind == ManifestArgKind::Flag || a.schema.is_null() {\n                FLAG_SCHEMA.as_ptr() as *mut c_char\n            } else {\n                a.schema\n            };\n            arg_schema_strs.push(s);\n        }\n\n        let arg_schemas_arr =\n            libc::calloc(nargs, std::mem::size_of::<*mut CSchema>()) as *mut *mut CSchema;\n        let arg_packets =\n            libc::calloc(nargs, std::mem::size_of::<*mut u8>()) as *mut *mut u8;\n        let arg_voidstars =\n            libc::calloc(nargs, std::mem::size_of::<*mut u8>()) as *mut *mut u8;\n\n        let mut cleanup_and_fail = false;\n\n        for i in 0..nargs {\n            let schema_str = arg_schema_strs.get(i).copied().unwrap_or(ptr::null_mut());\n            *arg_schemas_arr.add(i) = parse_schema(schema_str, &mut err);\n            if !err.is_null() {\n                (*resp).success = false;\n                (*resp).error = err;\n                cleanup_and_fail = true;\n                break;\n            }\n\n            *arg_packets.add(i) = parse_cli_data_argument(\n                ptr::null_mut(),\n                *args.add(i),\n                *arg_schemas_arr.add(i),\n                &mut err,\n            );\n            if !err.is_null() {\n                (*resp).success = false;\n                (*resp).error = err;\n                cleanup_and_fail = true;\n                break;\n            }\n\n            *arg_voidstars.add(i) = get_morloc_data_packet_value(\n                *arg_packets.add(i),\n                *arg_schemas_arr.add(i),\n                &mut err,\n            );\n            if !err.is_null() {\n                (*resp).success = false;\n                (*resp).error = err;\n                cleanup_and_fail = true;\n                break;\n            }\n        }\n\n        if !cleanup_and_fail {\n            let return_schema = parse_schema(cmd.ret.schema, &mut err);\n            if !err.is_null() {\n                (*resp).success = false;\n                (*resp).error = err;\n            } else {\n                let result_abs = morloc_eval(\n                    cmd.expr as *mut c_void,\n                    return_schema,\n                    arg_voidstars,\n                    arg_schemas_arr,\n                    nargs,\n                    &mut err,\n                );\n                if !err.is_null() {\n                    (*resp).success = false;\n                    (*resp).error = err;\n                } else {\n                    let json = voidstar_to_json_string(\n                        result_abs as *const c_void,\n                        return_schema as *const CSchema,\n                        &mut err,\n                    );\n                    if !err.is_null() {\n                        (*resp).success = false;\n                        (*resp).error = err;\n                    } else {\n                        (*resp).success = true;\n                        (*resp).result_json = json;\n                    }\n                }\n                free_schema(return_schema);\n            }\n        }\n\n        // Cleanup\n        for i in 0..nargs {\n            let s = *arg_schemas_arr.add(i);\n            if !s.is_null() {\n                free_schema(s);\n            }\n            let p = *arg_packets.add(i);\n            if !p.is_null() {\n                libc::free(p as *mut c_void);\n            }\n        }\n        libc::free(arg_schemas_arr as *mut c_void);\n        libc::free(arg_packets as *mut c_void);\n        libc::free(arg_voidstars as *mut c_void);\n    } else {\n        // Remote command: send call packet to pool. v2 stores schemas\n        // per-arg, but make_call_packet_from_cli wants a NULL-terminated\n        // flat array. ManifestCommand exposes a helper that materializes\n        // the flat view; the outer pointer array is owned by us and\n        // freed below, but the inner C strings remain owned by the\n        // ManifestArg objects.\n        let arg_schemas_flat = cmd.build_arg_schemas_array();\n        let call_packet = make_call_packet_from_cli(\n            ptr::null_mut(),\n            cmd.mid,\n            args,\n            arg_schemas_flat,\n            &mut err,\n        );\n        libc::free(arg_schemas_flat as *mut c_void);\n        if !err.is_null() {\n            (*resp).success = false;\n            (*resp).error = err;\n        } else {\n            let socket_path = (*sockets.add(cmd.pool_index)).socket_filename;\n            let result_packet =\n                send_and_receive_over_socket(socket_path, call_packet, &mut err);\n            libc::free(call_packet as *mut c_void);\n\n            if !err.is_null() {\n                (*resp).success = false;\n                (*resp).error = err;\n            } else {\n                let packet_error =\n                    get_morloc_data_packet_error_message(result_packet, &mut err);\n                if !packet_error.is_null() {\n                    (*resp).success = false;\n                    (*resp).error = libc::strdup(packet_error);\n                    libc::free(result_packet as *mut c_void);\n                } else if !err.is_null() {\n                    (*resp).success = false;\n                    (*resp).error = err;\n                    libc::free(result_packet as *mut c_void);\n                } else {\n                    let return_schema = parse_schema(cmd.ret.schema, &mut err);\n                    if !err.is_null() {\n                        (*resp).success = false;\n                        (*resp).error = err;\n                        libc::free(result_packet as *mut c_void);\n                    } else {\n                        let packet_value = get_morloc_data_packet_value(\n                            result_packet,\n                            return_schema as *const CSchema,\n                            &mut err,\n                        );\n                        if !err.is_null() {\n                            (*resp).success = false;\n                            (*resp).error = err;\n                        } else {\n                            let json = voidstar_to_json_string(\n                                packet_value as *const c_void,\n                                return_schema as *const CSchema,\n                                &mut err,\n                            );\n                            if !err.is_null() {\n                                (*resp).success = false;\n                                (*resp).error = err;\n                            } else {\n                                (*resp).success = true;\n                                (*resp).result_json = json;\n                            }\n                        }\n                        free_schema(return_schema);\n                        libc::free(result_packet as *mut c_void);\n                    }\n                }\n            }\n        }\n    }\n\n    // Free args\n    let mut i = 0;\n    while !(*args.add(i)).is_null() {\n        free_argument_t(*args.add(i));\n        i += 1;\n    }\n    libc::free(args as *mut c_void);\n\n    resp\n}\n\n// -- Length-prefixed message protocol -----------------------------------------\n\nunsafe fn read_lp_message(\n    fd: i32,\n    out_len: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n\n    let mut len_buf = [0u8; 4];\n    let n = libc::recv(\n        fd,\n        len_buf.as_mut_ptr() as *mut c_void,\n        4,\n        libc::MSG_WAITALL,\n    );\n    if n != 4 {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to read message length prefix\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    let msg_len = ((len_buf[0] as u32) << 24)\n        | ((len_buf[1] as u32) << 16)\n        | ((len_buf[2] as u32) << 8)\n        | (len_buf[3] as u32);\n\n    if msg_len > MAX_LP_MESSAGE {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\"Message too large: {} bytes\", msg_len)),\n        );\n        return ptr::null_mut();\n    }\n\n    let msg = libc::malloc(msg_len as usize + 1) as *mut c_char;\n    if msg.is_null() {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to allocate message buffer\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    let mut total: usize = 0;\n    while total < msg_len as usize {\n        let n = libc::recv(\n            fd,\n            msg.add(total) as *mut c_void,\n            msg_len as usize - total,\n            0,\n        );\n        if n <= 0 {\n            libc::free(msg as *mut c_void);\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(format!(\n                    \"Failed to read message body (got {} of {} bytes)\",\n                    total, msg_len\n                )),\n            );\n            return ptr::null_mut();\n        }\n        total += n as usize;\n    }\n    *msg.add(msg_len as usize) = 0;\n\n    if !out_len.is_null() {\n        *out_len = msg_len as usize;\n    }\n    msg\n}\n\nunsafe fn write_lp_message(\n    fd: i32,\n    data: *const c_char,\n    len: usize,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n\n    let len_buf: [u8; 4] = [\n        ((len >> 24) & 0xFF) as u8,\n        ((len >> 16) & 0xFF) as u8,\n        ((len >> 8) & 0xFF) as u8,\n        (len & 0xFF) as u8,\n    ];\n\n    let n = libc::send(\n        fd,\n        len_buf.as_ptr() as *const c_void,\n        4,\n        crate::utility::SEND_NOSIGNAL,\n    );\n    if n != 4 {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to write message length prefix\".into()),\n        );\n        return false;\n    }\n\n    let mut total: usize = 0;\n    while total < len {\n        let n = libc::send(\n            fd,\n            (data as *const u8).add(total) as *const c_void,\n            len - total,\n            crate::utility::SEND_NOSIGNAL,\n        );\n        if n <= 0 {\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(\"Failed to write message body\".into()),\n            );\n            return false;\n        }\n        total += n as usize;\n    }\n\n    true\n}\n\n// -- Connection handlers ------------------------------------------------------\n\nunsafe fn handle_lp_connection(\n    client_fd: i32,\n    manifest: *mut c_void,\n    sockets: *mut MorlocSocket,\n    shm_basename: *const c_char,\n) {\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let mut msg_len: usize = 0;\n\n    // Peek to distinguish a probe connection (immediate EOF) from a real\n    // client.  The router's readiness check connects then closes without\n    // sending data; silently ignore those.\n    let mut peek_buf = [0u8; 1];\n    let peek_n = libc::recv(client_fd, peek_buf.as_mut_ptr() as *mut c_void, 1, libc::MSG_PEEK);\n    if peek_n == 0 {\n        // Clean EOF — probe connection, silently close.\n        libc::close(client_fd);\n        return;\n    }\n\n    let msg = read_lp_message(client_fd, &mut msg_len, &mut errmsg);\n    if !errmsg.is_null() {\n        let err_str = CStr::from_ptr(errmsg).to_string_lossy();\n        eprintln!(\"daemon: read error: {}\", err_str);\n        libc::free(errmsg as *mut c_void);\n        libc::close(client_fd);\n        return;\n    }\n\n    let req = daemon_parse_request(msg, msg_len, &mut errmsg);\n    libc::free(msg as *mut c_void);\n    if !errmsg.is_null() {\n        let mut err_resp: DaemonResponse = std::mem::zeroed();\n        err_resp.success = false;\n        err_resp.error = errmsg;\n        let mut resp_len: usize = 0;\n        let resp_json = daemon_serialize_response(&mut err_resp, &mut resp_len);\n        let mut write_err: *mut c_char = ptr::null_mut();\n        write_lp_message(client_fd, resp_json, resp_len, &mut write_err);\n        libc::free(resp_json as *mut c_void);\n        if !write_err.is_null() {\n            libc::free(write_err as *mut c_void);\n        }\n        libc::free(errmsg as *mut c_void);\n        libc::close(client_fd);\n        return;\n    }\n\n    let resp = daemon_dispatch(manifest, req, sockets, shm_basename);\n\n    let mut resp_len: usize = 0;\n    let resp_json = daemon_serialize_response(resp, &mut resp_len);\n\n    let mut write_err: *mut c_char = ptr::null_mut();\n    write_lp_message(client_fd, resp_json, resp_len, &mut write_err);\n    if !write_err.is_null() {\n        let err_str = CStr::from_ptr(write_err).to_string_lossy();\n        eprintln!(\"daemon: write error: {}\", err_str);\n        libc::free(write_err as *mut c_void);\n    }\n\n    libc::free(resp_json as *mut c_void);\n    daemon_free_request(req);\n    daemon_free_response(resp);\n    libc::close(client_fd);\n}\n\nunsafe fn handle_http_connection(\n    client_fd: i32,\n    manifest: *mut c_void,\n    sockets: *mut MorlocSocket,\n    shm_basename: *const c_char,\n) {\n    extern \"C\" {\n        fn http_parse_request(fd: i32, errmsg: *mut *mut c_char) -> *mut HttpRequest;\n        fn http_free_request(req: *mut HttpRequest);\n        fn http_write_response(\n            fd: i32,\n            status: i32,\n            content_type: *const c_char,\n            body: *const c_char,\n            body_len: usize,\n        ) -> bool;\n        fn http_to_daemon_request(\n            req: *mut HttpRequest,\n            errmsg: *mut *mut c_char,\n        ) -> *mut DaemonRequest;\n    }\n\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let http_req = http_parse_request(client_fd, &mut errmsg);\n    if !errmsg.is_null() {\n        let body = b\"{\\\"status\\\":\\\"error\\\",\\\"error\\\":\\\"Bad request\\\"}\\0\";\n        let ct = b\"application/json\\0\";\n        http_write_response(\n            client_fd,\n            400,\n            ct.as_ptr() as *const c_char,\n            body.as_ptr() as *const c_char,\n            body.len() - 1,\n        );\n        libc::free(errmsg as *mut c_void);\n        libc::close(client_fd);\n        return;\n    }\n\n    let req = http_to_daemon_request(http_req, &mut errmsg);\n    if !errmsg.is_null() {\n        let body = b\"{\\\"status\\\":\\\"error\\\",\\\"error\\\":\\\"Invalid request\\\"}\\0\";\n        let ct = b\"application/json\\0\";\n        http_write_response(\n            client_fd,\n            400,\n            ct.as_ptr() as *const c_char,\n            body.as_ptr() as *const c_char,\n            body.len() - 1,\n        );\n        http_free_request(http_req);\n        libc::free(errmsg as *mut c_void);\n        libc::close(client_fd);\n        return;\n    }\n    http_free_request(http_req);\n\n    let resp = daemon_dispatch(manifest, req, sockets, shm_basename);\n\n    let mut resp_len: usize = 0;\n    let resp_json = daemon_serialize_response(resp, &mut resp_len);\n\n    // Append newline for terminal-friendly output\n    let resp_body = libc::malloc(resp_len + 2) as *mut u8;\n    ptr::copy_nonoverlapping(resp_json as *const u8, resp_body, resp_len);\n    *resp_body.add(resp_len) = b'\\n';\n    *resp_body.add(resp_len + 1) = 0;\n\n    let status = if (*resp).success { 200 } else { 500 };\n    let ct = b\"application/json\\0\";\n    http_write_response(\n        client_fd,\n        status,\n        ct.as_ptr() as *const c_char,\n        resp_body as *const c_char,\n        resp_len + 1,\n    );\n\n    libc::free(resp_body as *mut c_void);\n    libc::free(resp_json as *mut c_void);\n    daemon_free_request(req);\n    daemon_free_response(resp);\n    libc::close(client_fd);\n}\n\n// -- Thread pool (VecDeque + Condvar instead of linked list + pthread) --------\n\n#[derive(Clone, Copy)]\nstruct DaemonJob {\n    client_fd: i32,\n    conn_type: i32, // 0 = length-prefixed (unix/tcp), 2 = http\n}\n\nstruct JobQueue {\n    jobs: VecDeque<DaemonJob>,\n}\n\nstruct WorkerContext {\n    queue: Mutex<JobQueue>,\n    cond: Condvar,\n    manifest: *mut c_void,\n    sockets: *mut MorlocSocket,\n    shm_basename: *const c_char,\n}\n\n// SAFETY: WorkerContext is shared between threads but all raw pointers\n// within it point to read-only or thread-safe C data.\nunsafe impl Send for WorkerContext {}\nunsafe impl Sync for WorkerContext {}\n\nfn set_socket_timeouts(fd: i32, timeout_sec: i32) {\n    unsafe {\n        let tv = libc::timeval {\n            tv_sec: timeout_sec as _,\n            tv_usec: 0,\n        };\n        libc::setsockopt(\n            fd,\n            libc::SOL_SOCKET,\n            libc::SO_RCVTIMEO,\n            &tv as *const libc::timeval as *const c_void,\n            std::mem::size_of::<libc::timeval>() as libc::socklen_t,\n        );\n        libc::setsockopt(\n            fd,\n            libc::SOL_SOCKET,\n            libc::SO_SNDTIMEO,\n            &tv as *const libc::timeval as *const c_void,\n            std::mem::size_of::<libc::timeval>() as libc::socklen_t,\n        );\n    }\n}\n\n// -- Main daemon event loop ---------------------------------------------------\n\nconst MAX_LISTENERS: usize = 3;\n\n#[no_mangle]\npub unsafe extern \"C\" fn daemon_run(\n    config: *mut DaemonConfig,\n    manifest: *mut c_void,\n    sockets: *mut MorlocSocket,\n    n_pools: usize,\n    shm_basename: *const c_char,\n) {\n    // Set globals\n    G_POOL_ALIVE_FN = (*config).pool_alive_fn;\n    G_N_POOLS = n_pools;\n    let timeout = if (*config).eval_timeout > 0 {\n        (*config).eval_timeout\n    } else {\n        30\n    };\n    G_EVAL_TIMEOUT.store(timeout, Ordering::Relaxed);\n\n    // Initialize binding store\n    if G_BINDING_STORE.is_null() {\n        let store = Box::new(BindingStore::new(\"/tmp/morloc-bindings\"));\n        G_BINDING_STORE = Box::into_raw(store);\n    }\n\n    // Install signal handlers\n    SHUTDOWN_REQUESTED.store(false, Ordering::Relaxed);\n    let handler: libc::sighandler_t =\n        std::mem::transmute::<extern \"C\" fn(i32), libc::sighandler_t>(daemon_signal_handler_fn);\n    libc::signal(libc::SIGTERM, handler);\n    libc::signal(libc::SIGINT, handler);\n\n    let mut fds = [libc::pollfd {\n        fd: -1,\n        events: 0,\n        revents: 0,\n    }; MAX_LISTENERS];\n    let mut fd_types = [0i32; MAX_LISTENERS]; // 0=unix, 1=tcp, 2=http\n    let mut nfds: usize = 0;\n\n    // Unix socket\n    if !(*config).unix_socket_path.is_null() {\n        let sock_fd = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n        if sock_fd < 0 {\n            eprintln!(\"daemon: failed to create unix socket\");\n            return;\n        }\n        let mut addr: libc::sockaddr_un = std::mem::zeroed();\n        addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n        let path_bytes = CStr::from_ptr((*config).unix_socket_path).to_bytes();\n        let copy_len = path_bytes.len().min(addr.sun_path.len() - 1);\n        ptr::copy_nonoverlapping(\n            path_bytes.as_ptr() as *const c_char,\n            addr.sun_path.as_mut_ptr(),\n            copy_len,\n        );\n        libc::unlink((*config).unix_socket_path);\n        if libc::bind(\n            sock_fd,\n            &addr as *const libc::sockaddr_un as *const libc::sockaddr,\n            std::mem::size_of::<libc::sockaddr_un>() as libc::socklen_t,\n        ) < 0\n        {\n            eprintln!(\"daemon: failed to bind unix socket\");\n            libc::close(sock_fd);\n            return;\n        }\n        libc::listen(sock_fd, 64);\n        fds[nfds].fd = sock_fd;\n        fds[nfds].events = libc::POLLIN as i16;\n        fd_types[nfds] = 0;\n        nfds += 1;\n    }\n\n    // TCP\n    if (*config).tcp_port > 0 {\n        let tcp_fd = libc::socket(libc::AF_INET, libc::SOCK_STREAM, 0);\n        if tcp_fd < 0 {\n            eprintln!(\"daemon: failed to create tcp socket\");\n            return;\n        }\n        let opt: i32 = 1;\n        libc::setsockopt(\n            tcp_fd,\n            libc::SOL_SOCKET,\n            libc::SO_REUSEADDR,\n            &opt as *const i32 as *const c_void,\n            std::mem::size_of::<i32>() as libc::socklen_t,\n        );\n        let mut addr: libc::sockaddr_in = std::mem::zeroed();\n        addr.sin_family = libc::AF_INET as libc::sa_family_t;\n        addr.sin_addr.s_addr = u32::from_be(0x7f000001); // INADDR_LOOPBACK\n        addr.sin_port = ((*config).tcp_port as u16).to_be();\n        if libc::bind(\n            tcp_fd,\n            &addr as *const libc::sockaddr_in as *const libc::sockaddr,\n            std::mem::size_of::<libc::sockaddr_in>() as libc::socklen_t,\n        ) < 0\n        {\n            eprintln!(\n                \"daemon: failed to bind tcp port {}\",\n                (*config).tcp_port\n            );\n            libc::close(tcp_fd);\n            return;\n        }\n        libc::listen(tcp_fd, 64);\n        fds[nfds].fd = tcp_fd;\n        fds[nfds].events = libc::POLLIN as i16;\n        fd_types[nfds] = 1;\n        nfds += 1;\n    }\n\n    // HTTP\n    if (*config).http_port > 0 {\n        let http_fd = libc::socket(libc::AF_INET, libc::SOCK_STREAM, 0);\n        if http_fd < 0 {\n            eprintln!(\"daemon: failed to create http socket\");\n            return;\n        }\n        let opt: i32 = 1;\n        libc::setsockopt(\n            http_fd,\n            libc::SOL_SOCKET,\n            libc::SO_REUSEADDR,\n            &opt as *const i32 as *const c_void,\n            std::mem::size_of::<i32>() as libc::socklen_t,\n        );\n        let mut addr: libc::sockaddr_in = std::mem::zeroed();\n        addr.sin_family = libc::AF_INET as libc::sa_family_t;\n        // HTTP router is externally reachable; bind to all interfaces so that\n        // container port mappings (docker -p) can reach it.\n        addr.sin_addr.s_addr = libc::INADDR_ANY.to_be();\n        addr.sin_port = ((*config).http_port as u16).to_be();\n        if libc::bind(\n            http_fd,\n            &addr as *const libc::sockaddr_in as *const libc::sockaddr,\n            std::mem::size_of::<libc::sockaddr_in>() as libc::socklen_t,\n        ) < 0\n        {\n            eprintln!(\n                \"daemon: failed to bind http port {}\",\n                (*config).http_port\n            );\n            libc::close(http_fd);\n            return;\n        }\n        libc::listen(http_fd, 64);\n        fds[nfds].fd = http_fd;\n        fds[nfds].events = libc::POLLIN as i16;\n        fd_types[nfds] = 2;\n        nfds += 1;\n    }\n\n    if nfds == 0 {\n        eprintln!(\"daemon: no listeners configured, exiting\");\n        return;\n    }\n\n    // Start worker thread pool\n    let ctx = Arc::new(WorkerContext {\n        queue: Mutex::new(JobQueue {\n            jobs: VecDeque::new(),\n        }),\n        cond: Condvar::new(),\n        manifest,\n        sockets,\n        shm_basename,\n    });\n\n    let n_workers = n_pools.saturating_add(4).clamp(4, 32);\n    let mut workers = Vec::with_capacity(n_workers);\n    for _ in 0..n_workers {\n        let ctx = Arc::clone(&ctx);\n        workers.push(std::thread::spawn(move || {\n            daemon_worker_fn(ctx);\n        }));\n    }\n\n    // Main event loop\n    while !SHUTDOWN_REQUESTED.load(Ordering::Relaxed) {\n        let ready = libc::poll(fds.as_mut_ptr(), nfds as libc::nfds_t, 1000);\n        if ready < 0 {\n            if crate::utility::errno_val() == libc::EINTR {\n                continue;\n            }\n            eprintln!(\"daemon: poll error\");\n            break;\n        }\n\n        // Check and restart crashed pools\n        if let Some(check_fn) = (*config).pool_check_fn {\n            check_fn(sockets, n_pools);\n        }\n\n        if ready == 0 {\n            continue;\n        }\n\n        for i in 0..nfds {\n            if fds[i].revents & libc::POLLIN as i16 == 0 {\n                continue;\n            }\n            let client_fd = libc::accept(fds[i].fd, ptr::null_mut(), ptr::null_mut());\n            if client_fd < 0 {\n                if crate::utility::errno_val() == libc::EINTR\n                    || crate::utility::errno_val() == libc::EAGAIN\n                {\n                    continue;\n                }\n                eprintln!(\"daemon: accept error\");\n                continue;\n            }\n            crate::utility::set_nosigpipe(client_fd);\n            set_socket_timeouts(client_fd, 30);\n\n            let job = DaemonJob {\n                client_fd,\n                conn_type: fd_types[i],\n            };\n            let mut q = ctx.queue.lock().unwrap();\n            q.jobs.push_back(job);\n            ctx.cond.notify_one();\n        }\n    }\n\n    // Wake all workers and join\n    ctx.cond.notify_all();\n    for w in workers {\n        let _ = w.join();\n    }\n\n    // Drain remaining jobs\n    {\n        let mut q = ctx.queue.lock().unwrap();\n        while let Some(job) = q.jobs.pop_front() {\n            libc::close(job.client_fd);\n        }\n    }\n\n    // Close listener sockets\n    for i in 0..nfds {\n        libc::close(fds[i].fd);\n    }\n\n    if !(*config).unix_socket_path.is_null() {\n        libc::unlink((*config).unix_socket_path);\n    }\n}\n\nfn daemon_worker_fn(ctx: Arc<WorkerContext>) {\n    loop {\n        if SHUTDOWN_REQUESTED.load(Ordering::Relaxed) {\n            break;\n        }\n\n        let job = {\n            let mut q = ctx.queue.lock().unwrap();\n            loop {\n                if let Some(job) = q.jobs.pop_front() {\n                    break Some(job);\n                }\n                if SHUTDOWN_REQUESTED.load(Ordering::Relaxed) {\n                    break None;\n                }\n                // Wait with timeout so we recheck shutdown\n                let (guard, _timeout) = ctx\n                    .cond\n                    .wait_timeout(q, std::time::Duration::from_millis(100))\n                    .unwrap();\n                q = guard;\n            }\n        };\n\n        let job = match job {\n            Some(j) => j,\n            None => continue,\n        };\n\n        unsafe {\n            if job.conn_type == 2 {\n                handle_http_connection(\n                    job.client_fd,\n                    ctx.manifest,\n                    ctx.sockets,\n                    ctx.shm_basename,\n                );\n            } else {\n                handle_lp_connection(\n                    job.client_fd,\n                    ctx.manifest,\n                    ctx.sockets,\n                    ctx.shm_basename,\n                );\n            }\n        }\n    }\n}\n\n// Signal handler (must be async-signal-safe)\nextern \"C\" fn daemon_signal_handler_fn(_sig: i32) {\n    SHUTDOWN_REQUESTED.store(true, Ordering::Relaxed);\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/error.rs",
    "content": "use std::ffi::{CString, c_char};\n\n#[derive(Debug, thiserror::Error)]\npub enum MorlocError {\n    #[error(\"shared memory error: {0}\")]\n    Shm(String),\n    #[error(\"packet error: {0}\")]\n    Packet(String),\n    #[error(\"schema error: {0}\")]\n    Schema(String),\n    #[error(\"serialization error: {0}\")]\n    Serialization(String),\n    #[error(\"IPC error: {0}\")]\n    Ipc(String),\n    #[error(\"IO error: {0}\")]\n    Io(#[from] std::io::Error),\n    #[error(\"null pointer\")]\n    NullPointer,\n    #[error(\"{0}\")]\n    Other(String),\n}\n\n/// Write a MorlocError into the C ERRMSG convention.\n///\n/// # Safety\n/// `errmsg` must be a valid, non-dangling pointer to a `*mut c_char` (i.e., `char**`).\n/// The caller is responsible for freeing the allocated string via `CString::from_raw`\n/// or `libc::free`.\npub unsafe fn set_errmsg(errmsg: *mut *mut c_char, err: &MorlocError) {\n    if !errmsg.is_null() {\n        if let Ok(cstr) = CString::new(err.to_string()) {\n            *errmsg = cstr.into_raw();\n        }\n    }\n}\n\n/// Clear the ERRMSG pointer (must be called at FFI entry).\n///\n/// # Safety\n/// `errmsg` must be a valid pointer to a `*mut c_char`.\npub unsafe fn clear_errmsg(errmsg: *mut *mut c_char) {\n    if !errmsg.is_null() {\n        *errmsg = std::ptr::null_mut();\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/eval_ffi.rs",
    "content": "//! Expression evaluator and constructor functions.\n//! Replaces eval.c. Uses HashMap instead of linked-list dict_t.\n\nuse std::collections::HashMap;\nuse std::ffi::{c_char, c_void, CStr};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::manifest_ffi::*;\nuse crate::shm::{self, AbsPtr, RelPtr};\n\n// ── Constructor functions (called by manifest_ffi.rs and daemon.c) ───────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_morloc_bound_var(\n    schema_str: *const c_char,\n    varname: *mut c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut MorlocExpression {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n    let schema = crate::ffi::parse_schema(schema_str, &mut err);\n    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n\n    let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n    if expr.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate bound variable expression\".into()));\n        return ptr::null_mut();\n    }\n    (*expr).etype = MorlocExpressionType::Bnd;\n    (*expr).schema = schema;\n    (*expr).expr.bnd_expr = varname;\n    expr\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_morloc_literal(\n    schema_str: *const c_char,\n    lit: Primitive,\n    errmsg: *mut *mut c_char,\n) -> *mut MorlocExpression {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n    let schema = crate::ffi::parse_schema(schema_str, &mut err);\n    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n\n    let data = libc::malloc(std::mem::size_of::<MorlocData>()) as *mut MorlocData;\n    if data.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate literal data\".into()));\n        return ptr::null_mut();\n    }\n    (*data).is_voidstar = false;\n    (*data).data = DataUnion { lit_val: std::mem::ManuallyDrop::new(lit) };\n\n    let expr = libc::malloc(std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n    if expr.is_null() {\n        libc::free(data as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate literal expression\".into()));\n        return ptr::null_mut();\n    }\n    (*expr).etype = MorlocExpressionType::Dat;\n    (*expr).schema = schema;\n    (*expr).expr.data_expr = data;\n    expr\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_morloc_pattern(\n    schema_str: *const c_char,\n    pattern: *mut MorlocPattern,\n    errmsg: *mut *mut c_char,\n) -> *mut MorlocExpression {\n    clear_errmsg(errmsg);\n    let mut err: *mut c_char = ptr::null_mut();\n    let schema = crate::ffi::parse_schema(schema_str, &mut err);\n    if !err.is_null() { *errmsg = err; return ptr::null_mut(); }\n\n    let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n    if expr.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate pattern expression\".into()));\n        return ptr::null_mut();\n    }\n    (*expr).etype = MorlocExpressionType::Pat;\n    (*expr).schema = schema;\n    (*expr).expr.pattern_expr = pattern;\n    expr\n}\n\n#[no_mangle]\npub extern \"C\" fn make_morloc_pattern_end() -> *mut MorlocPattern {\n    // SAFETY: calloc returns zeroed memory suitable for MorlocPattern.\n    // We initialize all fields before returning.\n    unsafe {\n        let pat = libc::calloc(1, std::mem::size_of::<MorlocPattern>()) as *mut MorlocPattern;\n        if pat.is_null() { return ptr::null_mut(); }\n        (*pat).ptype = MorlocPatternType::End;\n        (*pat).size = 0;\n        (*pat).fields = PatternFields { indices: ptr::null_mut() };\n        (*pat).selectors = ptr::null_mut();\n        pat\n    }\n}\n\n// Varargs constructors (make_morloc_container, make_morloc_app,\n// make_morloc_lambda, make_morloc_interpolation, make_morloc_pattern_idx,\n// make_morloc_pattern_key) are only used by generated C++ pool code.\n// They cannot be implemented in stable Rust due to C-variadic ABI.\n// The C-gcc build path (morloc init) provides them from the original eval.c.\n// The Rust hybrid build does not call them (only morloc_eval and the\n// non-varargs constructors are needed).\n\n// ── Core evaluator ───────────────────────────────────────────────────────────\n\ntype BndVars<'a> = HashMap<&'a str, AbsPtr>;\n\n/// Convert key-based pattern selectors to index-based using the schema's key names.\n///\n/// # Safety\n/// `pattern` and `schema` must be valid, non-null pointers to C-allocated structures.\n/// `schema` keys array must have `schema.size` entries.\nunsafe fn convert_keys_to_indices(\n    pattern: *mut MorlocPattern,\n    schema: *const CSchema,\n) -> Result<(), MorlocError> {\n    let pat = &mut *pattern;\n    let n_params = (*schema).size;\n\n    if n_params > 1 {\n        for i in 0..pat.size {\n            let child_schema = *(*schema).parameters.add(i);\n            convert_keys_to_indices(*pat.selectors.add(i), child_schema)?;\n        }\n    }\n\n    if pat.ptype == MorlocPatternType::ByKey {\n        let indices = libc::calloc(n_params, std::mem::size_of::<usize>()) as *mut usize;\n        for i in 0..pat.size {\n            let key = CStr::from_ptr(*pat.fields.keys.add(i)).to_str().unwrap_or(\"\");\n            let mut found = false;\n            for j in 0..n_params {\n                let record_key = CStr::from_ptr(*(*schema).keys.add(j)).to_str().unwrap_or(\"\");\n                if key == record_key {\n                    found = true;\n                    *indices.add(i) = j;\n                    break;\n                }\n            }\n            if !found {\n                libc::free(indices as *mut c_void);\n                return Err(MorlocError::Other(format!(\"Pattern contains key missing in schema: {}\", key)));\n            }\n            libc::free(*pat.fields.keys.add(i) as *mut c_void);\n        }\n        pat.ptype = MorlocPatternType::ByIndex;\n        libc::free(pat.fields.keys as *mut c_void);\n        pat.fields.indices = indices;\n    }\n\n    Ok(())\n}\n\n/// Extract fields from a voidstar value using a pattern, copying them into dest.\n///\n/// # Safety\n/// All pointer arguments must be valid and point to correctly-typed C structures.\n/// `value` must point to voidstar data matching `value_schema`.\nunsafe fn apply_getter(\n    dest: AbsPtr,\n    return_index: &mut usize,\n    return_schema: *const CSchema,\n    pattern: *mut MorlocPattern,\n    value_schema: *const CSchema,\n    value: AbsPtr,\n) -> Result<AbsPtr, MorlocError> {\n    let pat = &*pattern;\n\n    match pat.ptype {\n        MorlocPatternType::ByIndex => {\n            for i in 0..pat.size {\n                let idx = *pat.fields.indices.add(i);\n                apply_getter(\n                    dest, return_index, return_schema,\n                    *pat.selectors.add(i),\n                    *(*value_schema).parameters.add(idx),\n                    value.add(*(*value_schema).offsets.add(idx)),\n                )?;\n            }\n        }\n        MorlocPatternType::ByKey => {\n            convert_keys_to_indices(pattern, value_schema)?;\n            return apply_getter(dest, return_index, return_schema, pattern, value_schema, value);\n        }\n        MorlocPatternType::End => {\n            let (element_dest, element_width) = if (*return_schema).size > 1 {\n                (dest.add(*(*return_schema).offsets.add(*return_index)),\n                 (*(*(*return_schema).parameters.add(*return_index))).width)\n            } else {\n                (dest, (*return_schema).width)\n            };\n            *return_index += 1;\n            ptr::copy_nonoverlapping(value, element_dest, element_width);\n        }\n    }\n\n    Ok(dest)\n}\n\n/// Copy value into dest, preserving fields not selected by pattern.\n///\n/// # Safety\n/// All pointer arguments must be valid. Schema sizes must match.\nunsafe fn apply_setter_copy(\n    dest: AbsPtr,\n    return_schema: *const CSchema,\n    pattern: *mut MorlocPattern,\n    value_schema: *const CSchema,\n    value: AbsPtr,\n) -> Result<(), MorlocError> {\n    let pat = &*pattern;\n    match pat.ptype {\n        MorlocPatternType::ByKey => {\n            convert_keys_to_indices(pattern, value_schema)?;\n            return apply_setter_copy(dest, return_schema, pattern, value_schema, value);\n        }\n        MorlocPatternType::ByIndex => {\n            if (*value_schema).size != (*return_schema).size {\n                return Err(MorlocError::Other(\"Expected setter return and input sizes to be the same\".into()));\n            }\n            for i in 0..(*value_schema).size {\n                let new_dest = dest.add(*(*return_schema).offsets.add(i));\n                let new_value = value.add(*(*value_schema).offsets.add(i));\n                let mut changed = false;\n                for j in 0..pat.size {\n                    if i == *pat.fields.indices.add(j) {\n                        apply_setter_copy(\n                            new_dest,\n                            *(*return_schema).parameters.add(i),\n                            *pat.selectors.add(j),\n                            *(*value_schema).parameters.add(i),\n                            new_value,\n                        )?;\n                        changed = true;\n                        break;\n                    }\n                }\n                if !changed {\n                    let w = (*(*(*value_schema).parameters.add(i))).width;\n                    ptr::copy_nonoverlapping(new_value, new_dest, w);\n                }\n            }\n        }\n        MorlocPatternType::End => {}\n    }\n    Ok(())\n}\n\n/// Overwrite pattern-selected fields in dest with provided set_values.\n///\n/// # Safety\n/// All pointer arguments must be valid. set_values must have enough entries.\nunsafe fn apply_setter_set(\n    dest: AbsPtr,\n    return_schema: *const CSchema,\n    pattern: *mut MorlocPattern,\n    value_schema: *const CSchema,\n    value: AbsPtr,\n    set_schemas: *mut *mut CSchema,\n    set_values: *mut AbsPtr,\n    set_idx: &mut usize,\n) -> Result<(), MorlocError> {\n    let pat = &*pattern;\n    match pat.ptype {\n        MorlocPatternType::ByIndex => {\n            for pi in 0..pat.size {\n                let di = *pat.fields.indices.add(pi);\n                apply_setter_set(\n                    dest.add(*(*return_schema).offsets.add(di)),\n                    *(*return_schema).parameters.add(di),\n                    *pat.selectors.add(pi),\n                    *(*value_schema).parameters.add(di),\n                    value.add(*(*value_schema).offsets.add(di)),\n                    set_schemas, set_values, set_idx,\n                )?;\n            }\n        }\n        MorlocPatternType::End => {\n            ptr::copy_nonoverlapping(*set_values.add(*set_idx), dest, (*return_schema).width);\n            *set_idx += 1;\n        }\n        MorlocPatternType::ByKey => {\n            return Err(MorlocError::Other(\"Key patterns should have been resolved in copy step\".into()));\n        }\n    }\n    Ok(())\n}\n\n/// Recursively evaluate a morloc expression, writing results into SHM.\n///\n/// # Safety\n/// `expr` must be a valid MorlocExpression pointer (or null for error).\n/// If `dest` is non-null, it must point to `width` bytes of writable SHM.\nunsafe fn morloc_eval_r(\n    expr: *mut MorlocExpression,\n    dest: AbsPtr,\n    width: usize,\n    bndvars: &mut BndVars,\n) -> Result<AbsPtr, MorlocError> {\n    if expr.is_null() {\n        return Err(MorlocError::Other(\"Empty expression\".into()));\n    }\n\n    let schema = (*expr).schema;\n    let (dest, width) = if dest.is_null() {\n        let w = (*schema).width;\n        let d = shm::shcalloc(1, w)?;\n        (d, w)\n    } else {\n        if width != (*schema).width {\n            return Err(MorlocError::Other(\"Unexpected data size\".into()));\n        }\n        (dest, width)\n    };\n\n    match (*expr).etype {\n        MorlocExpressionType::Dat => {\n            let data = (*expr).expr.data_expr;\n            if (*data).is_voidstar {\n                return Ok((*data).data.voidstar as AbsPtr);\n            }\n\n            let stype = (*schema).serial_type;\n            if stype == crate::schema::SerialType::String as u32 {\n                // String: allocate in SHM\n                let s = std::mem::ManuallyDrop::into_inner(ptr::read(&(*data).data.lit_val)).s;\n                let str_size = if s.is_null() { 0 } else { libc::strlen(s) };\n                let str_relptr: RelPtr = if str_size > 0 {\n                    let abs = shm::shmemcpy(s as *const u8, str_size)?;\n                    shm::abs2rel(abs)?\n                } else {\n                    -1isize as RelPtr\n                };\n                let arr = shm::Array { size: str_size, data: str_relptr };\n                ptr::copy_nonoverlapping(&arr as *const shm::Array as *const u8, dest, width);\n            } else if stype == crate::schema::SerialType::Array as u32 {\n                let arr_data = (*data).data.array_val;\n                let arr_size = (*arr_data).size;\n                let elem_schema = (*arr_data).schema;\n                let elem_width = (*elem_schema).width;\n                let arr_reldata: RelPtr = if arr_size > 0 {\n                    let arr_abs = shm::shcalloc(arr_size, elem_width)?;\n                    for i in 0..arr_size {\n                        morloc_eval_r(\n                            *(*arr_data).values.add(i),\n                            arr_abs.add(i * elem_width),\n                            elem_width,\n                            bndvars,\n                        )?;\n                    }\n                    shm::abs2rel(arr_abs)?\n                } else {\n                    -1isize as RelPtr\n                };\n                let arr = shm::Array { size: arr_size, data: arr_reldata };\n                ptr::copy_nonoverlapping(&arr as *const shm::Array as *const u8, dest, width);\n            } else if stype == crate::schema::SerialType::Tuple as u32\n                   || stype == crate::schema::SerialType::Map as u32 {\n                for i in 0..(*schema).size {\n                    let elem_width = (*(*(*schema).parameters.add(i))).width;\n                    let elem_dest = dest.add(*(*schema).offsets.add(i));\n                    let element = *(*data).data.tuple_val.add(i);\n                    morloc_eval_r(element, elem_dest, elem_width, bndvars)?;\n                }\n            } else {\n                // All primitives: just copy width bytes from the union\n                ptr::copy_nonoverlapping(\n                    &(*data).data as *const DataUnion as *const u8,\n                    dest,\n                    width,\n                );\n            }\n        }\n\n        MorlocExpressionType::App => {\n            let app = (*expr).expr.app_expr;\n            let nargs = (*app).nargs;\n\n            // Evaluate all arguments\n            let mut arg_results: Vec<AbsPtr> = Vec::with_capacity(nargs);\n            for i in 0..nargs {\n                let r = morloc_eval_r(*(*app).args.add(i), ptr::null_mut(), 0, bndvars)?;\n                arg_results.push(r);\n            }\n\n            match (*app).atype {\n                MorlocAppExpressionType::Pattern => {\n                    if nargs == 1 {\n                        let mut return_index: usize = 0;\n                        apply_getter(\n                            dest, &mut return_index, schema,\n                            (*app).function.pattern,\n                            (*(*(*app).args)).schema,\n                            arg_results[0],\n                        )?;\n                    } else if nargs > 1 {\n                        // Setter: first arg is the value, rest are set values\n                        let mut set_schemas: Vec<*mut CSchema> = Vec::with_capacity(nargs - 1);\n                        for i in 1..nargs {\n                            set_schemas.push((*(*(*app).args.add(i))).schema);\n                        }\n                        apply_setter_copy(\n                            dest, schema, (*app).function.pattern,\n                            (*(*(*app).args)).schema, arg_results[0],\n                        )?;\n                        let mut set_idx: usize = 0;\n                        apply_setter_set(\n                            dest, schema, (*app).function.pattern,\n                            (*(*(*app).args)).schema, arg_results[0],\n                            set_schemas.as_mut_ptr(), arg_results[1..].as_ptr() as *mut AbsPtr,\n                            &mut set_idx,\n                        )?;\n                    } else {\n                        return Err(MorlocError::Other(\"No arguments provided to pattern\".into()));\n                    }\n                }\n\n                MorlocAppExpressionType::Lambda => {\n                    let lam = (*app).function.lambda;\n                    // Bind arguments\n                    for i in 0..nargs {\n                        let var = CStr::from_ptr(*(*lam).args.add(i)).to_str().unwrap_or(\"\");\n                        bndvars.insert(var, arg_results[i]);\n                    }\n                    morloc_eval_r((*lam).body, dest, width, bndvars)?;\n                    // Clean up bindings\n                    for i in 0..nargs {\n                        let var = CStr::from_ptr(*(*lam).args.add(i)).to_str().unwrap_or(\"\");\n                        bndvars.remove(var);\n                    }\n                }\n\n                MorlocAppExpressionType::Format => {\n                    let strings = (*app).function.fmt;\n                    let mut result_size: usize = 0;\n                    let mut string_lengths: Vec<usize> = Vec::with_capacity(nargs + 1);\n\n                    for i in 0..=nargs {\n                        let len = libc::strlen(*strings.add(i));\n                        string_lengths.push(len);\n                        result_size += len;\n                    }\n                    for i in 0..nargs {\n                        let arr = &*(arg_results[i] as *const shm::Array);\n                        result_size += arr.size;\n                    }\n\n                    let new_string = shm::shmalloc(result_size)?;\n                    let result_array = &mut *(dest as *mut shm::Array);\n                    result_array.size = result_size;\n                    result_array.data = shm::abs2rel(new_string)?;\n\n                    let mut cursor = new_string;\n                    for i in 0..=nargs {\n                        ptr::copy_nonoverlapping(*strings.add(i) as *const u8, cursor, string_lengths[i]);\n                        cursor = cursor.add(string_lengths[i]);\n                        if i < nargs {\n                            let arr = &*(arg_results[i] as *const shm::Array);\n                            if arr.size > 0 {\n                                let arr_data = shm::rel2abs(arr.data)?;\n                                ptr::copy_nonoverlapping(arr_data, cursor, arr.size);\n                                cursor = cursor.add(arr.size);\n                            }\n                        }\n                    }\n                }\n            }\n        }\n\n        MorlocExpressionType::Bnd => {\n            let varname = CStr::from_ptr((*expr).expr.bnd_expr).to_str().unwrap_or(\"\");\n            let bnd_value = bndvars.get(varname).copied()\n                .ok_or_else(|| MorlocError::Other(format!(\"Unbound variable {}\", varname)))?;\n            ptr::copy_nonoverlapping(bnd_value, dest, (*schema).width);\n        }\n\n        MorlocExpressionType::Show => {\n            // Serialize child to JSON string\n            let child = (*expr).expr.unary_expr;\n            let child_schema = (*child).schema;\n            let child_result = morloc_eval_r(child, ptr::null_mut(), 0, bndvars)?;\n\n            extern \"C\" {\n                fn voidstar_to_json_string(data: *const c_void, schema: *const CSchema, errmsg: *mut *mut c_char) -> *mut c_char;\n            }\n            let mut err: *mut c_char = ptr::null_mut();\n            let json = voidstar_to_json_string(child_result as *const c_void, child_schema, &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n\n            let json_len = libc::strlen(json);\n            let str_relptr: RelPtr = if json_len > 0 {\n                let abs = shm::shmemcpy(json as *const u8, json_len)?;\n                libc::free(json as *mut c_void);\n                shm::abs2rel(abs)?\n            } else {\n                libc::free(json as *mut c_void);\n                -1isize as RelPtr\n            };\n            let arr = shm::Array { size: json_len, data: str_relptr };\n            ptr::copy_nonoverlapping(&arr as *const shm::Array as *const u8, dest, width);\n        }\n\n        MorlocExpressionType::Read => {\n            // Deserialize JSON string to typed data, return optional\n            let child = (*expr).expr.unary_expr;\n            let child_result = morloc_eval_r(child, ptr::null_mut(), 0, bndvars)?;\n            let str_arr = &*(child_result as *const shm::Array);\n\n            let opt_dest = dest;\n            let inner_schema = *(*schema).parameters;\n\n            if str_arr.size > 0 {\n                let str_abs = shm::rel2abs(str_arr.data)?;\n                let json_str = libc::malloc(str_arr.size + 1) as *mut c_char;\n                if json_str.is_null() {\n                    return Err(MorlocError::Other(\"Failed to allocate for @read\".into()));\n                }\n                ptr::copy_nonoverlapping(str_abs, json_str as *mut u8, str_arr.size);\n                *json_str.add(str_arr.size) = 0;\n\n                extern \"C\" {\n                    fn read_json_with_schema(dest: *mut u8, json: *mut c_char, schema: *const CSchema, errmsg: *mut *mut c_char) -> *mut u8;\n                }\n                let inner_offset = *(*schema).offsets;\n                let mut parse_err: *mut c_char = ptr::null_mut();\n                let parsed = read_json_with_schema(opt_dest.add(inner_offset), json_str, inner_schema, &mut parse_err);\n                libc::free(json_str as *mut c_void);\n                if !parse_err.is_null() {\n                    libc::free(parse_err as *mut c_void);\n                    *opt_dest = 0; // None\n                } else {\n                    *opt_dest = if parsed.is_null() { 0 } else { 1 };\n                }\n            } else {\n                *opt_dest = 0; // None\n            }\n        }\n\n        MorlocExpressionType::Hash => {\n            // Hash child data and return hex string\n            let child = (*expr).expr.unary_expr;\n            let child_schema = (*child).schema;\n            let child_result = morloc_eval_r(child, ptr::null_mut(), 0, bndvars)?;\n\n            extern \"C\" {\n                fn mlc_hash(data: *const c_void, schema: *const CSchema, errmsg: *mut *mut c_char) -> *mut c_char;\n            }\n            let mut err: *mut c_char = ptr::null_mut();\n            let hex = mlc_hash(child_result as *const c_void, child_schema, &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n\n            let hex_len = libc::strlen(hex);\n            let str_relptr: RelPtr = if hex_len > 0 {\n                let abs = shm::shmemcpy(hex as *const u8, hex_len)?;\n                libc::free(hex as *mut c_void);\n                shm::abs2rel(abs)?\n            } else {\n                libc::free(hex as *mut c_void);\n                -1isize as RelPtr\n            };\n            let arr = shm::Array { size: hex_len, data: str_relptr };\n            ptr::copy_nonoverlapping(&arr as *const shm::Array as *const u8, dest, width);\n        }\n\n        MorlocExpressionType::Save => {\n            // Save value to file at path\n            let save = (*expr).expr.save_expr;\n            let value_expr = (*save).value;\n            let path_expr = (*save).path;\n            let fmt = CStr::from_ptr((*save).format).to_str().unwrap_or(\"voidstar\");\n\n            let value_schema = (*value_expr).schema;\n            let value_result = morloc_eval_r(value_expr, ptr::null_mut(), 0, bndvars)?;\n            let path_result = morloc_eval_r(path_expr, ptr::null_mut(), 0, bndvars)?;\n\n            // Extract path string from voidstar Array\n            let path_arr = &*(path_result as *const shm::Array);\n            let path_abs = shm::rel2abs(path_arr.data)?;\n            let path_cstr = libc::malloc(path_arr.size + 1) as *mut c_char;\n            if path_cstr.is_null() {\n                return Err(MorlocError::Other(\"Failed to allocate for @save path\".into()));\n            }\n            ptr::copy_nonoverlapping(path_abs, path_cstr as *mut u8, path_arr.size);\n            *path_cstr.add(path_arr.size) = 0;\n\n            extern \"C\" {\n                fn mlc_save(data: *const c_void, schema: *const CSchema, path: *const c_char, errmsg: *mut *mut c_char) -> i32;\n                fn mlc_save_json(data: *const c_void, schema: *const CSchema, path: *const c_char, errmsg: *mut *mut c_char) -> i32;\n                fn mlc_save_voidstar(data: *const c_void, schema: *const CSchema, path: *const c_char, errmsg: *mut *mut c_char) -> i32;\n            }\n            let mut err: *mut c_char = ptr::null_mut();\n            let rc = match fmt {\n                \"json\" => mlc_save_json(value_result as *const c_void, value_schema, path_cstr, &mut err),\n                \"msgpack\" => mlc_save(value_result as *const c_void, value_schema, path_cstr, &mut err),\n                _ => mlc_save_voidstar(value_result as *const c_void, value_schema, path_cstr, &mut err),\n            };\n            libc::free(path_cstr as *mut c_void);\n            if rc != 0 && !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            // Return unit (zero-fill dest)\n            ptr::write_bytes(dest, 0, width);\n        }\n\n        MorlocExpressionType::Load => {\n            // Load data from file, return optional\n            let child = (*expr).expr.unary_expr;\n            let child_result = morloc_eval_r(child, ptr::null_mut(), 0, bndvars)?;\n\n            // Extract path string from voidstar Array\n            let path_arr = &*(child_result as *const shm::Array);\n            let path_abs = shm::rel2abs(path_arr.data)?;\n            let path_cstr = libc::malloc(path_arr.size + 1) as *mut c_char;\n            if path_cstr.is_null() {\n                return Err(MorlocError::Other(\"Failed to allocate for @load path\".into()));\n            }\n            ptr::copy_nonoverlapping(path_abs, path_cstr as *mut u8, path_arr.size);\n            *path_cstr.add(path_arr.size) = 0;\n\n            extern \"C\" {\n                fn mlc_load(path: *const c_char, schema: *const CSchema, errmsg: *mut *mut c_char) -> *mut c_void;\n            }\n            let opt_dest = dest;\n            let inner_schema = *(*schema).parameters;\n            let inner_offset = *(*schema).offsets;\n\n            let mut err: *mut c_char = ptr::null_mut();\n            let loaded = mlc_load(path_cstr, inner_schema, &mut err);\n            libc::free(path_cstr as *mut c_void);\n\n            if loaded.is_null() {\n                if !err.is_null() {\n                    libc::free(err as *mut c_void);\n                }\n                *opt_dest = 0; // None\n            } else {\n                // Copy loaded voidstar data into the optional's inner slot\n                let inner_width = (*inner_schema).width;\n                ptr::copy_nonoverlapping(loaded as *const u8, opt_dest.add(inner_offset), inner_width);\n                libc::free(loaded as *mut c_void);\n                *opt_dest = 1; // Some\n            }\n        }\n\n        _ => {\n            return Err(MorlocError::Other(\"Illegal top expression\".into()));\n        }\n    }\n\n    Ok(dest)\n}\n\n// ── Public entry point ────���─────────────────────────────────��────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn morloc_eval(\n    expr: *mut MorlocExpression,\n    return_schema: *mut CSchema,\n    arg_voidstar: *mut *mut u8,\n    arg_schemas: *mut *mut CSchema,\n    nargs: usize,\n    errmsg: *mut *mut c_char,\n) -> AbsPtr {\n    clear_errmsg(errmsg);\n\n    let mut bndvars: BndVars = HashMap::new();\n    let new_expr: *mut MorlocExpression;\n    let mut allocated_wrappers: Vec<*mut c_void> = Vec::new();\n\n    let eval_expr = match (*expr).etype {\n        MorlocExpressionType::Lam | MorlocExpressionType::Pat => {\n            // Wrap CLI args as voidstar data expressions and apply\n            let arg_exprs = libc::calloc(nargs, std::mem::size_of::<*mut MorlocExpression>()) as *mut *mut MorlocExpression;\n            allocated_wrappers.push(arg_exprs as *mut c_void);\n\n            for i in 0..nargs {\n                let ae = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n                (*ae).etype = MorlocExpressionType::Dat;\n                (*ae).schema = *arg_schemas.add(i);\n                let ad = libc::calloc(1, std::mem::size_of::<MorlocData>()) as *mut MorlocData;\n                (*ad).is_voidstar = true;\n                (*ad).data.voidstar = *arg_voidstar.add(i) as *mut c_void;\n                (*ae).expr.data_expr = ad;\n                *arg_exprs.add(i) = ae;\n                allocated_wrappers.push(ad as *mut c_void);\n                allocated_wrappers.push(ae as *mut c_void);\n            }\n\n            let app = libc::calloc(1, std::mem::size_of::<MorlocAppExpression>()) as *mut MorlocAppExpression;\n            (*app).atype = if (*expr).etype == MorlocExpressionType::Lam {\n                (*app).function.lambda = (*expr).expr.lam_expr;\n                MorlocAppExpressionType::Lambda\n            } else {\n                (*app).function.pattern = (*expr).expr.pattern_expr;\n                MorlocAppExpressionType::Pattern\n            };\n            (*app).args = arg_exprs;\n            (*app).nargs = nargs;\n            allocated_wrappers.push(app as *mut c_void);\n\n            new_expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*new_expr).etype = MorlocExpressionType::App;\n            (*new_expr).schema = return_schema;\n            (*new_expr).expr.app_expr = app;\n            allocated_wrappers.push(new_expr as *mut c_void);\n\n            new_expr\n        }\n        _ => expr,\n    };\n\n    let result = morloc_eval_r(eval_expr, ptr::null_mut(), 0, &mut bndvars);\n\n    // Free wrapper nodes\n    for p in &allocated_wrappers {\n        libc::free(*p);\n    }\n\n    match result {\n        Ok(ptr) => ptr,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            ptr::null_mut()\n        }\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/ffi.rs",
    "content": "//! C ABI wrappers for libmorloc.so\n//!\n//! These `extern \"C\"` functions match the signatures in morloc.h.\n//! Internally they call the Rust implementations and convert between\n//! Rust Result<T,E> and the C ERRMSG convention (char** last arg).\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\n\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::schema::{self};\nuse crate::shm::{self, AbsPtr, BlockHeader, RelPtr, ShmHeader, VolPtr};\npub use crate::cschema::CSchema;\n\n// ── Macro for ERRMSG-pattern FFI wrappers ──────────────────────────────────\n\n/// Wrap a Rust Result-returning expression into the C ERRMSG convention.\n/// On success: clears errmsg, returns the value.\n/// On error: sets errmsg, returns $fail.\nmacro_rules! ffi_try {\n    ($errmsg:expr, $fail:expr, $body:expr) => {{\n        unsafe { clear_errmsg($errmsg) };\n        match $body {\n            Ok(val) => val,\n            Err(e) => {\n                unsafe { set_errmsg($errmsg, &e) };\n                $fail\n            }\n        }\n    }};\n}\n\n// CSchema type and conversions are in cschema.rs (always compiled).\n\n// ── SHM functions ──────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn shinit(\n    shm_basename: *const c_char,\n    volume_index: usize,\n    shm_size: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut ShmHeader {\n    let basename = CStr::from_ptr(shm_basename).to_string_lossy();\n    ffi_try!(errmsg, ptr::null_mut(), shm::shinit(&basename, volume_index, shm_size))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shopen(\n    volume_index: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut ShmHeader {\n    ffi_try!(\n        errmsg,\n        ptr::null_mut(),\n        shm::shopen(volume_index).and_then(|opt| opt.ok_or(MorlocError::Shm(\"volume not found\".into())))\n    )\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shclose(errmsg: *mut *mut c_char) -> bool {\n    ffi_try!(errmsg, false, shm::shclose().map(|_| true))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shm_set_fallback_dir(dir: *const c_char) {\n    if !dir.is_null() {\n        let d = CStr::from_ptr(dir).to_string_lossy();\n        shm::shm_set_fallback_dir(&d);\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shmalloc(size: usize, errmsg: *mut *mut c_char) -> *mut c_void {\n    ffi_try!(errmsg, ptr::null_mut(), shm::shmalloc(size).map(|p| p as *mut c_void))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shmemcpy(\n    src: *mut c_void,\n    size: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    ffi_try!(\n        errmsg,\n        ptr::null_mut(),\n        shm::shmemcpy(src as *const u8, size).map(|p| p as *mut c_void)\n    )\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shcalloc(\n    nmemb: usize,\n    size: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    ffi_try!(errmsg, ptr::null_mut(), shm::shcalloc(nmemb, size).map(|p| p as *mut c_void))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shrealloc(\n    ptr: *mut c_void,\n    size: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    // TODO: implement shrealloc in shm.rs\n    let _ = (ptr, size);\n    set_errmsg(errmsg, &MorlocError::Shm(\"shrealloc not yet implemented\".into()));\n    ptr::null_mut()\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shfree(ptr: *mut c_void, errmsg: *mut *mut c_char) -> bool {\n    ffi_try!(errmsg, false, shm::shfree(ptr as AbsPtr).map(|_| true))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn shincref(ptr: *mut c_void, errmsg: *mut *mut c_char) -> bool {\n    ffi_try!(errmsg, false, shm::shincref(ptr as AbsPtr).map(|_| true))\n}\n\n// shfree_by_schema is provided by cli.c\n\n#[no_mangle]\npub unsafe extern \"C\" fn total_shm_size() -> usize {\n    shm::total_shm_size()\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn rel2abs(ptr: RelPtr, errmsg: *mut *mut c_char) -> *mut c_void {\n    ffi_try!(errmsg, ptr::null_mut(), shm::rel2abs(ptr).map(|p| p as *mut c_void))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn abs2rel(ptr: *mut c_void, errmsg: *mut *mut c_char) -> RelPtr {\n    ffi_try!(errmsg, shm::RELNULL, shm::abs2rel(ptr as AbsPtr))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn abs2shm(ptr: *mut c_void, errmsg: *mut *mut c_char) -> *mut ShmHeader {\n    ffi_try!(errmsg, ptr::null_mut(), shm::abs2shm(ptr as AbsPtr))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn abs2blk(ptr: *mut c_void, errmsg: *mut *mut c_char) -> *mut BlockHeader {\n    clear_errmsg(errmsg);\n    if ptr.is_null() {\n        set_errmsg(errmsg, &MorlocError::NullPointer);\n        return ptr::null_mut();\n    }\n    let blk = (ptr as *mut u8).sub(std::mem::size_of::<BlockHeader>()) as *mut BlockHeader;\n    if (*blk).magic != shm::BLK_MAGIC {\n        set_errmsg(errmsg, &MorlocError::Shm(\"Bad block magic\".into()));\n        return ptr::null_mut();\n    }\n    blk\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn vol2rel(ptr: VolPtr, shm_ptr: *const ShmHeader) -> RelPtr {\n    shm::vol2rel(ptr, &*shm_ptr)\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn vol2abs(ptr: VolPtr, shm_ptr: *const ShmHeader) -> *mut c_void {\n    shm::vol2abs(ptr, shm_ptr) as *mut c_void\n}\n\n// ── Schema functions ───────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn parse_schema(\n    schema_str: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut CSchema {\n    clear_errmsg(errmsg);\n    if schema_str.is_null() {\n        set_errmsg(errmsg, &MorlocError::NullPointer);\n        return ptr::null_mut();\n    }\n    let s = CStr::from_ptr(schema_str).to_string_lossy();\n    match schema::parse_schema(&s) {\n        Ok(schema) => CSchema::from_rust(&schema),\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            ptr::null_mut()\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn schema_to_string(schema: *const CSchema) -> *mut c_char {\n    if schema.is_null() {\n        return ptr::null_mut();\n    }\n    let rs = CSchema::to_rust(schema);\n    let s = schema::schema_to_string(&rs);\n    match CString::new(s) {\n        Ok(cs) => cs.into_raw(),\n        Err(_) => ptr::null_mut(),\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn free_schema(schema: *mut CSchema) {\n    CSchema::free(schema);\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn schema_is_fixed_width(schema: *const CSchema) -> bool {\n    if schema.is_null() {\n        return true;\n    }\n    let rs = CSchema::to_rust(schema);\n    rs.is_fixed_width()\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn schema_alignment(schema: *const CSchema) -> usize {\n    if schema.is_null() {\n        return 1;\n    }\n    let rs = CSchema::to_rust(schema);\n    rs.alignment()\n}\n\n// Hash: morloc_xxh64 is provided by utility.c (via xxhash.h inline)\n\n// ── Serialization ──────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn pack_with_schema(\n    mlc: *const c_void,\n    schema: *const CSchema,\n    mpkptr: *mut *mut c_char,\n    mpk_size: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    *mpkptr = ptr::null_mut();\n    *mpk_size = 0;\n\n    let rs = CSchema::to_rust(schema);\n    match crate::mpack::pack_with_schema(mlc as AbsPtr, &rs) {\n        Ok(data) => {\n            *mpk_size = data.len();\n            let buf = libc::malloc(data.len()) as *mut u8;\n            if buf.is_null() {\n                set_errmsg(errmsg, &MorlocError::Shm(\"malloc failed\".into()));\n                return 1;\n            }\n            std::ptr::copy_nonoverlapping(data.as_ptr(), buf, data.len());\n            *mpkptr = buf as *mut c_char;\n            0\n        }\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            1\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn pack(\n    mlc: *const c_void,\n    schema_str: *const c_char,\n    mpkptr: *mut *mut c_char,\n    mpk_size: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    let s = CStr::from_ptr(schema_str).to_string_lossy();\n    let schema = match schema::parse_schema(&s) {\n        Ok(s) => s,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            return 1;\n        }\n    };\n    let cs = CSchema::from_rust(&schema);\n    let result = pack_with_schema(mlc, cs, mpkptr, mpk_size, errmsg);\n    free_schema(cs);\n    result\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn unpack_with_schema(\n    mpk: *const c_char,\n    mpk_size: usize,\n    schema: *const CSchema,\n    mlcptr: *mut *mut c_void,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    *mlcptr = ptr::null_mut();\n\n    let data = std::slice::from_raw_parts(mpk as *const u8, mpk_size);\n    let rs = CSchema::to_rust(schema);\n    match crate::mpack::unpack_with_schema(data, &rs) {\n        Ok(ptr) => {\n            *mlcptr = ptr as *mut c_void;\n            0\n        }\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            1\n        }\n    }\n}\n\n// quoted, print_voidstar, pretty_print_voidstar, read_json_with_schema\n// are provided by json.c\n\n// ── Schema utility functions needed by C code ──────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn calculate_voidstar_size(\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> usize {\n    clear_errmsg(errmsg);\n    if data.is_null() || schema.is_null() {\n        return 0;\n    }\n    let rs = CSchema::to_rust(schema);\n    match calc_voidstar_size_inner(data as *const u8, &rs) {\n        Ok(size) => size,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            0\n        }\n    }\n}\n\npub fn calc_voidstar_size_inner(\n    data: *const u8,\n    schema: &crate::schema::Schema,\n) -> Result<usize, MorlocError> {\n    use crate::schema::SerialType;\n    use crate::shm::{self, Array, Tensor};\n\n    // SAFETY: data points to voidstar data in SHM with layout described by schema.\n    // We only read Array/Tensor headers and follow relptrs to compute total size.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String => {\n                let arr = &*(data as *const Array);\n                Ok(std::mem::size_of::<Array>() + arr.size)\n            }\n            SerialType::Array => {\n                let arr = &*(data as *const Array);\n                let mut size = std::mem::size_of::<Array>();\n                if arr.size == 0 {\n                    return Ok(size);\n                }\n                let elem_schema = &schema.parameters[0];\n                let elem_width = elem_schema.width;\n                size += elem_schema.alignment().saturating_sub(1);\n\n                if schema.is_fixed_width() {\n                    size += elem_width * arr.size;\n                } else {\n                    let elem_data = shm::rel2abs(arr.data)?;\n                    for i in 0..arr.size {\n                        size += calc_voidstar_size_inner(\n                            elem_data.add(i * elem_width),\n                            elem_schema,\n                        )?;\n                    }\n                }\n                Ok(size)\n            }\n            SerialType::Optional => {\n                let tag = *data;\n                let mut size = schema.width;\n                if tag != 0 {\n                    let inner_offset = schema.offsets.first().copied().unwrap_or(\n                        shm::align_up(1, schema.parameters[0].alignment().max(1)),\n                    );\n                    let inner_total = calc_voidstar_size_inner(\n                        data.add(inner_offset),\n                        &schema.parameters[0],\n                    )?;\n                    if inner_total > schema.parameters[0].width {\n                        size += inner_total - schema.parameters[0].width;\n                    }\n                }\n                Ok(size)\n            }\n            SerialType::Tensor => {\n                let tensor = &*(data as *const Tensor);\n                let ndim = schema.offsets.first().copied().unwrap_or(0);\n                let elem_width = schema.parameters[0].width;\n                let mut size = std::mem::size_of::<Tensor>();\n                size += schema.parameters[0].alignment().saturating_sub(1);\n                size += ndim * std::mem::size_of::<i64>();\n                size += schema.parameters[0].alignment().saturating_sub(1);\n                size += tensor.total_elements * elem_width;\n                Ok(size)\n            }\n            SerialType::Tuple | SerialType::Map => {\n                if schema.is_fixed_width() {\n                    Ok(schema.width)\n                } else {\n                    let mut size = schema.width;\n                    for i in 0..schema.parameters.len() {\n                        let elem_total = calc_voidstar_size_inner(\n                            data.add(schema.offsets[i]),\n                            &schema.parameters[i],\n                        )?;\n                        if elem_total > schema.parameters[i].width {\n                            size += elem_total - schema.parameters[i].width;\n                        }\n                    }\n                    Ok(size)\n                }\n            }\n            _ => Ok(schema.width),\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn get_ptr(\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    clear_errmsg(errmsg);\n    if schema.is_null() {\n        return ptr::null_mut();\n    }\n    let rs = CSchema::to_rust(schema);\n    ffi_try!(errmsg, ptr::null_mut(), shm::shmalloc(rs.width).map(|p| p as *mut c_void))\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/hash.rs",
    "content": "//! xxHash64 wrapper using the twox-hash crate.\n//! Replaces the 1500-line xxhash.h header.\n\nuse std::hash::Hasher;\nuse twox_hash::XxHash64;\n\nconst DEFAULT_SEED: u64 = 0;\n\n/// Compute xxHash64 of a byte slice with the default seed.\npub fn xxh64(data: &[u8]) -> u64 {\n    let mut hasher = XxHash64::with_seed(DEFAULT_SEED);\n    hasher.write(data);\n    hasher.finish()\n}\n\n/// Compute xxHash64 with a custom seed.\npub fn xxh64_with_seed(data: &[u8], seed: u64) -> u64 {\n    let mut hasher = XxHash64::with_seed(seed);\n    hasher.write(data);\n    hasher.finish()\n}\n\n/// Mix multiple hash values together (for composite keys).\npub fn mix(a: u64, b: u64) -> u64 {\n    // Use xxHash to mix two 64-bit values\n    let mut hasher = XxHash64::with_seed(a);\n    hasher.write(&b.to_le_bytes());\n    hasher.finish()\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_xxh64_empty() {\n        let h = xxh64(b\"\");\n        assert_ne!(h, 0); // xxHash of empty with seed 0 is a known non-zero value\n    }\n\n    #[test]\n    fn test_xxh64_deterministic() {\n        let a = xxh64(b\"hello\");\n        let b = xxh64(b\"hello\");\n        assert_eq!(a, b);\n    }\n\n    #[test]\n    fn test_xxh64_different_inputs() {\n        let a = xxh64(b\"hello\");\n        let b = xxh64(b\"world\");\n        assert_ne!(a, b);\n    }\n\n    #[test]\n    fn test_mix_commutative_ish() {\n        // mix is not commutative by design\n        let ab = mix(1, 2);\n        let ba = mix(2, 1);\n        assert_ne!(ab, ba);\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/http_ffi.rs",
    "content": "//! C ABI wrappers for HTTP request/response handling.\n//! Replaces http.c.\n\nuse std::ffi::{c_char, c_void};\nuse std::ptr;\n\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\nconst HTTP_MAX_HEADERS: usize = 8192;\nconst HTTP_MAX_REQUEST: usize = 4 * 1024 * 1024;\n\n// ── C-compatible types ───────────────────────────────────────────────────────\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum HttpMethod {\n    Get = 0,\n    Post = 1,\n    Delete = 2,\n    Options = 3,\n}\n\n#[repr(C)]\npub struct HttpRequest {\n    pub method: HttpMethod,\n    pub path: [c_char; 256],\n    pub body: *mut c_char,\n    pub body_len: usize,\n}\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum DaemonMethod {\n    Call = 0,\n    Discover = 1,\n    Health = 2,\n    Eval = 3,\n    Typecheck = 4,\n    Bind = 5,\n    Bindings = 6,\n    Unbind = 7,\n}\n\n#[repr(C)]\npub struct DaemonRequest {\n    pub id: *mut c_char,\n    pub method: DaemonMethod,\n    pub command: *mut c_char,\n    pub args_json: *mut c_char,\n    pub expr: *mut c_char,\n    pub name: *mut c_char,\n}\n\n// ── http_parse_request ───────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn http_parse_request(\n    fd: i32,\n    errmsg: *mut *mut c_char,\n) -> *mut HttpRequest {\n    clear_errmsg(errmsg);\n\n    // Read headers byte by byte until \\r\\n\\r\\n\n    let mut header_buf = vec![0u8; HTTP_MAX_HEADERS];\n    let mut header_len: usize = 0;\n    let mut header_end_pos: Option<usize> = None;\n\n    while header_len < HTTP_MAX_HEADERS - 1 {\n        let n = libc::recv(fd, header_buf.as_mut_ptr().add(header_len) as *mut c_void, 1, 0);\n        if n <= 0 {\n            set_errmsg(errmsg, &MorlocError::Other(\"Connection closed while reading HTTP headers\".into()));\n            return ptr::null_mut();\n        }\n        header_len += 1;\n\n        if header_len >= 4 {\n            let tail = &header_buf[header_len - 4..header_len];\n            if tail == b\"\\r\\n\\r\\n\" {\n                header_end_pos = Some(header_len - 4);\n                break;\n            }\n        }\n    }\n\n    let header_end = match header_end_pos {\n        Some(p) => p,\n        None => {\n            set_errmsg(errmsg, &MorlocError::Other(\"HTTP headers too large or malformed\".into()));\n            return ptr::null_mut();\n        }\n    };\n\n    let header_str = std::str::from_utf8(&header_buf[..header_len]).unwrap_or(\"\");\n\n    // Allocate request\n    let req = libc::calloc(1, std::mem::size_of::<HttpRequest>()) as *mut HttpRequest;\n    if req.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate http_request_t\".into()));\n        return ptr::null_mut();\n    }\n\n    // Parse method\n    if header_str.starts_with(\"GET \") {\n        (*req).method = HttpMethod::Get;\n    } else if header_str.starts_with(\"POST \") {\n        (*req).method = HttpMethod::Post;\n    } else if header_str.starts_with(\"DELETE \") {\n        (*req).method = HttpMethod::Delete;\n    } else if header_str.starts_with(\"OPTIONS \") {\n        (*req).method = HttpMethod::Options;\n    } else {\n        libc::free(req as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Other(\"Unsupported HTTP method\".into()));\n        return ptr::null_mut();\n    }\n\n    // Parse path\n    let first_space = header_str.find(' ').unwrap_or(0) + 1;\n    let path_end = header_str[first_space..].find(' ').map(|p| first_space + p).unwrap_or(first_space);\n    let path = &header_str[first_space..path_end];\n    // Strip query string\n    let path = path.split('?').next().unwrap_or(path);\n    let path_len = path.len().min(255);\n    ptr::copy_nonoverlapping(path.as_ptr(), (*req).path.as_mut_ptr() as *mut u8, path_len);\n    (*req).path[path_len] = 0;\n\n    // Find Content-Length\n    let mut content_length: usize = 0;\n    let header_lower = header_str.to_ascii_lowercase();\n    if let Some(pos) = header_lower.find(\"content-length:\") {\n        let after = &header_str[pos + 15..];\n        let trimmed = after.trim_start();\n        if let Some(end) = trimmed.find(|c: char| !c.is_ascii_digit()) {\n            content_length = trimmed[..end].parse().unwrap_or(0);\n        } else {\n            content_length = trimmed.parse().unwrap_or(0);\n        }\n    }\n\n    // Read body\n    if content_length > 0 {\n        if content_length > HTTP_MAX_REQUEST {\n            libc::free(req as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(format!(\"HTTP body too large: {} bytes\", content_length)));\n            return ptr::null_mut();\n        }\n\n        let body = libc::malloc(content_length + 1) as *mut u8;\n        if body.is_null() {\n            libc::free(req as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate HTTP body buffer\".into()));\n            return ptr::null_mut();\n        }\n\n        // Some body bytes may be in header_buf after \\r\\n\\r\\n\n        let after_headers = header_end + 4;\n        let already_read = (header_len - after_headers).min(content_length);\n        if already_read > 0 {\n            ptr::copy_nonoverlapping(header_buf.as_ptr().add(after_headers), body, already_read);\n        }\n\n        let mut total = already_read;\n        while total < content_length {\n            let n = libc::recv(fd, body.add(total) as *mut c_void, content_length - total, 0);\n            if n <= 0 {\n                libc::free(body as *mut c_void);\n                libc::free(req as *mut c_void);\n                set_errmsg(errmsg, &MorlocError::Other(\"Connection closed while reading HTTP body\".into()));\n                return ptr::null_mut();\n            }\n            total += n as usize;\n        }\n        *body.add(content_length) = 0;\n        (*req).body = body as *mut c_char;\n        (*req).body_len = content_length;\n    }\n\n    req\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn http_free_request(req: *mut HttpRequest) {\n    if req.is_null() { return; }\n    if !(*req).body.is_null() {\n        libc::free((*req).body as *mut c_void);\n    }\n    libc::free(req as *mut c_void);\n}\n\n// ── http_write_response ──────────────────────────────────────────────────────\n\nfn http_status_text(status: i32) -> &'static str {\n    match status {\n        200 => \"OK\",\n        400 => \"Bad Request\",\n        404 => \"Not Found\",\n        405 => \"Method Not Allowed\",\n        500 => \"Internal Server Error\",\n        _ => \"Unknown\",\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn http_write_response(\n    fd: i32,\n    status: i32,\n    content_type: *const c_char,\n    body: *const c_char,\n    body_len: usize,\n) -> bool {\n    let ct = if content_type.is_null() {\n        \"application/json\"\n    } else {\n        std::ffi::CStr::from_ptr(content_type).to_str().unwrap_or(\"application/json\")\n    };\n\n    let header = format!(\n        \"HTTP/1.1 {} {}\\r\\n\\\n         Content-Type: {}\\r\\n\\\n         Content-Length: {}\\r\\n\\\n         Connection: close\\r\\n\\\n         Access-Control-Allow-Origin: *\\r\\n\\\n         Access-Control-Allow-Methods: GET, POST, OPTIONS\\r\\n\\\n         Access-Control-Allow-Headers: Content-Type\\r\\n\\\n         \\r\\n\",\n        status, http_status_text(status), ct, body_len\n    );\n\n    let n = libc::send(fd, header.as_ptr() as *const c_void, header.len(), crate::utility::SEND_NOSIGNAL);\n    if n < 0 { return false; }\n\n    if !body.is_null() && body_len > 0 {\n        let mut total: usize = 0;\n        while total < body_len {\n            let n = libc::send(fd, (body as *const u8).add(total) as *const c_void, body_len - total, crate::utility::SEND_NOSIGNAL);\n            if n <= 0 { return false; }\n            total += n as usize;\n        }\n    }\n\n    true\n}\n\n// ── http_to_daemon_request ───────────────────────────────────────────────────\n\n/// Extract a JSON string value after a key like \"expr\": \"...\"\nfn extract_json_string(body: &str, key: &str) -> Option<String> {\n    let search = format!(\"\\\"{}\\\"\", key);\n    let pos = body.find(&search)?;\n    let after = &body[pos + search.len()..];\n    let after = after.trim_start();\n    let after = after.strip_prefix(':')?;\n    let after = after.trim_start();\n    if !after.starts_with('\"') { return None; }\n    let after = &after[1..]; // skip opening quote\n    let mut result = String::new();\n    let mut chars = after.chars();\n    loop {\n        match chars.next() {\n            Some('\\\\') => {\n                if let Some(c) = chars.next() {\n                    result.push(c);\n                }\n            }\n            Some('\"') => break,\n            Some(c) => result.push(c),\n            None => break,\n        }\n    }\n    Some(result)\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn http_to_daemon_request(\n    req: *mut HttpRequest,\n    errmsg: *mut *mut c_char,\n) -> *mut DaemonRequest {\n    clear_errmsg(errmsg);\n\n    let dreq = libc::calloc(1, std::mem::size_of::<DaemonRequest>()) as *mut DaemonRequest;\n    if dreq.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to allocate daemon_request_t\".into()));\n        return ptr::null_mut();\n    }\n\n    let path = std::ffi::CStr::from_ptr((*req).path.as_ptr())\n        .to_str().unwrap_or(\"\");\n    let method = (*req).method;\n\n    let body_str = if !(*req).body.is_null() && (*req).body_len > 0 {\n        std::str::from_utf8(std::slice::from_raw_parts((*req).body as *const u8, (*req).body_len))\n            .unwrap_or(\"\")\n    } else {\n        \"\"\n    };\n\n    // GET /health\n    if method == HttpMethod::Get && path == \"/health\" {\n        (*dreq).method = DaemonMethod::Health;\n        return dreq;\n    }\n\n    // GET /discover\n    if method == HttpMethod::Get && path == \"/discover\" {\n        (*dreq).method = DaemonMethod::Discover;\n        return dreq;\n    }\n\n    // POST /eval\n    if method == HttpMethod::Post && path == \"/eval\" {\n        (*dreq).method = DaemonMethod::Eval;\n        if let Some(expr) = extract_json_string(body_str, \"expr\") {\n            (*dreq).expr = libc::strdup(expr.as_ptr() as *const c_char);\n            // strdup from Rust string - need null terminated\n            let c = std::ffi::CString::new(expr).unwrap_or_default();\n            (*dreq).expr = libc::strdup(c.as_ptr());\n        }\n        if (*dreq).expr.is_null() {\n            libc::free(dreq as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"Missing 'expr' field in /eval request body\".into()));\n            return ptr::null_mut();\n        }\n        return dreq;\n    }\n\n    // POST /typecheck\n    if method == HttpMethod::Post && path == \"/typecheck\" {\n        (*dreq).method = DaemonMethod::Typecheck;\n        if let Some(expr) = extract_json_string(body_str, \"expr\") {\n            let c = std::ffi::CString::new(expr).unwrap_or_default();\n            (*dreq).expr = libc::strdup(c.as_ptr());\n        }\n        if (*dreq).expr.is_null() {\n            libc::free(dreq as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"Missing 'expr' field in /typecheck request body\".into()));\n            return ptr::null_mut();\n        }\n        return dreq;\n    }\n\n    // POST /bind\n    if method == HttpMethod::Post && path == \"/bind\" {\n        (*dreq).method = DaemonMethod::Bind;\n        if let Some(expr) = extract_json_string(body_str, \"expr\") {\n            let c = std::ffi::CString::new(expr).unwrap_or_default();\n            (*dreq).expr = libc::strdup(c.as_ptr());\n        }\n        if let Some(name) = extract_json_string(body_str, \"name\") {\n            let c = std::ffi::CString::new(name).unwrap_or_default();\n            (*dreq).name = libc::strdup(c.as_ptr());\n        }\n        if (*dreq).expr.is_null() {\n            libc::free(dreq as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"Missing 'expr' field in /bind request body\".into()));\n            return ptr::null_mut();\n        }\n        return dreq;\n    }\n\n    // GET /bindings\n    if method == HttpMethod::Get && path == \"/bindings\" {\n        (*dreq).method = DaemonMethod::Bindings;\n        return dreq;\n    }\n\n    // DELETE /bindings/<name>\n    if method == HttpMethod::Delete && path.starts_with(\"/bindings/\") {\n        let name = &path[10..];\n        if name.is_empty() {\n            libc::free(dreq as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"Missing binding name in /bindings/ path\".into()));\n            return ptr::null_mut();\n        }\n        (*dreq).method = DaemonMethod::Unbind;\n        let c = std::ffi::CString::new(name).unwrap_or_default();\n        (*dreq).name = libc::strdup(c.as_ptr());\n        return dreq;\n    }\n\n    // POST /call/<command>\n    if method == HttpMethod::Post && path.starts_with(\"/call/\") {\n        let cmd_name = &path[6..];\n        if cmd_name.is_empty() {\n            libc::free(dreq as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"Missing command name in /call/ path\".into()));\n            return ptr::null_mut();\n        }\n        (*dreq).method = DaemonMethod::Call;\n        let c = std::ffi::CString::new(cmd_name).unwrap_or_default();\n        (*dreq).command = libc::strdup(c.as_ptr());\n\n        // Parse body\n        let trimmed = body_str.trim();\n        if trimmed.starts_with('[') {\n            let c = std::ffi::CString::new(trimmed).unwrap_or_default();\n            (*dreq).args_json = libc::strdup(c.as_ptr());\n        } else if trimmed.starts_with('{') {\n            // Extract \"args\" array\n            if let Some(args_pos) = trimmed.find(\"\\\"args\\\"\") {\n                let after = &trimmed[args_pos + 6..];\n                let after = after.trim_start().strip_prefix(':').unwrap_or(after).trim_start();\n                if after.starts_with('[') {\n                    // Find matching ]\n                    let mut depth = 0i32;\n                    let mut in_string = false;\n                    let mut end = 0;\n                    for (i, ch) in after.chars().enumerate() {\n                        if in_string {\n                            if ch == '\\\\' { continue; }\n                            if ch == '\"' { in_string = false; }\n                        } else {\n                            if ch == '\"' { in_string = true; }\n                            else if ch == '[' { depth += 1; }\n                            else if ch == ']' { depth -= 1; if depth == 0 { end = i + 1; break; } }\n                        }\n                    }\n                    if end > 0 {\n                        let arr = &after[..end];\n                        let c = std::ffi::CString::new(arr).unwrap_or_default();\n                        (*dreq).args_json = libc::strdup(c.as_ptr());\n                    }\n                }\n            }\n        }\n        return dreq;\n    }\n\n    // OPTIONS (CORS preflight)\n    if method == HttpMethod::Options {\n        (*dreq).method = DaemonMethod::Health;\n        return dreq;\n    }\n\n    libc::free(dreq as *mut c_void);\n    let method_str = match method {\n        HttpMethod::Get => \"GET\",\n        HttpMethod::Post => \"POST\",\n        HttpMethod::Delete => \"DELETE\",\n        HttpMethod::Options => \"OPTIONS\",\n    };\n    set_errmsg(errmsg, &MorlocError::Other(format!(\"Unknown HTTP endpoint: {} {}\", method_str, path)));\n    ptr::null_mut()\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/intrinsics.rs",
    "content": "//! Intrinsic functions for morloc: save/load/show/read/hash.\n//! Replaces intrinsics.c. These are thin wrappers around serialization functions.\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\n// ── mlc_save: serialize to msgpack file ────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_save(\n    data: *const c_void,\n    schema: *const CSchema,\n    path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn pack_with_schema(\n            mlc: *const c_void, schema: *const CSchema,\n            mpk: *mut *mut c_char, mpk_size: *mut usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n        fn write_atomic(\n            filename: *const c_char, data: *const u8, size: usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let mut mpk: *mut c_char = ptr::null_mut();\n    let mut mpk_size: usize = 0;\n\n    let rc = pack_with_schema(data, schema, &mut mpk, &mut mpk_size, &mut err);\n    if rc != 0 {\n        *errmsg = err;\n        return 1;\n    }\n\n    let wrc = write_atomic(path, mpk as *const u8, mpk_size, &mut err);\n    libc::free(mpk as *mut c_void);\n    if wrc != 0 {\n        *errmsg = err;\n        return 1;\n    }\n    0\n}\n\n// ── mlc_save_json: serialize to JSON file ──────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_save_json(\n    data: *const c_void,\n    schema: *const CSchema,\n    path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn voidstar_to_json_string(\n            data: *const c_void, schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut c_char;\n        fn write_atomic(\n            filename: *const c_char, data: *const u8, size: usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let json = voidstar_to_json_string(data, schema, &mut err);\n    if json.is_null() {\n        *errmsg = err;\n        return 1;\n    }\n\n    let json_len = libc::strlen(json);\n    let wrc = write_atomic(path, json as *const u8, json_len, &mut err);\n    libc::free(json as *mut c_void);\n    if wrc != 0 {\n        *errmsg = err;\n        return 1;\n    }\n    0\n}\n\n// ── mlc_save_voidstar: serialize to binary voidstar file ───────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_save_voidstar(\n    data: *const c_void,\n    schema: *const CSchema,\n    path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn flatten_voidstar_to_buffer(\n            data: *const c_void, schema: *const CSchema,\n            out_buf: *mut *mut u8, out_size: *mut usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n        fn write_binary_fd(\n            fd: i32, buf: *const c_char, count: usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n\n    // Get directory for temp file\n    let path_str = CStr::from_ptr(path).to_string_lossy();\n    let parent = std::path::Path::new(path_str.as_ref()).parent();\n    let dir = match parent {\n        Some(p) if !p.as_os_str().is_empty() => p.to_string_lossy().into_owned(),\n        _ => \".\".to_string(),\n    };\n\n    let tmp_template = format!(\"{}/morloc-tmp_XXXXXX\\0\", dir);\n    let mut tmp_buf: Vec<u8> = tmp_template.into_bytes();\n    let fd = libc::mkstemp(tmp_buf.as_mut_ptr() as *mut c_char);\n    if fd < 0 {\n        set_errmsg(errmsg, &MorlocError::Io(std::io::Error::last_os_error()));\n        return 1;\n    }\n\n    // Write packet header placeholder\n    let header_size = std::mem::size_of::<crate::packet::PacketHeader>();\n    let zeros = vec![0u8; header_size];\n    if write_binary_fd(fd, zeros.as_ptr() as *const c_char, header_size, &mut err) != 0 {\n        libc::close(fd);\n        libc::unlink(tmp_buf.as_ptr() as *const c_char);\n        *errmsg = err;\n        return 1;\n    }\n\n    // Flatten voidstar\n    let mut blob: *mut u8 = ptr::null_mut();\n    let mut blob_size: usize = 0;\n    if flatten_voidstar_to_buffer(data, schema, &mut blob, &mut blob_size, &mut err) != 0 {\n        libc::close(fd);\n        libc::unlink(tmp_buf.as_ptr() as *const c_char);\n        *errmsg = err;\n        return 1;\n    }\n\n    // Write flattened data\n    if write_binary_fd(fd, blob as *const c_char, blob_size, &mut err) != 0 {\n        libc::free(blob as *mut c_void);\n        libc::close(fd);\n        libc::unlink(tmp_buf.as_ptr() as *const c_char);\n        *errmsg = err;\n        return 1;\n    }\n    libc::free(blob as *mut c_void);\n\n    // Seek back and write real header\n    libc::lseek(fd, 0, libc::SEEK_SET);\n    let header = crate::packet::PacketHeader::data_mesg(\n        crate::packet::PACKET_FORMAT_VOIDSTAR,\n        blob_size as u64,\n    );\n    let hdr_bytes = header.to_bytes();\n    write_binary_fd(fd, hdr_bytes.as_ptr() as *const c_char, hdr_bytes.len(), &mut err);\n\n    libc::fsync(fd);\n    libc::close(fd);\n\n    // Atomic rename\n    if libc::rename(tmp_buf.as_ptr() as *const c_char, path) != 0 {\n        libc::unlink(tmp_buf.as_ptr() as *const c_char);\n        set_errmsg(errmsg, &MorlocError::Io(std::io::Error::last_os_error()));\n        return 1;\n    }\n\n    0\n}\n\n// ── mlc_load: load from file (auto-detect format) ─────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_load(\n    path: *const c_char,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn file_exists(filename: *const c_char) -> bool;\n        fn read_binary_file(\n            filename: *const c_char, file_size: *mut usize,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n        fn load_morloc_data_file(\n            path: *const c_char, data: *mut u8, data_size: usize,\n            schema: *const CSchema, errmsg: *mut *mut c_char,\n        ) -> *mut c_void;\n    }\n\n    if !file_exists(path) {\n        return ptr::null_mut();\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let mut file_size: usize = 0;\n    let data = read_binary_file(path, &mut file_size, &mut err);\n    if data.is_null() {\n        if !err.is_null() {\n            let path_str = CStr::from_ptr(path).to_string_lossy();\n            let err_str = CStr::from_ptr(err).to_string_lossy();\n            eprintln!(\"@load warning ({}): {}\", path_str, err_str);\n            libc::free(err as *mut libc::c_void);\n        }\n        return ptr::null_mut();\n    }\n\n    let result = load_morloc_data_file(path, data, file_size, schema, &mut err);\n    if result.is_null() && !err.is_null() {\n        let path_str = CStr::from_ptr(path).to_string_lossy();\n        let err_str = CStr::from_ptr(err).to_string_lossy();\n        eprintln!(\"@load warning ({}): {}\", path_str, err_str);\n        libc::free(err as *mut libc::c_void);\n    }\n    result\n}\n\n// ── mlc_hash: hash voidstar data ───────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_hash(\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let hash = crate::cache::hash_voidstar(data, schema, 0, &mut err);\n    if !err.is_null() {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    let hex = format!(\"{:016x}\", hash);\n    match CString::new(hex) {\n        Ok(cs) => cs.into_raw(),\n        Err(_) => {\n            set_errmsg(errmsg, &MorlocError::Other(\"CString error\".into()));\n            ptr::null_mut()\n        }\n    }\n}\n\n// ── mlc_show: serialize to JSON string ─────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_show(\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn voidstar_to_json_string(\n            data: *const c_void, schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut c_char;\n    }\n\n    voidstar_to_json_string(data, schema, errmsg)\n}\n\n// ── mlc_read: deserialize from JSON string ─────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn mlc_read(\n    json_str: *const c_char,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_void {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn read_json_with_schema(\n            dest: *mut u8, json: *mut c_char, schema: *const CSchema,\n            errmsg: *mut *mut c_char,\n        ) -> *mut u8;\n    }\n\n    let json_copy = libc::strdup(json_str);\n    if json_copy.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"strdup failed\".into()));\n        return ptr::null_mut();\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let result = read_json_with_schema(ptr::null_mut(), json_copy, schema, &mut err);\n    libc::free(json_copy as *mut c_void);\n    if result.is_null() {\n        if !err.is_null() {\n            libc::free(err as *mut c_void);\n        }\n    }\n    result as *mut c_void\n}\n\n// write_voidstar_binary is provided by packet.c (still C)\n// It will be ported when packet.c is ported to Rust.\n\n// Unused Rust implementation kept for future use\n#[allow(dead_code)]\nunsafe fn _write_voidstar_binary_rust(\n    fd: i32,\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> isize {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn flatten_voidstar_to_buffer(\n            data: *const c_void, schema: *const CSchema,\n            out_buf: *mut *mut u8, out_size: *mut usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n        fn write_binary_fd(\n            fd: i32, buf: *const c_char, count: usize,\n            errmsg: *mut *mut c_char,\n        ) -> i32;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let mut blob: *mut u8 = ptr::null_mut();\n    let mut blob_size: usize = 0;\n\n    if flatten_voidstar_to_buffer(data, schema, &mut blob, &mut blob_size, &mut err) != 0 {\n        *errmsg = err;\n        return -1;\n    }\n\n    if write_binary_fd(fd, blob as *const c_char, blob_size, &mut err) != 0 {\n        libc::free(blob as *mut c_void);\n        *errmsg = err;\n        return -1;\n    }\n\n    libc::free(blob as *mut c_void);\n    blob_size as isize\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/ipc.rs",
    "content": "//! Unix domain socket IPC for communication between nexus and language pools.\n//! Replaces ipc.c.\n\nuse crate::error::MorlocError;\nuse crate::packet::PacketHeader;\nuse std::io::{Read, Write};\nuse std::os::unix::net::UnixStream;\nuse std::path::Path;\n\n/// Send a packet (header + payload) over a Unix stream socket and receive the response.\npub fn send_and_receive(\n    socket_path: &Path,\n    header: &PacketHeader,\n    payload: &[u8],\n) -> Result<(PacketHeader, Vec<u8>), MorlocError> {\n    let mut stream = UnixStream::connect(socket_path).map_err(|e| {\n        MorlocError::Ipc(format!(\n            \"failed to connect to {}: {e}\",\n            socket_path.display()\n        ))\n    })?;\n\n    // Send header\n    let header_bytes = header.to_bytes();\n    stream\n        .write_all(&header_bytes)\n        .map_err(|e| MorlocError::Ipc(format!(\"failed to send header: {e}\")))?;\n\n    // Send payload\n    if !payload.is_empty() {\n        stream\n            .write_all(payload)\n            .map_err(|e| MorlocError::Ipc(format!(\"failed to send payload: {e}\")))?;\n    }\n\n    // Read response header\n    let mut resp_header_bytes = [0u8; 32];\n    stream\n        .read_exact(&mut resp_header_bytes)\n        .map_err(|e| MorlocError::Ipc(format!(\"failed to read response header: {e}\")))?;\n\n    let resp_header = PacketHeader::from_bytes(&resp_header_bytes)?;\n\n    // Read response payload\n    let payload_len = resp_header.length as usize;\n    let mut resp_payload = vec![0u8; payload_len];\n    if payload_len > 0 {\n        stream\n            .read_exact(&mut resp_payload)\n            .map_err(|e| MorlocError::Ipc(format!(\"failed to read response payload: {e}\")))?;\n    }\n\n    Ok((resp_header, resp_payload))\n}\n\n/// Read a single packet from a connected stream.\npub fn read_packet(stream: &mut UnixStream) -> Result<(PacketHeader, Vec<u8>), MorlocError> {\n    let mut header_bytes = [0u8; 32];\n    stream\n        .read_exact(&mut header_bytes)\n        .map_err(|e| MorlocError::Ipc(format!(\"failed to read packet header: {e}\")))?;\n\n    let header = PacketHeader::from_bytes(&header_bytes)?;\n\n    // Skip metadata between header and payload\n    let skip = header.offset as usize - 32;\n    if skip > 0 {\n        let mut discard = vec![0u8; skip];\n        stream\n            .read_exact(&mut discard)\n            .map_err(|e| MorlocError::Ipc(format!(\"failed to skip metadata: {e}\")))?;\n    }\n\n    let payload_len = header.length as usize;\n    let mut payload = vec![0u8; payload_len];\n    if payload_len > 0 {\n        stream\n            .read_exact(&mut payload)\n            .map_err(|e| MorlocError::Ipc(format!(\"failed to read payload: {e}\")))?;\n    }\n\n    Ok((header, payload))\n}\n\n/// Send a packet over a connected stream.\npub fn send_packet(\n    stream: &mut UnixStream,\n    header: &PacketHeader,\n    payload: &[u8],\n) -> Result<(), MorlocError> {\n    let header_bytes = header.to_bytes();\n    stream\n        .write_all(&header_bytes)\n        .map_err(|e| MorlocError::Ipc(format!(\"failed to send header: {e}\")))?;\n    if !payload.is_empty() {\n        stream\n            .write_all(payload)\n            .map_err(|e| MorlocError::Ipc(format!(\"failed to send payload: {e}\")))?;\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/ipc_ffi.rs",
    "content": "//! C ABI wrappers for IPC functions.\n//! Replaces ipc.c with Rust implementations of Unix domain socket operations.\n\nuse std::ffi::{c_char, c_void, CStr};\nuse std::ptr;\n\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\n// ── C types from call.h ──────────────────────────────────────────────────────\n\n#[repr(C)]\npub struct ClientList {\n    pub fd: i32,\n    pub next: *mut ClientList,\n}\n\n// language_daemon_t has fd_set which is 128 bytes on Linux.\n// We represent it as an opaque struct and use libc calls.\n#[repr(C)]\npub struct LanguageDaemon {\n    pub socket_path: *mut c_char,\n    pub tmpdir: *mut c_char,\n    pub shm_basename: *mut c_char,\n    pub shm: *mut crate::shm::ShmHeader,\n    pub shm_default_size: usize,\n    pub server_fd: i32,\n    pub read_fds: libc::fd_set,\n    pub client_fds: *mut ClientList,\n}\n\nconst BUFFER_SIZE: usize = 65536;\n\n// ── close_socket / close_daemon ──────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn close_socket(socket_id: i32) {\n    if socket_id >= 0 {\n        libc::close(socket_id);\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn close_daemon(daemon_ptr: *mut *mut LanguageDaemon) {\n    if daemon_ptr.is_null() || (*daemon_ptr).is_null() {\n        return;\n    }\n    let daemon = *daemon_ptr;\n\n    close_socket((*daemon).server_fd);\n\n    // Free client list\n    let mut current = (*daemon).client_fds;\n    while !current.is_null() {\n        let next = (*current).next;\n        libc::close((*current).fd);\n        libc::free(current as *mut c_void);\n        current = next;\n    }\n\n    if !(*daemon).socket_path.is_null() {\n        libc::unlink((*daemon).socket_path);\n        libc::free((*daemon).socket_path as *mut c_void);\n    }\n    if !(*daemon).tmpdir.is_null() {\n        libc::free((*daemon).tmpdir as *mut c_void);\n    }\n    if !(*daemon).shm_basename.is_null() {\n        libc::free((*daemon).shm_basename as *mut c_void);\n    }\n\n    libc::free(daemon as *mut c_void);\n    *daemon_ptr = ptr::null_mut();\n}\n\n// ── Socket helpers ───────────────────────────────────────────────────────────\n\nunsafe fn new_socket(errmsg: *mut *mut c_char) -> i32 {\n    clear_errmsg(errmsg);\n    let fd = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n    if fd < 0 {\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Error creating socket\".into()));\n        return -1;\n    }\n    crate::utility::set_nosigpipe(fd);\n    fd\n}\n\nunsafe fn new_server_addr(socket_path: *const c_char) -> libc::sockaddr_un {\n    let mut addr: libc::sockaddr_un = std::mem::zeroed();\n    addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n    let path_bytes = CStr::from_ptr(socket_path).to_bytes();\n    let copy_len = path_bytes.len().min(addr.sun_path.len() - 1);\n    ptr::copy_nonoverlapping(\n        path_bytes.as_ptr() as *const c_char,\n        addr.sun_path.as_mut_ptr(),\n        copy_len,\n    );\n    addr\n}\n\nunsafe fn new_server(socket_path: *const c_char, errmsg: *mut *mut c_char) -> i32 {\n    let server_fd = new_socket(errmsg);\n    if server_fd < 0 {\n        return -1;\n    }\n\n    let addr = new_server_addr(socket_path);\n\n    // Remove any existing socket file\n    libc::unlink(socket_path);\n\n    if libc::bind(server_fd, &addr as *const libc::sockaddr_un as *const libc::sockaddr,\n                  std::mem::size_of::<libc::sockaddr_un>() as u32) < 0 {\n        close_socket(server_fd);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Error binding socket\".into()));\n        return -1;\n    }\n\n    if libc::listen(server_fd, 16) < 0 {\n        close_socket(server_fd);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Error listening on socket\".into()));\n        return -1;\n    }\n\n    server_fd\n}\n\n// ── start_daemon ─────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn start_daemon(\n    socket_path: *const c_char,\n    tmpdir: *const c_char,\n    shm_basename: *const c_char,\n    shm_default_size: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut LanguageDaemon {\n    clear_errmsg(errmsg);\n\n    let daemon = libc::calloc(1, std::mem::size_of::<LanguageDaemon>()) as *mut LanguageDaemon;\n    if daemon.is_null() {\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Calloc for language_daemon_t failed\".into()));\n        return ptr::null_mut();\n    }\n\n    (*daemon).server_fd = -1;\n    (*daemon).socket_path = libc::strdup(socket_path);\n    (*daemon).tmpdir = libc::strdup(tmpdir);\n    (*daemon).shm_basename = libc::strdup(shm_basename);\n\n    if (*daemon).socket_path.is_null() || (*daemon).tmpdir.is_null() || (*daemon).shm_basename.is_null() {\n        close_daemon(&mut (daemon as *mut LanguageDaemon));\n        set_errmsg(errmsg, &MorlocError::Ipc(\"strdup failed in start_daemon\".into()));\n        return ptr::null_mut();\n    }\n\n    (*daemon).shm_default_size = shm_default_size;\n    (*daemon).client_fds = ptr::null_mut();\n    libc::FD_ZERO(&mut (*daemon).read_fds);\n\n    // Set fallback dir for file-backed SHM\n    crate::shm::shm_set_fallback_dir(&CStr::from_ptr(tmpdir).to_string_lossy());\n\n    // Init shared memory\n    let mut err: *mut c_char = ptr::null_mut();\n    let shm = crate::ffi::shinit(shm_basename, 0, shm_default_size, &mut err);\n    if !err.is_null() {\n        close_daemon(&mut (daemon as *mut LanguageDaemon));\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n    (*daemon).shm = shm;\n\n    // Create server socket\n    (*daemon).server_fd = new_server(socket_path, &mut err);\n    if !err.is_null() {\n        close_daemon(&mut (daemon as *mut LanguageDaemon));\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Set non-blocking mode\n    let flags = libc::fcntl((*daemon).server_fd, libc::F_GETFL);\n    if flags == -1 || libc::fcntl((*daemon).server_fd, libc::F_SETFL, flags | libc::O_NONBLOCK) == -1 {\n        let errno_msg = std::ffi::CStr::from_ptr(libc::strerror(crate::utility::errno_val()))\n            .to_string_lossy().into_owned();\n        close_daemon(&mut (daemon as *mut LanguageDaemon));\n        set_errmsg(errmsg, &MorlocError::Ipc(format!(\"Failed to set non-blocking mode: {}\", errno_msg)));\n        return ptr::null_mut();\n    }\n\n    daemon\n}\n\n// ── stream_from_client_wait ──────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn stream_from_client_wait(\n    client_fd: i32,\n    pselect_timeout_us: i32,\n    recv_timeout_us: i32,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n\n    if libc::fcntl(client_fd, libc::F_GETFD) == -1 {\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Invalid file descriptor\".into()));\n        return ptr::null_mut();\n    }\n\n    let buffer = libc::calloc(BUFFER_SIZE, 1) as *mut u8;\n    if buffer.is_null() {\n        set_errmsg(errmsg, &MorlocError::Ipc(\"calloc failed for buffer\".into()));\n        return ptr::null_mut();\n    }\n\n    let mut read_fds: libc::fd_set = std::mem::zeroed();\n    let max_fd = client_fd;\n\n    // Timeout setup\n    let mut ts_loop: libc::timespec = std::mem::zeroed();\n    let timeout_ptr = if pselect_timeout_us > 0 {\n        ts_loop.tv_sec = (pselect_timeout_us / 1000000) as i64;\n        ts_loop.tv_nsec = ((pselect_timeout_us % 1000000) * 1000) as i64;\n        &ts_loop as *const libc::timespec\n    } else {\n        ptr::null()\n    };\n\n    // Signal mask setup\n    let mut mask: libc::sigset_t = std::mem::zeroed();\n    let mut origmask: libc::sigset_t = std::mem::zeroed();\n    libc::sigemptyset(&mut mask);\n    libc::sigaddset(&mut mask, libc::SIGINT);\n    libc::pthread_sigmask(libc::SIG_SETMASK, &mask, &mut origmask);\n\n    // Initial receive with timeout\n    let mut ready;\n    loop {\n        libc::FD_ZERO(&mut read_fds);\n        libc::FD_SET(client_fd, &mut read_fds);\n        ready = libc::pselect(max_fd + 1, &mut read_fds, ptr::null_mut(), ptr::null_mut(), timeout_ptr, &origmask);\n        if !(ready < 0 && crate::utility::errno_val() == libc::EINTR) {\n            break;\n        }\n    }\n    libc::pthread_sigmask(libc::SIG_SETMASK, &origmask, ptr::null_mut());\n\n    if ready == 0 {\n        libc::free(buffer as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Timeout waiting for initial data\".into()));\n        return ptr::null_mut();\n    }\n    if ready < 0 {\n        libc::free(buffer as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"pselect error\".into()));\n        return ptr::null_mut();\n    }\n    if !libc::FD_ISSET(client_fd, &read_fds) {\n        libc::free(buffer as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Bad client file descriptor\".into()));\n        return ptr::null_mut();\n    }\n\n    let recv_length = libc::recv(client_fd, buffer as *mut c_void, BUFFER_SIZE, 0);\n    if recv_length == 0 {\n        libc::free(buffer as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Connection closed by peer\".into()));\n        return ptr::null_mut();\n    }\n    if recv_length < 0 && crate::utility::errno_val() != libc::EWOULDBLOCK && crate::utility::errno_val() != libc::EAGAIN {\n        libc::free(buffer as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"Recv error\".into()));\n        return ptr::null_mut();\n    }\n\n    // Get packet size from header\n    let mut packet_err: *mut c_char = ptr::null_mut();\n    let packet_length = crate::packet_ffi::morloc_packet_size(buffer, &mut packet_err);\n    if !packet_err.is_null() {\n        libc::free(buffer as *mut c_void);\n        *errmsg = packet_err;\n        return ptr::null_mut();\n    }\n\n    let result = libc::calloc(packet_length, 1) as *mut u8;\n    if result.is_null() {\n        libc::free(buffer as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Ipc(\"calloc failure\".into()));\n        return ptr::null_mut();\n    }\n\n    let copy_length = (recv_length as usize).min(packet_length);\n    ptr::copy_nonoverlapping(buffer, result, copy_length);\n    let mut data_ptr = result.add(copy_length);\n    libc::free(buffer as *mut c_void);\n\n    let attempts = 10;\n    while (data_ptr as usize - result as usize) < packet_length {\n        let mut packet_received = false;\n        for attempt in 0..attempts {\n            libc::FD_ZERO(&mut read_fds);\n            libc::FD_SET(client_fd, &mut read_fds);\n\n            let recv_timeout_ptr = if recv_timeout_us > 0 {\n                let total_us = recv_timeout_us as i64 * (attempt as i64 + 1);\n                ts_loop.tv_sec = total_us / 1000000;\n                ts_loop.tv_nsec = (total_us % 1000000) * 1000;\n                &ts_loop as *const libc::timespec\n            } else {\n                ptr::null()\n            };\n\n            libc::pthread_sigmask(libc::SIG_SETMASK, &mask, ptr::null_mut());\n            ready = libc::pselect(max_fd + 1, &mut read_fds, ptr::null_mut(), ptr::null_mut(), recv_timeout_ptr, &origmask);\n            libc::pthread_sigmask(libc::SIG_SETMASK, &origmask, ptr::null_mut());\n\n            if ready == 0 {\n                libc::free(result as *mut c_void);\n                set_errmsg(errmsg, &MorlocError::Ipc(\"Timeout waiting for remaining data\".into()));\n                return ptr::null_mut();\n            }\n            if ready < 0 && crate::utility::errno_val() != libc::EINTR {\n                libc::free(result as *mut c_void);\n                set_errmsg(errmsg, &MorlocError::Ipc(\"pselect error\".into()));\n                return ptr::null_mut();\n            }\n            if ready <= 0 { continue; }\n\n            if libc::FD_ISSET(client_fd, &read_fds) {\n                let remaining = packet_length - (data_ptr as usize - result as usize);\n                let recv_size = remaining.min(BUFFER_SIZE);\n                let n = libc::recv(client_fd, data_ptr as *mut c_void, recv_size, 0);\n                if n > 0 {\n                    data_ptr = data_ptr.add(n as usize);\n                    packet_received = true;\n                    break;\n                }\n                if n == 0 {\n                    libc::free(result as *mut c_void);\n                    set_errmsg(errmsg, &MorlocError::Ipc(\"Connection closed early\".into()));\n                    return ptr::null_mut();\n                }\n                if n < 0 && crate::utility::errno_val() != libc::EWOULDBLOCK && crate::utility::errno_val() != libc::EAGAIN {\n                    libc::free(result as *mut c_void);\n                    set_errmsg(errmsg, &MorlocError::Ipc(\"Recv error\".into()));\n                    return ptr::null_mut();\n                }\n            }\n        }\n        if !packet_received {\n            libc::free(result as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Ipc(\"Failed to retrieve packet\".into()));\n            return ptr::null_mut();\n        }\n    }\n\n    result\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn stream_from_client(\n    client_fd: i32,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    stream_from_client_wait(client_fd, 0, 0, errmsg)\n}\n\n// ── send_and_receive_over_socket ─────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn send_and_receive_over_socket_wait(\n    socket_path: *const c_char,\n    packet: *const u8,\n    pselect_timeout_us: i32,\n    recv_timeout_us: i32,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let client_fd = new_socket(&mut err);\n    if client_fd < 0 {\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    let addr = new_server_addr(socket_path);\n\n    // Connect with retry (matching C WAIT macro behavior)\n    let mut retcode;\n    let mut attempts = 0;\n    loop {\n        retcode = libc::connect(client_fd, &addr as *const libc::sockaddr_un as *const libc::sockaddr,\n                                std::mem::size_of::<libc::sockaddr_un>() as u32);\n        if retcode == 0 { break; }\n        attempts += 1;\n        if attempts > 300 { // ~30 seconds with 100ms sleep\n            close_socket(client_fd);\n            set_errmsg(errmsg, &MorlocError::Ipc(format!(\n                \"Failed to connect to pipe '{}'\",\n                CStr::from_ptr(socket_path).to_string_lossy()\n            )));\n            return ptr::null_mut();\n        }\n        libc::usleep(100_000); // 100ms\n    }\n\n    let packet_size = crate::packet_ffi::morloc_packet_size(packet, &mut err);\n    if !err.is_null() {\n        close_socket(client_fd);\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    // Send packet in loop\n    let mut total_sent: usize = 0;\n    while total_sent < packet_size {\n        let bytes_sent = libc::send(\n            client_fd,\n            packet.add(total_sent) as *const c_void,\n            packet_size - total_sent,\n            crate::utility::SEND_NOSIGNAL,\n        );\n        if bytes_sent <= 0 {\n            close_socket(client_fd);\n            set_errmsg(errmsg, &MorlocError::Ipc(format!(\n                \"Failed to send data to '{}'\",\n                CStr::from_ptr(socket_path).to_string_lossy()\n            )));\n            return ptr::null_mut();\n        }\n        total_sent += bytes_sent as usize;\n    }\n\n    let result = stream_from_client_wait(client_fd, pselect_timeout_us, recv_timeout_us, &mut err);\n    if !err.is_null() {\n        close_socket(client_fd);\n        *errmsg = err;\n        return ptr::null_mut();\n    }\n\n    close_socket(client_fd);\n    result\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn send_and_receive_over_socket(\n    socket_path: *const c_char,\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    send_and_receive_over_socket_wait(socket_path, packet, 0, 0, errmsg)\n}\n\n// ── send_packet_to_foreign_server ────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn send_packet_to_foreign_server(\n    client_fd: i32,\n    packet: *mut u8,\n    errmsg: *mut *mut c_char,\n) -> usize {\n    clear_errmsg(errmsg);\n\n    let mut err: *mut c_char = ptr::null_mut();\n    let size = crate::packet_ffi::morloc_packet_size(packet, &mut err);\n    if !err.is_null() {\n        *errmsg = err;\n        return 0;\n    }\n\n    let mut total_sent: usize = 0;\n    while total_sent < size {\n        let bytes_sent = libc::send(\n            client_fd,\n            packet.add(total_sent) as *const c_void,\n            size - total_sent,\n            crate::utility::SEND_NOSIGNAL,\n        );\n        if bytes_sent <= 0 {\n            set_errmsg(errmsg, &MorlocError::Ipc(format!(\n                \"Failed to send over client {}\", client_fd\n            )));\n            return 0;\n        }\n        total_sent += bytes_sent as usize;\n    }\n\n    total_sent\n}\n\n// ── wait_for_client ──────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn wait_for_client_with_timeout(\n    daemon: *mut LanguageDaemon,\n    timeout_us: i32,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n\n    libc::FD_ZERO(&mut (*daemon).read_fds);\n    libc::FD_SET((*daemon).server_fd, &mut (*daemon).read_fds);\n\n    let mut max_fd = (*daemon).server_fd;\n\n    // Add existing client fds\n    let mut client = (*daemon).client_fds;\n    while !client.is_null() {\n        libc::FD_SET((*client).fd, &mut (*daemon).read_fds);\n        if (*client).fd > max_fd {\n            max_fd = (*client).fd;\n        }\n        client = (*client).next;\n    }\n\n    // Timeout\n    let mut ts: libc::timespec = std::mem::zeroed();\n    let timeout_ptr = if timeout_us > 0 {\n        ts.tv_sec = (timeout_us / 1000000) as i64;\n        ts.tv_nsec = ((timeout_us % 1000000) * 1000) as i64;\n        &ts as *const libc::timespec\n    } else {\n        ptr::null()\n    };\n\n    let mut emptymask: libc::sigset_t = std::mem::zeroed();\n    libc::sigemptyset(&mut emptymask);\n\n    let ready = libc::pselect(max_fd + 1, &mut (*daemon).read_fds, ptr::null_mut(), ptr::null_mut(), timeout_ptr, &emptymask);\n    if ready < 0 {\n        if crate::utility::errno_val() == libc::EINTR {\n            return 0;\n        }\n        set_errmsg(errmsg, &MorlocError::Ipc(\"pselect error\".into()));\n        return -1;\n    }\n    if ready == 0 {\n        return 0;\n    }\n\n    // Check for new connection\n    if libc::FD_ISSET((*daemon).server_fd, &(*daemon).read_fds) {\n        let selected_fd = libc::accept((*daemon).server_fd, ptr::null_mut(), ptr::null_mut());\n        if selected_fd >= 0 {\n            crate::utility::set_nosigpipe(selected_fd);\n            libc::fcntl(selected_fd, libc::F_SETFL, libc::O_NONBLOCK);\n\n            let new_client = libc::calloc(1, std::mem::size_of::<ClientList>()) as *mut ClientList;\n            (*new_client).fd = selected_fd;\n            (*new_client).next = ptr::null_mut();\n\n            if (*daemon).client_fds.is_null() {\n                (*daemon).client_fds = new_client;\n            } else {\n                let mut last = (*daemon).client_fds;\n                while !(*last).next.is_null() {\n                    last = (*last).next;\n                }\n                (*last).next = new_client;\n            }\n        }\n        // Ignore EAGAIN/EWOULDBLOCK on accept\n    }\n\n    if (*daemon).client_fds.is_null() {\n        return 0; // spurious wakeup\n    }\n\n    // Dequeue first client\n    let client_node = (*daemon).client_fds;\n    let return_fd = (*client_node).fd;\n    (*daemon).client_fds = (*client_node).next;\n    libc::free(client_node as *mut c_void);\n\n    return_fd\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn wait_for_client(\n    daemon: *mut LanguageDaemon,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    wait_for_client_with_timeout(daemon, 0, errmsg)\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/json.rs",
    "content": "//! JSON <-> Voidstar conversion.\n//!\n//! # Safety model\n//!\n//! All SHM pointer operations are encapsulated in `ShmWriter` (write) and\n//! `ShmReader` (read). Each has a single `unsafe fn new()` constructor;\n//! all subsequent reads/writes through the handle are safe methods.\n//! The only remaining `unsafe` blocks are `libc::snprintf` for float\n//! formatting and constructing readers/writers at known-valid offsets.\n\nuse crate::error::MorlocError;\nuse crate::schema::{Schema, SerialType};\nuse crate::shm::{self, AbsPtr, Array, RELNULL};\n\n// ── Safe SHM abstractions ────────────────────────────────────────────────────\n\n/// Write handle to a region of shared memory.\n///\n/// # Safety invariant\n/// `ptr` was obtained from `shmalloc` and points to at least `len` writable bytes.\nstruct ShmWriter {\n    ptr: *mut u8,\n    #[cfg(debug_assertions)]\n    len: usize,\n}\n\nimpl ShmWriter {\n    /// # Safety\n    /// `ptr` must point to `len` bytes of valid, writable SHM.\n    unsafe fn new(ptr: *mut u8, len: usize) -> Self {\n        let _ = len;\n        ShmWriter { ptr, #[cfg(debug_assertions)] len }\n    }\n\n    fn as_ptr(&self) -> *mut u8 { self.ptr }\n\n    fn write_bytes(&self, offset: usize, src: &[u8]) {\n        debug_assert!(offset + src.len() <= { #[cfg(debug_assertions)] { self.len } #[cfg(not(debug_assertions))] { usize::MAX } });\n        unsafe { std::ptr::copy_nonoverlapping(src.as_ptr(), self.ptr.add(offset), src.len()); }\n    }\n\n    fn zero(&self, offset: usize, count: usize) {\n        unsafe { std::ptr::write_bytes(self.ptr.add(offset), 0, count); }\n    }\n\n    fn write_val<T: Copy>(&self, offset: usize, val: T) {\n        unsafe { (self.ptr.add(offset) as *mut T).write_unaligned(val); }\n    }\n\n    fn write_array_header(&self, offset: usize, size: usize, data_rel: shm::RelPtr) {\n        let arr = Array { size, data: data_rel };\n        unsafe {\n            std::ptr::copy_nonoverlapping(\n                &arr as *const Array as *const u8, self.ptr.add(offset),\n                std::mem::size_of::<Array>(),\n            );\n        }\n    }\n\n    fn sub(&self, offset: usize, sub_len: usize) -> ShmWriter {\n        unsafe { ShmWriter::new(self.ptr.add(offset), sub_len) }\n    }\n}\n\n/// Read handle to SHM data.\n///\n/// # Safety invariant\n/// `ptr` was obtained from `rel2abs`/`shmalloc` and points to valid readable SHM.\nstruct ShmReader { ptr: *const u8 }\n\nimpl ShmReader {\n    /// # Safety\n    /// `ptr` must point to valid, readable shared memory.\n    unsafe fn new(ptr: *const u8) -> Self { ShmReader { ptr } }\n\n    fn read_val<T: Copy>(&self, offset: usize) -> T {\n        unsafe { (self.ptr.add(offset) as *const T).read_unaligned() }\n    }\n    fn read_u8(&self, offset: usize) -> u8 { self.read_val(offset) }\n    fn read_array(&self, offset: usize) -> Array { unsafe { *(self.ptr.add(offset) as *const Array) } }\n\n    fn read_str(&self, offset: usize, len: usize) -> &str {\n        unsafe {\n            std::str::from_utf8(std::slice::from_raw_parts(self.ptr.add(offset), len)).unwrap_or(\"\")\n        }\n    }\n\n    fn at(&self, offset: usize) -> ShmReader {\n        unsafe { ShmReader::new(self.ptr.add(offset)) }\n    }\n}\n\n// ── JSON -> Voidstar ───────────────────────────────────────────────────────\n\npub fn read_json_with_schema(json_str: &str, schema: &Schema) -> Result<AbsPtr, MorlocError> {\n    read_json_with_schema_dest(None, json_str, schema)\n}\n\npub fn read_json_with_schema_dest(\n    dest: Option<AbsPtr>, json_str: &str, schema: &Schema,\n) -> Result<AbsPtr, MorlocError> {\n    let value: serde_json::Value = serde_json::from_str(json_str)\n        .map_err(|e| MorlocError::Serialization(format!(\"JSON parse error: {}\", e)))?;\n    json_to_voidstar(&value, schema, dest)\n}\n\nfn alloc(dest: Option<AbsPtr>, size: usize) -> Result<ShmWriter, MorlocError> {\n    let ptr = match dest { Some(p) => p, None => shm::shmalloc(size)? };\n    // SAFETY: ptr from shmalloc or caller-provided valid SHM of sufficient size\n    Ok(unsafe { ShmWriter::new(ptr, size) })\n}\n\nfn json_to_voidstar(\n    value: &serde_json::Value, schema: &Schema, dest: Option<AbsPtr>,\n) -> Result<AbsPtr, MorlocError> {\n    match schema.serial_type {\n        SerialType::Nil => { let w = alloc(dest, 1)?; w.write_val::<u8>(0, 0); Ok(w.as_ptr()) }\n        SerialType::Bool => {\n            let b = value.as_bool().ok_or_else(|| err(\"expected bool\"))?;\n            let w = alloc(dest, 1)?; w.write_val::<u8>(0, b as u8); Ok(w.as_ptr())\n        }\n        SerialType::Sint8  => { let w = alloc(dest, 1)?; w.write_val::<i8>(0,  as_i64(value)? as i8);  Ok(w.as_ptr()) }\n        SerialType::Sint16 => { let w = alloc(dest, 2)?; w.write_val::<i16>(0, as_i64(value)? as i16); Ok(w.as_ptr()) }\n        SerialType::Sint32 => { let w = alloc(dest, 4)?; w.write_val::<i32>(0, as_i64(value)? as i32); Ok(w.as_ptr()) }\n        SerialType::Sint64 => { let w = alloc(dest, 8)?; w.write_val::<i64>(0, as_i64(value)?);        Ok(w.as_ptr()) }\n        SerialType::Uint8  => { let w = alloc(dest, 1)?; w.write_val::<u8>(0,  as_u64(value)? as u8);  Ok(w.as_ptr()) }\n        SerialType::Uint16 => { let w = alloc(dest, 2)?; w.write_val::<u16>(0, as_u64(value)? as u16); Ok(w.as_ptr()) }\n        SerialType::Uint32 => { let w = alloc(dest, 4)?; w.write_val::<u32>(0, as_u64(value)? as u32); Ok(w.as_ptr()) }\n        SerialType::Uint64 => { let w = alloc(dest, 8)?; w.write_val::<u64>(0, as_u64(value)?);        Ok(w.as_ptr()) }\n        SerialType::Float32 => { let w = alloc(dest, 4)?; w.write_val::<f32>(0, as_f64(value)? as f32); Ok(w.as_ptr()) }\n        SerialType::Float64 => { let w = alloc(dest, 8)?; w.write_val::<f64>(0, as_f64(value)?);        Ok(w.as_ptr()) }\n\n        SerialType::String => {\n            let s = value.as_str().ok_or_else(|| err(\"expected string\"))?;\n            let bytes = s.as_bytes();\n            let hdr = std::mem::size_of::<Array>();\n\n            let (w, data_rel) = if dest.is_some() {\n                let w = alloc(dest, hdr)?;\n                let data_rel = if bytes.is_empty() { RELNULL } else {\n                    shm::abs2rel(shm::shmemcpy(bytes.as_ptr(), bytes.len())?)?\n                };\n                (w, data_rel)\n            } else {\n                let w = alloc(None, hdr + bytes.len())?;\n                w.write_bytes(hdr, bytes);\n                // SAFETY: data is hdr bytes into the same shmalloc block\n                let data_rel = shm::abs2rel(unsafe { w.as_ptr().add(hdr) })?;\n                (w, data_rel)\n            };\n            w.write_array_header(0, bytes.len(), data_rel);\n            Ok(w.as_ptr())\n        }\n\n        SerialType::Array => {\n            let arr_val = value.as_array().ok_or_else(|| err(\"expected array\"))?;\n            let es = schema.parameters.first().ok_or_else(|| err(\"array has no element type\"))?;\n            let n = arr_val.len();\n            let ew = es.width;\n            let hdr = std::mem::size_of::<Array>();\n\n            let (hw, data_ptr) = if dest.is_some() {\n                let hw = alloc(dest, hdr)?;\n                let dp = if n > 0 { shm::shmalloc(n * ew)? } else { std::ptr::null_mut() };\n                (hw, dp)\n            } else {\n                let w = alloc(None, hdr + n * ew)?;\n                // SAFETY: data is hdr bytes into the same shmalloc block\n                let dp = unsafe { w.as_ptr().add(hdr) };\n                (w, dp)\n            };\n            let data_rel = if data_ptr.is_null() { RELNULL } else { shm::abs2rel(data_ptr)? };\n\n            for (i, elem) in arr_val.iter().enumerate() {\n                // SAFETY: data_ptr + i * ew is within the data allocation\n                let ep = unsafe { data_ptr.add(i * ew) };\n                json_to_voidstar(elem, es, Some(ep))?;\n            }\n            hw.write_array_header(0, n, data_rel);\n            Ok(hw.as_ptr())\n        }\n\n        SerialType::Tuple | SerialType::Map => {\n            let fields = extract_fields(value, schema)?;\n            if fields.len() != schema.parameters.len() {\n                return Err(err(&format!(\"expected {} fields, got {}\", schema.parameters.len(), fields.len())));\n            }\n            let w = alloc(dest, schema.width)?;\n            w.zero(0, schema.width);\n            for (i, (fv, fs)) in fields.iter().zip(schema.parameters.iter()).enumerate() {\n                let sub = w.sub(schema.offsets[i], fs.width);\n                json_to_voidstar(fv, fs, Some(sub.as_ptr()))?;\n            }\n            Ok(w.as_ptr())\n        }\n\n        SerialType::Optional => {\n            let inner = schema.parameters.first().ok_or_else(|| err(\"optional has no inner type\"))?;\n            let off = shm::align_up(1, inner.alignment().max(1));\n            let total = off + inner.width;\n            let w = alloc(dest, total)?;\n            if value.is_null() {\n                w.zero(0, total);\n            } else {\n                w.write_val::<u8>(0, 1);\n                json_to_voidstar(value, inner, Some(w.sub(off, inner.width).as_ptr()))?;\n            }\n            Ok(w.as_ptr())\n        }\n\n        SerialType::Tensor => Err(err(\"Tensor JSON parsing not yet implemented\")),\n    }\n}\n\nfn extract_fields(value: &serde_json::Value, schema: &Schema) -> Result<Vec<serde_json::Value>, MorlocError> {\n    if schema.serial_type == SerialType::Map && value.is_object() {\n        let obj = value.as_object().unwrap();\n        Ok(schema.keys.iter().map(|k| obj.get(k).cloned().unwrap_or(serde_json::Value::Null)).collect())\n    } else {\n        value.as_array().ok_or_else(|| err(\"expected array for tuple/map\")).cloned()\n    }\n}\n\n// ── Voidstar -> JSON ───────────────────────────────────────────────────────\n\npub fn voidstar_to_json_string(ptr: AbsPtr, schema: &Schema) -> Result<String, MorlocError> {\n    let mut buf = String::new();\n    // SAFETY: ptr from shmalloc/rel2abs — valid SHM\n    let r = unsafe { ShmReader::new(ptr) };\n    to_json(&r, schema, &mut buf)?;\n    Ok(buf)\n}\n\npub fn print_voidstar(ptr: AbsPtr, schema: &Schema) -> Result<(), MorlocError> {\n    println!(\"{}\", voidstar_to_json_string(ptr, schema)?);\n    Ok(())\n}\n\npub fn pretty_print_voidstar(ptr: AbsPtr, schema: &Schema) -> Result<(), MorlocError> {\n    let json = voidstar_to_json_string(ptr, schema)?;\n    let v: serde_json::Value = serde_json::from_str(&json).map_err(|e| err(&e.to_string()))?;\n    match &v {\n        // Print strings as raw text (unescaped, no quotes)\n        serde_json::Value::String(s) => println!(\"{}\", s),\n        // Print numbers and bools as plain values\n        serde_json::Value::Number(n) => println!(\"{}\", n),\n        serde_json::Value::Bool(b) => println!(\"{}\", b),\n        serde_json::Value::Null => println!(\"null\"),\n        // Print arrays and objects as indented JSON\n        _ => println!(\"{}\", serde_json::to_string_pretty(&v).map_err(|e| err(&e.to_string()))?),\n    }\n    Ok(())\n}\n\nfn to_json(r: &ShmReader, schema: &Schema, buf: &mut String) -> Result<(), MorlocError> {\n    match schema.serial_type {\n        SerialType::Nil    => buf.push_str(\"null\"),\n        SerialType::Bool   => buf.push_str(if r.read_u8(0) != 0 { \"true\" } else { \"false\" }),\n        SerialType::Sint8  => buf.push_str(&(r.read_val::<i8>(0)).to_string()),\n        SerialType::Sint16 => buf.push_str(&(r.read_val::<i16>(0)).to_string()),\n        SerialType::Sint32 => buf.push_str(&(r.read_val::<i32>(0)).to_string()),\n        SerialType::Sint64 => buf.push_str(&(r.read_val::<i64>(0)).to_string()),\n        SerialType::Uint8  => buf.push_str(&r.read_u8(0).to_string()),\n        SerialType::Uint16 => buf.push_str(&(r.read_val::<u16>(0)).to_string()),\n        SerialType::Uint32 => buf.push_str(&(r.read_val::<u32>(0)).to_string()),\n        SerialType::Uint64 => buf.push_str(&(r.read_val::<u64>(0)).to_string()),\n        SerialType::Float32 => write_float(buf, r.read_val::<f32>(0) as f64, b\"%.7g\\0\"),\n        SerialType::Float64 => write_float(buf, r.read_val::<f64>(0), b\"%.15g\\0\"),\n\n        SerialType::String => {\n            let arr = r.read_array(0);\n            if arr.size == 0 || arr.data == RELNULL {\n                buf.push_str(\"\\\"\\\"\");\n            } else {\n                // SAFETY: arr.data resolved to valid SHM string bytes\n                let dr = unsafe { ShmReader::new(shm::rel2abs(arr.data)?) };\n                json_escape(dr.read_str(0, arr.size), buf);\n            }\n        }\n        SerialType::Array => {\n            let arr = r.read_array(0);\n            let es = &schema.parameters[0];\n            buf.push('[');\n            if arr.size > 0 && arr.data != RELNULL {\n                let data = shm::rel2abs(arr.data)?;\n                for i in 0..arr.size {\n                    if i > 0 { buf.push(','); }\n                    // SAFETY: data + i * es.width within array data block\n                    let er = unsafe { ShmReader::new(data.add(i * es.width)) };\n                    to_json(&er, es, buf)?;\n                }\n            }\n            buf.push(']');\n        }\n        SerialType::Tuple => {\n            buf.push('[');\n            for (i, fs) in schema.parameters.iter().enumerate() {\n                if i > 0 { buf.push(','); }\n                to_json(&r.at(schema.offsets[i]), fs, buf)?;\n            }\n            buf.push(']');\n        }\n        SerialType::Map => {\n            buf.push('{');\n            for (i, fs) in schema.parameters.iter().enumerate() {\n                if i > 0 { buf.push(','); }\n                if i < schema.keys.len() { buf.push('\"'); buf.push_str(&schema.keys[i]); buf.push_str(\"\\\":\"); }\n                to_json(&r.at(schema.offsets[i]), fs, buf)?;\n            }\n            buf.push('}');\n        }\n        SerialType::Optional => {\n            if r.read_u8(0) == 0 {\n                buf.push_str(\"null\");\n            } else {\n                let inner = &schema.parameters[0];\n                to_json(&r.at(shm::align_up(1, inner.alignment().max(1))), inner, buf)?;\n            }\n        }\n        SerialType::Tensor => {\n            // SAFETY: reading Tensor struct from SHM\n            let tensor = unsafe { &*(r.ptr as *const shm::Tensor) };\n            if tensor.total_elements == 0 {\n                buf.push_str(\"[]\");\n            } else {\n                let ndim = schema.offsets.first().copied().unwrap_or(1);\n                let sp = shm::rel2abs(tensor.shape)?;\n                // SAFETY: sp points to ndim i64 values in SHM\n                let shape: Vec<usize> = (0..ndim).map(|i| unsafe { *((sp as *const i64).add(i)) } as usize).collect();\n                let dp = shm::rel2abs(tensor.data)?;\n                let es = &schema.parameters[0];\n                tensor_to_json(buf, dp, &shape, tensor.total_elements, es)?;\n            }\n        }\n    }\n    Ok(())\n}\n\nfn tensor_to_json(\n    buf: &mut String, data: *const u8, shape: &[usize], stride: usize, es: &Schema,\n) -> Result<(), MorlocError> {\n    buf.push('[');\n    if shape.len() == 1 {\n        for i in 0..shape[0] {\n            if i > 0 { buf.push(','); }\n            // SAFETY: data + i * es.width within tensor data\n            let r = unsafe { ShmReader::new(data.add(i * es.width)) };\n            to_json(&r, es, buf)?;\n        }\n    } else {\n        let inner = stride / shape[0];\n        for i in 0..shape[0] {\n            if i > 0 { buf.push(','); }\n            tensor_to_json(buf, data.wrapping_add(i * inner * es.width), &shape[1..], inner, es)?;\n        }\n    }\n    buf.push(']');\n    Ok(())\n}\n\n// ── Helpers ────────────────────────────────────────────────────────────────\n\nfn json_escape(s: &str, buf: &mut String) {\n    buf.push('\"');\n    for ch in s.chars() {\n        match ch {\n            '\"' => buf.push_str(\"\\\\\\\"\"), '\\\\' => buf.push_str(\"\\\\\\\\\"), '/' => buf.push_str(\"\\\\/\"),\n            '\\x08' => buf.push_str(\"\\\\b\"), '\\x0c' => buf.push_str(\"\\\\f\"),\n            '\\n' => buf.push_str(\"\\\\n\"), '\\r' => buf.push_str(\"\\\\r\"), '\\t' => buf.push_str(\"\\\\t\"),\n            c if c < '\\x20' => buf.push_str(&format!(\"\\\\u{:04x}\", c as u32)),\n            c => buf.push(c),\n        }\n    }\n    buf.push('\"');\n}\n\nfn err(msg: &str) -> MorlocError { MorlocError::Serialization(msg.into()) }\nfn as_i64(v: &serde_json::Value) -> Result<i64, MorlocError> { v.as_i64().ok_or_else(|| err(\"expected integer\")) }\nfn as_u64(v: &serde_json::Value) -> Result<u64, MorlocError> { v.as_u64().ok_or_else(|| err(\"expected unsigned integer\")) }\nfn as_f64(v: &serde_json::Value) -> Result<f64, MorlocError> { v.as_f64().ok_or_else(|| err(\"expected number\")) }\n\nfn write_float(buf: &mut String, f: f64, fmt: &[u8]) {\n    if f.is_nan() || f.is_infinite() { buf.push_str(\"null\"); return; }\n    let mut cbuf = [0u8; 64];\n    // SAFETY: snprintf writes to stack-local buffer with explicit size limit\n    let n = unsafe { libc::snprintf(cbuf.as_mut_ptr() as *mut libc::c_char, cbuf.len(), fmt.as_ptr() as *const libc::c_char, f) };\n    if n > 0 && (n as usize) < cbuf.len() {\n        buf.push_str(std::str::from_utf8(&cbuf[..n as usize]).unwrap_or(\"0\"));\n    } else {\n        buf.push_str(\"0\");\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::schema::parse_schema;\n    fn setup() { crate::init_test_shm(); }\n\n    #[test] fn test_int()     { setup(); let s = parse_schema(\"i4\").unwrap(); let p = read_json_with_schema(\"42\", &s).unwrap(); assert_eq!(voidstar_to_json_string(p, &s).unwrap(), \"42\"); }\n    #[test] fn test_string()  { setup(); let s = parse_schema(\"s\").unwrap(); let p = read_json_with_schema(\"\\\"hello\\\"\", &s).unwrap(); assert_eq!(voidstar_to_json_string(p, &s).unwrap(), \"\\\"hello\\\"\"); }\n    #[test] fn test_bool()    { setup(); let s = parse_schema(\"b\").unwrap(); let p = read_json_with_schema(\"true\", &s).unwrap(); assert_eq!(voidstar_to_json_string(p, &s).unwrap(), \"true\"); }\n    #[test] fn test_array()   { setup(); let s = parse_schema(\"ai4\").unwrap(); let p = read_json_with_schema(\"[1,2,3]\", &s).unwrap(); assert_eq!(voidstar_to_json_string(p, &s).unwrap(), \"[1,2,3]\"); }\n    #[test] fn test_opt_some(){ setup(); let s = parse_schema(\"?i4\").unwrap(); let p = read_json_with_schema(\"5\", &s).unwrap(); assert_eq!(voidstar_to_json_string(p, &s).unwrap(), \"5\"); }\n    #[test] fn test_opt_null(){ setup(); let s = parse_schema(\"?i4\").unwrap(); let p = read_json_with_schema(\"null\", &s).unwrap(); assert_eq!(voidstar_to_json_string(p, &s).unwrap(), \"null\"); }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/json_ffi.rs",
    "content": "//! C ABI wrappers for JSON functions.\n//! Replaces json.c's core functions with calls to Rust json.rs.\n//! Arrow output and json_buf API are also implemented here.\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\n// ── quoted ─────────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn quoted(input: *const c_char) -> *mut c_char {\n    if input.is_null() {\n        return ptr::null_mut();\n    }\n    let s = CStr::from_ptr(input);\n    let bytes = s.to_bytes();\n    let len = bytes.len();\n    // Simple wrapping: \"input\" (matching C behavior — no escaping)\n    let buf = libc::calloc(len + 3, 1) as *mut c_char;\n    if buf.is_null() {\n        return ptr::null_mut();\n    }\n    *buf = b'\"' as c_char;\n    std::ptr::copy_nonoverlapping(bytes.as_ptr(), buf.add(1) as *mut u8, len);\n    *buf.add(len + 1) = b'\"' as c_char;\n    buf\n}\n\n// ── read_json_with_schema ──────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_json_with_schema(\n    dest: *mut u8,\n    json_str: *mut c_char,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    if json_str.is_null() || schema.is_null() {\n        set_errmsg(errmsg, &MorlocError::NullPointer);\n        return ptr::null_mut();\n    }\n\n    let rs = CSchema::to_rust(schema);\n    let json = CStr::from_ptr(json_str).to_string_lossy();\n\n    let dest_opt = if dest.is_null() { None } else { Some(dest) };\n    match crate::json::read_json_with_schema_dest(dest_opt, &json, &rs) {\n        Ok(ptr) => ptr,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            ptr::null_mut()\n        }\n    }\n}\n\n// ── voidstar_to_json_string ────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn voidstar_to_json_string(\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    match crate::json::voidstar_to_json_string(data as *mut u8, &rs) {\n        Ok(s) => {\n            match CString::new(s) {\n                Ok(cs) => cs.into_raw(),\n                Err(_) => {\n                    set_errmsg(errmsg, &MorlocError::Other(\"CString conversion failed\".into()));\n                    ptr::null_mut()\n                }\n            }\n        }\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            ptr::null_mut()\n        }\n    }\n}\n\n// ── print_voidstar ─────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn print_voidstar(\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    match crate::json::print_voidstar(data as *mut u8, &rs) {\n        Ok(_) => true,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            false\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn pretty_print_voidstar(\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    match crate::json::pretty_print_voidstar(data as *mut u8, &rs) {\n        Ok(_) => true,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            false\n        }\n    }\n}\n\n// ── json_buf API (used by daemon.c for discovery JSON) ─────────────────────\n\n/// Dynamic JSON string builder.\npub struct JsonBuf {\n    buf: String,\n    needs_comma: Vec<bool>,\n}\n\n#[no_mangle]\npub extern \"C\" fn json_buf_new() -> *mut JsonBuf {\n    Box::into_raw(Box::new(JsonBuf {\n        buf: String::with_capacity(256),\n        needs_comma: Vec::new(),\n    }))\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_buf_free(jb: *mut JsonBuf) {\n    if !jb.is_null() {\n        let _ = Box::from_raw(jb);\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_buf_finish(jb: *mut JsonBuf) -> *mut c_char {\n    if jb.is_null() {\n        return ptr::null_mut();\n    }\n    let jb = Box::from_raw(jb);\n    match CString::new(jb.buf) {\n        Ok(cs) => cs.into_raw(),\n        Err(_) => ptr::null_mut(),\n    }\n}\n\nunsafe fn jb_maybe_comma(jb: &mut JsonBuf) {\n    if let Some(needs) = jb.needs_comma.last_mut() {\n        if *needs {\n            jb.buf.push(',');\n        }\n        *needs = true;\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_obj_start(jb: *mut JsonBuf) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    jb.buf.push('{');\n    jb.needs_comma.push(false);\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_obj_end(jb: *mut JsonBuf) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb.needs_comma.pop();\n    jb.buf.push('}');\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_arr_start(jb: *mut JsonBuf) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    jb.buf.push('[');\n    jb.needs_comma.push(false);\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_arr_end(jb: *mut JsonBuf) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb.needs_comma.pop();\n    jb.buf.push(']');\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_key(jb: *mut JsonBuf, key: *const c_char) {\n    if jb.is_null() || key.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    let s = CStr::from_ptr(key).to_string_lossy();\n    jb.buf.push('\"');\n    jb.buf.push_str(&s);\n    jb.buf.push_str(\"\\\":\");\n    // Don't set needs_comma — the value will follow immediately\n    if let Some(needs) = jb.needs_comma.last_mut() {\n        *needs = false;\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_string(jb: *mut JsonBuf, val: *const c_char) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    if val.is_null() {\n        jb.buf.push_str(\"null\");\n    } else {\n        let s = CStr::from_ptr(val).to_string_lossy();\n        // JSON-escape the string\n        jb.buf.push('\"');\n        for ch in s.chars() {\n            match ch {\n                '\"' => jb.buf.push_str(\"\\\\\\\"\"),\n                '\\\\' => jb.buf.push_str(\"\\\\\\\\\"),\n                '\\n' => jb.buf.push_str(\"\\\\n\"),\n                '\\r' => jb.buf.push_str(\"\\\\r\"),\n                '\\t' => jb.buf.push_str(\"\\\\t\"),\n                c if c < '\\x20' => {\n                    jb.buf.push_str(&format!(\"\\\\u{:04x}\", c as u32));\n                }\n                c => jb.buf.push(c),\n            }\n        }\n        jb.buf.push('\"');\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_int(jb: *mut JsonBuf, val: i64) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    jb.buf.push_str(&val.to_string());\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_uint(jb: *mut JsonBuf, val: u64) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    jb.buf.push_str(&val.to_string());\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_bool(jb: *mut JsonBuf, val: bool) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    jb.buf.push_str(if val { \"true\" } else { \"false\" });\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_null(jb: *mut JsonBuf) {\n    if jb.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    jb.buf.push_str(\"null\");\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn json_write_raw(jb: *mut JsonBuf, raw: *const c_char) {\n    if jb.is_null() || raw.is_null() { return; }\n    let jb = &mut *jb;\n    jb_maybe_comma(jb);\n    let s = CStr::from_ptr(raw).to_string_lossy();\n    jb.buf.push_str(&s);\n}\n\n// ── Arrow JSON output ──────────────────────────────────────────────────────\n// Arrow output is complex and depends on the Arrow C Data Interface.\n// These are implemented in C (arrow_json.c) and linked via the hybrid build.\n// The functions below are stubs that will be overridden by the C implementations\n// when we create a separate arrow_json.c file.\n// For now, remove the Rust stubs and let C json.c's implementations be used\n// from a separate compilation unit.\n\n// print_arrow_as_json and print_arrow_as_table are provided by the C\n// arrow_json code (extracted from json.c, kept in build as arrow_json.c)\n\n#[allow(dead_code)]\nunsafe fn _print_arrow_as_json_stub(\n    data: *const c_void,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n\n    // Use the arrow.c implementation which handles the Arrow C Data Interface\n    extern \"C\" {\n        fn arrow_column_desc(\n            header: *const c_void,\n            col_idx: usize,\n        ) -> *const c_void;\n        fn arrow_column_data(\n            header: *const c_void,\n            col_idx: usize,\n        ) -> *const c_void;\n        fn arrow_column_name(\n            header: *const c_void,\n            col_idx: usize,\n        ) -> *const c_char;\n    }\n\n    // Read arrow_shm_header fields\n    // arrow_shm_header_t: { magic: u32, n_columns: u32, n_rows: u64, ... }\n    let header = data as *const u8;\n    let n_columns = *(header.add(4) as *const u32) as usize;\n    let n_rows = *(header.add(8) as *const u64) as usize;\n\n    // Build JSON array of objects\n    print!(\"[\");\n    for row in 0..n_rows {\n        if row > 0 { print!(\",\"); }\n        print!(\"{{\");\n        for col in 0..n_columns {\n            if col > 0 { print!(\",\"); }\n            let name = arrow_column_name(data, col);\n            let name_str = if name.is_null() { \"?\" } else { CStr::from_ptr(name).to_str().unwrap_or(\"?\") };\n            print!(\"\\\"{}\\\":\", name_str);\n\n            let desc = arrow_column_desc(data, col);\n            if desc.is_null() {\n                print!(\"null\");\n                continue;\n            }\n            // desc is arrow_column_desc_t: { type: u8, length: u32, null_count: u32, name_offset, data_offset }\n            let col_type = *(desc as *const u8);\n            let col_data = arrow_column_data(data, col);\n\n            // Print value based on type\n            // Types: 0=nil, 1=bool, 2=i8, ..., 11=f64, 13=string\n            match col_type {\n                4 => { // i32\n                    let vals = col_data as *const i32;\n                    print!(\"{}\", *vals.add(row));\n                }\n                5 => { // i64\n                    let vals = col_data as *const i64;\n                    print!(\"{}\", *vals.add(row));\n                }\n                11 => { // f64\n                    let vals = col_data as *const f64;\n                    let mut cbuf = [0u8; 64];\n                    let fmt = b\"%.15g\\0\";\n                    let n = libc::snprintf(cbuf.as_mut_ptr() as *mut c_char, 64, fmt.as_ptr() as *const c_char, *vals.add(row));\n                    if n > 0 {\n                        let s = std::str::from_utf8(&cbuf[..n as usize]).unwrap_or(\"0\");\n                        print!(\"{}\", s);\n                    }\n                }\n                13 => { // string\n                    // Arrow strings: offsets array + data buffer\n                    // For simplicity, use arrow_column_data which gives the data pointer\n                    // This is a simplified implementation — full Arrow string handling\n                    // requires offset arrays\n                    print!(\"\\\"<string>\\\"\");\n                }\n                _ => {\n                    print!(\"null\");\n                }\n            }\n        }\n        print!(\"}}\");\n    }\n    println!(\"]\");\n\n    true\n}\n\n#[allow(dead_code)]\nunsafe fn _print_arrow_as_table_stub(\n    _data: *const c_void,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    // Stub — Arrow table output is rarely used\n    // The full implementation would print TSV-formatted columns\n    set_errmsg(errmsg, &MorlocError::Other(\"Arrow table output not yet implemented in Rust\".into()));\n    false\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/lib.rs",
    "content": "pub mod error;\npub mod schema;\npub mod packet;\npub mod shm;\npub mod hash;\npub mod ipc;\npub mod json;\npub mod mpack;\n// FFI and utility modules export #[no_mangle] extern \"C\" symbols.\n// When the \"no-ffi-exports\" feature is active (nexus build), these modules\n// are not compiled, preventing symbol conflicts with libmorloc.so.\n// CSchema type is always available (used by nexus for Rust<->C conversion)\npub mod cschema;\npub mod ffi;\npub mod utility;\npub mod cache;\npub mod intrinsics;\npub mod voidstar;\npub mod json_ffi;\npub mod packet_ffi;\npub mod ipc_ffi;\npub mod http_ffi;\npub mod slurm_ffi;\npub mod manifest_ffi;\npub mod eval_ffi;\npub mod arrow_ffi;\npub mod pool_ffi;\npub mod daemon_ffi;\npub mod router_ffi;\npub mod cli;\n\n/// Shared test SHM initialization. Call from all test modules.\n#[cfg(test)]\npub(crate) fn init_test_shm() {\n    use std::sync::Once;\n    static INIT: Once = Once::new();\n    INIT.call_once(|| {\n        let tmpdir = std::env::temp_dir();\n        let test_dir = tmpdir.join(format!(\"morloc_test_{}\", std::process::id()));\n        let _ = std::fs::create_dir_all(&test_dir);\n        shm::shm_set_fallback_dir(test_dir.to_str().unwrap());\n        let basename = format!(\"morloc_test_{}\", std::process::id());\n        shm::shinit(&basename, 0, 0x100000).unwrap(); // 1MB\n    });\n}\n\n// Re-export core types at crate root\npub use error::MorlocError;\npub use schema::{Schema, SerialType};\npub use packet::{PacketHeader, PACKET_MAGIC};\npub use shm::{RelPtr, VolPtr, AbsPtr, Array, Tensor};\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/manifest_ffi.rs",
    "content": "//! C ABI wrappers for manifest parsing and discovery JSON.\n//!\n//! This file mirrors the manifest data model into raw `#[repr(C)]`\n//! structs that the daemon and slurm subsystems read via FFI from C\n//! code. It is **not** the canonical Rust deserializer of the manifest\n//! schema -- that lives in\n//! `data/rust/morloc-nexus/src/manifest.rs`, which has full doc\n//! comments describing the v2 manifest schema, every field's purpose,\n//! and which slots are reserved for future expansion.\n//!\n//! The split exists for two reasons:\n//!\n//! 1. **C ABI stability.** The C structs here have the original v1\n//!    field layout (flat `arg_schemas`, `return_schema`, `return_type`,\n//!    `return_desc`, `build_dir`, `version`) so that downstream C\n//!    callers (the daemon, the slurm bridge, any third-party FFI\n//!    consumers) don't break when the JSON schema evolves. The\n//!    `parse_manifest` function below reads the new v2 JSON shape and\n//!    populates these legacy C struct fields, acting as a translation\n//!    layer.\n//!\n//! 2. **Build-time decoupling.** The morloc-runtime crate needs to\n//!    consume manifests without depending on the morloc-nexus crate.\n//!    Sharing a Rust-level data model would create a circular\n//!    dependency between the two crates.\n//!\n//! When the v2 schema gains new fields (constraints, metadata, etc.),\n//! the canonical Rust model in `morloc-nexus/src/manifest.rs` is\n//! updated first. This file gets new C struct fields only when a C-side\n//! consumer needs them; otherwise the new JSON keys are silently\n//! ignored here, which is the correct forward-compatible behavior.\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\n// -- C-compatible types matching eval.h ---------------------------------------\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum MorlocExpressionType {\n    Dat = 0,\n    App = 1,\n    Lam = 2,\n    Bnd = 3,\n    Pat = 4,\n    Fmt = 5,\n    Show = 6,\n    Read = 7,\n    Hash = 8,\n    Save = 9,\n    Load = 10,\n}\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum MorlocAppExpressionType {\n    Pattern = 0,\n    Lambda = 1,\n    Format = 2,\n}\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum MorlocPatternType {\n    ByKey = 0,\n    ByIndex = 1,\n    End = 2,\n}\n\n#[repr(C)]\npub union PatternFields {\n    pub indices: *mut usize,\n    pub keys: *mut *mut c_char,\n}\n\n#[repr(C)]\npub struct MorlocPattern {\n    pub ptype: MorlocPatternType,\n    pub size: usize,\n    pub fields: PatternFields,\n    pub selectors: *mut *mut MorlocPattern,\n}\n\n#[repr(C)]\npub union Primitive {\n    pub s: *mut c_char,\n    pub z: u8,\n    pub b: bool,\n    pub i1: i8,\n    pub i2: i16,\n    pub i4: i32,\n    pub i8_: i64,\n    pub u1: u8,\n    pub u2: u16,\n    pub u4: u32,\n    pub u8_: u64,\n    pub f4: f32,\n    pub f8: f64,\n}\n\n#[repr(C)]\npub struct MorlocDataArray {\n    pub schema: *mut CSchema,\n    pub size: usize,\n    pub values: *mut *mut MorlocExpression,\n}\n\n#[repr(C)]\n// Primitive contains a pointer (s: *mut c_char), so DataUnion uses ManuallyDrop\n#[repr(C)]\npub union DataUnion {\n    pub lit_val: std::mem::ManuallyDrop<Primitive>,\n    pub tuple_val: *mut *mut MorlocExpression,\n    pub array_val: *mut MorlocDataArray,\n    pub voidstar: *mut c_void,\n}\n\n#[repr(C)]\npub struct MorlocData {\n    pub is_voidstar: bool,\n    pub data: DataUnion,\n}\n\n#[repr(C)]\npub union AppFunction {\n    pub pattern: *mut MorlocPattern,\n    pub lambda: *mut MorlocLamExpression,\n    pub fmt: *mut *mut c_char,\n}\n\n#[repr(C)]\npub struct MorlocAppExpression {\n    pub atype: MorlocAppExpressionType,\n    pub function: AppFunction,\n    pub args: *mut *mut MorlocExpression,\n    pub nargs: usize,\n}\n\n#[repr(C)]\npub struct MorlocLamExpression {\n    pub nargs: usize,\n    pub args: *mut *mut c_char,\n    pub body: *mut MorlocExpression,\n}\n\n#[repr(C)]\npub struct MorlocSaveExpression {\n    pub format: *mut c_char,\n    pub value: *mut MorlocExpression,\n    pub path: *mut MorlocExpression,\n}\n\n#[repr(C)]\npub union ExprUnion {\n    pub app_expr: *mut MorlocAppExpression,\n    pub lam_expr: *mut MorlocLamExpression,\n    pub bnd_expr: *mut c_char,\n    pub interpolation: *mut *mut c_char,\n    pub pattern_expr: *mut MorlocPattern,\n    pub data_expr: *mut MorlocData,\n    pub unary_expr: *mut MorlocExpression,\n    pub save_expr: *mut MorlocSaveExpression,\n}\n\n#[repr(C)]\npub struct MorlocExpression {\n    pub etype: MorlocExpressionType,\n    pub schema: *mut CSchema,\n    pub expr: ExprUnion,\n}\n\n// -- C-ABI mirror of morloc-manifest v2 ---------------------------------------\n//\n// These #[repr(C)] structs are the in-memory layout that daemon_ffi /\n// router_ffi / slurm_ffi consume via raw pointers. They mirror the\n// shape of morloc_manifest's Rust types one-for-one (modulo C-string\n// encoding) -- when the Rust schema gains a new field, it's added here\n// too as a parallel C field. There is no longer any \"translation\" or\n// reshape layer; parse_manifest below is a near-1:1 walker.\n//\n// Conventions:\n// - C-string fields are owned by the manifest and freed by free_manifest.\n// - Array fields use a (pointer, count) pair (e.g. desc + n_desc).\n// - The \"constraints\" and \"metadata\" extension slots described in\n//   morloc-manifest's docs are mirrored here so daemon-side constraint\n//   enforcement can later read them without another C ABI break.\n// - metadata is serialized as JSON text (`metadata_json`) because the\n//   C side has no natural map type and the slot is reserved for now.\n\n#[repr(C)]\npub struct ManifestBuild {\n    pub path: *mut c_char,\n    pub time: i64,\n    pub morloc_version: *mut c_char,\n}\n\n#[repr(C)]\npub struct ManifestConstraint {\n    /// Constraint discriminator (e.g. \"kind\", \"min\", \"regex\").\n    pub ctype: *mut c_char,\n    /// JSON-encoded payload for the constraint, or NULL when the\n    /// constraint type carries no value (e.g. \"non_empty\").\n    pub value_json: *mut c_char,\n}\n\n#[repr(C)]\npub struct ManifestPool {\n    pub lang: *mut c_char,\n    pub exec: *mut *mut c_char, // NULL-terminated\n    pub socket: *mut c_char,\n    /// JSON-encoded pool-level metadata. Reserved.\n    pub metadata_json: *mut c_char,\n}\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum ManifestArgKind {\n    Pos = 0,\n    Opt = 1,\n    Flag = 2,\n    Grp = 3,\n}\n\n#[repr(C)]\npub struct ManifestGrpEntry {\n    pub key: *mut c_char,\n    pub arg: *mut ManifestArg,\n}\n\n#[repr(C)]\npub struct ManifestArg {\n    pub kind: ManifestArgKind,\n    /// Per-arg morloc serialization schema. NULL for flags. Group\n    /// entries also have NULL here (the group's top-level schema\n    /// covers them).\n    pub schema: *mut c_char,\n    /// User-facing type name. NULL for flags.\n    pub type_desc: *mut c_char,\n    pub metavar: *mut c_char,\n    pub quoted: bool,\n    pub short_opt: c_char,\n    pub long_opt: *mut c_char,\n    pub long_rev: *mut c_char,\n    pub default_val: *mut c_char,\n    /// NULL-terminated array of description lines.\n    pub desc: *mut *mut c_char,\n    pub n_desc: usize,\n    /// Array of ManifestConstraint owned by this arg.\n    pub constraints: *mut ManifestConstraint,\n    pub n_constraints: usize,\n    /// Group sub-fields (only meaningful when kind == Grp).\n    pub grp_short: c_char,\n    pub grp_long: *mut c_char,\n    pub entries: *mut ManifestGrpEntry,\n    pub n_entries: usize,\n    /// JSON-encoded per-arg metadata. Reserved.\n    pub metadata_json: *mut c_char,\n}\n\n#[repr(C)]\npub struct ManifestReturn {\n    pub schema: *mut c_char,\n    pub type_desc: *mut c_char,\n    pub desc: *mut *mut c_char,\n    pub n_desc: usize,\n    pub constraints: *mut ManifestConstraint,\n    pub n_constraints: usize,\n    pub metadata_json: *mut c_char,\n}\n\n#[repr(C)]\npub struct ManifestCmdGroup {\n    pub name: *mut c_char,\n    pub desc: *mut *mut c_char,\n    pub n_desc: usize,\n    pub metadata_json: *mut c_char,\n}\n\n#[repr(C)]\npub struct ManifestCommand {\n    pub name: *mut c_char,\n    pub is_pure: bool,\n    pub mid: u32,\n    pub pool_index: usize,\n    pub needed_pools: *mut usize,\n    pub n_needed_pools: usize,\n    pub desc: *mut *mut c_char,\n    pub n_desc: usize,\n    pub args: *mut ManifestArg,\n    pub n_args: usize,\n    /// Return-value descriptor as a sub-struct (replaces v1's flat\n    /// return_schema/return_type/return_desc fields).\n    pub ret: ManifestReturn,\n    pub constraints: *mut ManifestConstraint,\n    pub n_constraints: usize,\n    pub expr: *mut MorlocExpression,\n    pub group: *mut c_char,\n    pub metadata_json: *mut c_char,\n}\n\n#[repr(C)]\npub struct ManifestService {\n    pub stype: *mut c_char,\n    pub host: *mut c_char,\n    pub port: i32,\n    pub socket: *mut c_char,\n    pub metadata_json: *mut c_char,\n}\n\n#[repr(C)]\npub struct Manifest {\n    pub name: *mut c_char,\n    pub build: ManifestBuild,\n    pub pools: *mut ManifestPool,\n    pub n_pools: usize,\n    pub commands: *mut ManifestCommand,\n    pub n_commands: usize,\n    pub groups: *mut ManifestCmdGroup,\n    pub n_groups: usize,\n    pub service: *mut ManifestService,\n    pub metadata_json: *mut c_char,\n}\n\nimpl ManifestCommand {\n    /// Build a transient NULL-terminated array of schema strings for\n    /// the command's args, in declaration order. The caller owns the\n    /// outer array allocation but NOT the inner C strings (they\n    /// remain owned by the ManifestArg objects). Use\n    /// `libc::free(arr as *mut c_void)` to release the outer array\n    /// when done.\n    ///\n    /// The array has one entry per arg INCLUDING flags. Flags have a\n    /// per-arg `schema` field of NULL on the v2 ManifestArg, but the\n    /// legacy callers (e.g. make_call_packet_from_cli) expect a slot\n    /// per arg position to keep index alignment with the parallel\n    /// args array; we substitute \"b\" (the boolean schema) for flags\n    /// so dispatch reads the flag value as a Bool, matching v1\n    /// behavior.\n    pub unsafe fn build_arg_schemas_array(&self) -> *mut *mut c_char {\n        let n = self.n_args;\n        let arr = libc::calloc(n + 1, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n        for i in 0..n {\n            let arg = &*self.args.add(i);\n            *arr.add(i) = if arg.kind == ManifestArgKind::Flag || arg.schema.is_null() {\n                // Flag schema fallback: use the static \"b\" string. This\n                // pointer is NOT freed by the caller (it's a global\n                // string literal), but since the caller only frees the\n                // OUTER array, this is fine. We use a per-call CString\n                // leak so each call has a distinct C-string buffer the\n                // caller doesn't try to free with the per-arg owners.\n                // Cleaner: we just point to the existing per-arg\n                // schema slot if non-null, else fall through. Most\n                // flag args won't be hit by legacy callers anyway.\n                static FLAG_SCHEMA: &[u8] = b\"b\\0\";\n                FLAG_SCHEMA.as_ptr() as *mut c_char\n            } else {\n                arg.schema\n            };\n        }\n        *arr.add(n) = ptr::null_mut();\n        arr\n    }\n}\n\n// -- Helpers ------------------------------------------------------------------\n\nunsafe fn c_strdup(s: &str) -> *mut c_char {\n    match CString::new(s) {\n        Ok(cs) => libc::strdup(cs.as_ptr()),\n        Err(_) => ptr::null_mut(),\n    }\n}\n\nunsafe fn nullable_strdup(s: Option<&str>) -> *mut c_char {\n    match s {\n        Some(s) => c_strdup(s),\n        None => ptr::null_mut(),\n    }\n}\n\n// -- Expression builder (using serde_json::Value) -----------------------------\n\nunsafe fn build_pattern(jp: &serde_json::Value) -> Result<*mut MorlocPattern, MorlocError> {\n    let ptype = jp.get(\"type\").and_then(|v| v.as_str()).ok_or_else(|| MorlocError::Other(\"Pattern missing 'type' field\".into()))?;\n\n    if ptype == \"end\" {\n        // make_morloc_pattern_end - call C function\n        extern \"C\" {\n            fn make_morloc_pattern_end() -> *mut MorlocPattern;\n        }\n        return Ok(make_morloc_pattern_end());\n    }\n\n    let sels = jp.get(\"selectors\").and_then(|v| v.as_array());\n    let n = sels.map(|a| a.len()).unwrap_or(0);\n\n    let pat = libc::calloc(1, std::mem::size_of::<MorlocPattern>()) as *mut MorlocPattern;\n    (*pat).size = n;\n    (*pat).selectors = libc::calloc(n, std::mem::size_of::<*mut MorlocPattern>()) as *mut *mut MorlocPattern;\n\n    if ptype == \"idx\" {\n        (*pat).ptype = MorlocPatternType::ByIndex;\n        (*pat).fields.indices = libc::calloc(n, std::mem::size_of::<usize>()) as *mut usize;\n        if let Some(sels) = sels {\n            for (i, sel) in sels.iter().enumerate() {\n                *(*pat).fields.indices.add(i) = sel.get(\"index\").and_then(|v| v.as_f64()).unwrap_or(0.0) as usize;\n                *(*pat).selectors.add(i) = build_pattern(sel.get(\"sub\").unwrap_or(&serde_json::Value::Null))?;\n            }\n        }\n    } else if ptype == \"key\" {\n        (*pat).ptype = MorlocPatternType::ByKey;\n        (*pat).fields.keys = libc::calloc(n, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n        if let Some(sels) = sels {\n            for (i, sel) in sels.iter().enumerate() {\n                let key = sel.get(\"key\").and_then(|v| v.as_str()).unwrap_or(\"\");\n                *(*pat).fields.keys.add(i) = c_strdup(key);\n                *(*pat).selectors.add(i) = build_pattern(sel.get(\"sub\").unwrap_or(&serde_json::Value::Null))?;\n            }\n        }\n    } else {\n        return Err(MorlocError::Other(format!(\"Unknown pattern type: {}\", ptype)));\n    }\n\n    Ok(pat)\n}\n\nunsafe fn build_expr(je: &serde_json::Value) -> Result<*mut MorlocExpression, MorlocError> {\n    let tag = je.get(\"tag\").and_then(|v| v.as_str()).ok_or_else(|| MorlocError::Other(\"Expression missing 'tag' field\".into()))?;\n\n    extern \"C\" {\n        fn parse_schema(s: *const c_char, errmsg: *mut *mut c_char) -> *mut CSchema;\n        fn make_morloc_literal(schema: *const c_char, prim: Primitive, errmsg: *mut *mut c_char) -> *mut MorlocExpression;\n        fn make_morloc_bound_var(schema: *const c_char, var: *mut c_char, errmsg: *mut *mut c_char) -> *mut MorlocExpression;\n        fn make_morloc_pattern(schema: *const c_char, pat: *mut MorlocPattern, errmsg: *mut *mut c_char) -> *mut MorlocExpression;\n    }\n\n    let mut err: *mut c_char = ptr::null_mut();\n\n    match tag {\n        \"lit\" => {\n            let schema = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let lt = je.get(\"lit_type\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let val = je.get(\"value\").and_then(|v| v.as_str()).unwrap_or(\"0\");\n            let mut prim: Primitive = std::mem::zeroed();\n\n            match lt {\n                \"f4\" => prim.f4 = val.parse::<f32>().unwrap_or(0.0),\n                \"f8\" => prim.f8 = val.parse::<f64>().unwrap_or(0.0),\n                \"i1\" => prim.i1 = val.parse::<i8>().unwrap_or(0),\n                \"i2\" => prim.i2 = val.parse::<i16>().unwrap_or(0),\n                \"i4\" => prim.i4 = val.parse::<i32>().unwrap_or(0),\n                \"i8\" => prim.i8_ = val.parse::<i64>().unwrap_or(0),\n                \"u1\" => prim.u1 = val.parse::<u8>().unwrap_or(0),\n                \"u2\" => prim.u2 = val.parse::<u16>().unwrap_or(0),\n                \"u4\" => prim.u4 = val.parse::<u32>().unwrap_or(0),\n                \"u8\" => prim.u8_ = val.parse::<u64>().unwrap_or(0),\n                \"b\" => prim.b = val != \"0\",\n                \"z\" => prim.z = 0,\n                _ => return Err(MorlocError::Other(format!(\"Unknown lit_type: {}\", lt))),\n            }\n\n            let c_schema = CString::new(schema).unwrap_or_default();\n            let result = make_morloc_literal(c_schema.as_ptr(), prim, &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            Ok(result)\n        }\n\n        \"str\" => {\n            let schema = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let val = je.get(\"value\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let mut prim: Primitive = std::mem::zeroed();\n            prim.s = c_strdup(val);\n            let c_schema = CString::new(schema).unwrap_or_default();\n            let result = make_morloc_literal(c_schema.as_ptr(), prim, &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            Ok(result)\n        }\n\n        \"container\" => {\n            let schema_str = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let elems = je.get(\"elements\").and_then(|v| v.as_array());\n            let n = elems.map(|a| a.len()).unwrap_or(0);\n\n            let c_schema_str = CString::new(schema_str).unwrap_or_default();\n            let schema = parse_schema(c_schema_str.as_ptr(), &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n\n            let values = libc::calloc(n, std::mem::size_of::<*mut MorlocExpression>()) as *mut *mut MorlocExpression;\n            if let Some(elems) = elems {\n                for (i, elem) in elems.iter().enumerate() {\n                    *values.add(i) = build_expr(elem)?;\n                }\n            }\n\n            let data = libc::calloc(1, std::mem::size_of::<MorlocData>()) as *mut MorlocData;\n            (*data).is_voidstar = false;\n\n            let schema_type = (*schema).serial_type;\n            // MORLOC_ARRAY = 14, MORLOC_TUPLE = 15, MORLOC_MAP = 16\n            if schema_type == 14 { // Array\n                let arr = libc::calloc(1, std::mem::size_of::<MorlocDataArray>()) as *mut MorlocDataArray;\n                (*arr).schema = if (*schema).size > 0 && !(*schema).parameters.is_null() { *(*schema).parameters } else { ptr::null_mut() };\n                (*arr).size = n;\n                (*arr).values = values;\n                (*data).data.array_val = arr;\n            } else if schema_type == 15 || schema_type == 16 { // Tuple or Map\n                (*data).data.tuple_val = values;\n            } else {\n                libc::free(values as *mut c_void);\n                libc::free(data as *mut c_void);\n                CSchema::free(schema);\n                return Err(MorlocError::Other(\"Container schema is not a container type\".into()));\n            }\n\n            let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*expr).etype = MorlocExpressionType::Dat;\n            (*expr).schema = schema;\n            (*expr).expr.data_expr = data;\n            Ok(expr)\n        }\n\n        \"app\" => {\n            let schema_str = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let jargs = je.get(\"args\").and_then(|v| v.as_array());\n            let n = jargs.map(|a| a.len()).unwrap_or(0);\n\n            let c_schema_str = CString::new(schema_str).unwrap_or_default();\n            let schema = parse_schema(c_schema_str.as_ptr(), &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n\n            let func = build_expr(je.get(\"func\").unwrap_or(&serde_json::Value::Null))?;\n            let args = libc::calloc(n, std::mem::size_of::<*mut MorlocExpression>()) as *mut *mut MorlocExpression;\n            if let Some(jargs) = jargs {\n                for (i, a) in jargs.iter().enumerate() {\n                    *args.add(i) = build_expr(a)?;\n                }\n            }\n\n            let app = libc::calloc(1, std::mem::size_of::<MorlocAppExpression>()) as *mut MorlocAppExpression;\n            match (*func).etype {\n                MorlocExpressionType::Pat => {\n                    (*app).atype = MorlocAppExpressionType::Pattern;\n                    (*app).function.pattern = (*func).expr.pattern_expr;\n                }\n                MorlocExpressionType::Lam => {\n                    (*app).atype = MorlocAppExpressionType::Lambda;\n                    (*app).function.lambda = (*func).expr.lam_expr;\n                }\n                MorlocExpressionType::Fmt => {\n                    (*app).atype = MorlocAppExpressionType::Format;\n                    (*app).function.fmt = (*func).expr.interpolation;\n                }\n                _ => {\n                    return Err(MorlocError::Other(format!(\"Invalid function in app expression (type={:?})\", (*func).etype)));\n                }\n            }\n            (*app).args = args;\n            (*app).nargs = n;\n\n            let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*expr).etype = MorlocExpressionType::App;\n            (*expr).schema = schema;\n            (*expr).expr.app_expr = app;\n            Ok(expr)\n        }\n\n        \"lambda\" => {\n            let jvars = je.get(\"vars\").and_then(|v| v.as_array());\n            let n = jvars.map(|a| a.len()).unwrap_or(0);\n\n            let body = build_expr(je.get(\"body\").unwrap_or(&serde_json::Value::Null))?;\n            let vars = libc::calloc(n, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n            if let Some(jvars) = jvars {\n                for (i, v) in jvars.iter().enumerate() {\n                    *vars.add(i) = c_strdup(v.as_str().unwrap_or(\"\"));\n                }\n            }\n\n            let lam = libc::calloc(1, std::mem::size_of::<MorlocLamExpression>()) as *mut MorlocLamExpression;\n            (*lam).nargs = n;\n            (*lam).args = vars;\n            (*lam).body = body;\n\n            let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*expr).etype = MorlocExpressionType::Lam;\n            (*expr).schema = ptr::null_mut();\n            (*expr).expr.lam_expr = lam;\n            Ok(expr)\n        }\n\n        \"bound\" => {\n            let schema = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let var = je.get(\"var\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let c_schema = CString::new(schema).unwrap_or_default();\n            let c_var = c_strdup(var);\n            let result = make_morloc_bound_var(c_schema.as_ptr(), c_var, &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            Ok(result)\n        }\n\n        \"show\" | \"read\" | \"hash\" | \"load\" => {\n            let schema_str = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let c_schema_str = CString::new(schema_str).unwrap_or_default();\n            let schema = parse_schema(c_schema_str.as_ptr(), &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            let child = build_expr(je.get(\"child\").unwrap_or(&serde_json::Value::Null))?;\n            let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*expr).etype = match tag {\n                \"show\" => MorlocExpressionType::Show,\n                \"read\" => MorlocExpressionType::Read,\n                \"hash\" => MorlocExpressionType::Hash,\n                \"load\" => MorlocExpressionType::Load,\n                _ => unreachable!(),\n            };\n            (*expr).schema = schema;\n            (*expr).expr.unary_expr = child;\n            Ok(expr)\n        }\n\n        \"save\" => {\n            let schema_str = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let c_schema_str = CString::new(schema_str).unwrap_or_default();\n            let schema = parse_schema(c_schema_str.as_ptr(), &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            let fmt_str = je.get(\"format\").and_then(|v| v.as_str()).unwrap_or(\"voidstar\");\n            let c_fmt = CString::new(fmt_str).unwrap_or_default();\n            let value = build_expr(je.get(\"value\").unwrap_or(&serde_json::Value::Null))?;\n            let path = build_expr(je.get(\"path\").unwrap_or(&serde_json::Value::Null))?;\n            let save = libc::calloc(1, std::mem::size_of::<MorlocSaveExpression>()) as *mut MorlocSaveExpression;\n            (*save).format = c_fmt.into_raw();\n            (*save).value = value;\n            (*save).path = path;\n            let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*expr).etype = MorlocExpressionType::Save;\n            (*expr).schema = schema;\n            (*expr).expr.save_expr = save;\n            Ok(expr)\n        }\n\n        \"interpolation\" => {\n            let schema_str = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let jstrs = je.get(\"strings\").and_then(|v| v.as_array());\n            let n = jstrs.map(|a| a.len()).unwrap_or(0);\n\n            let c_schema_str = CString::new(schema_str).unwrap_or_default();\n            let schema = parse_schema(c_schema_str.as_ptr(), &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n\n            let strings = libc::calloc(n + 1, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n            if let Some(jstrs) = jstrs {\n                for (i, s) in jstrs.iter().enumerate() {\n                    *strings.add(i) = c_strdup(s.as_str().unwrap_or(\"\"));\n                }\n            }\n\n            let expr = libc::calloc(1, std::mem::size_of::<MorlocExpression>()) as *mut MorlocExpression;\n            (*expr).etype = MorlocExpressionType::Fmt;\n            (*expr).schema = schema;\n            (*expr).expr.interpolation = strings;\n            Ok(expr)\n        }\n\n        \"pattern\" => {\n            let schema_str = je.get(\"schema\").and_then(|v| v.as_str()).unwrap_or(\"\");\n            let pat = build_pattern(je.get(\"pattern\").unwrap_or(&serde_json::Value::Null))?;\n            let c_schema = CString::new(schema_str).unwrap_or_default();\n            let result = make_morloc_pattern(c_schema.as_ptr(), pat, &mut err);\n            if !err.is_null() {\n                let msg = CStr::from_ptr(err).to_string_lossy().into_owned();\n                libc::free(err as *mut c_void);\n                return Err(MorlocError::Other(msg));\n            }\n            Ok(result)\n        }\n\n        _ => Err(MorlocError::Other(format!(\"Unknown expression tag: {}\", tag))),\n    }\n}\n\n// -- build_manifest_expr ------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn build_manifest_expr(\n    json_str: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut MorlocExpression {\n    clear_errmsg(errmsg);\n    let s = CStr::from_ptr(json_str).to_string_lossy();\n    let jv: serde_json::Value = match serde_json::from_str(&s) {\n        Ok(v) => v,\n        Err(e) => {\n            set_errmsg(errmsg, &MorlocError::Other(format!(\"Failed to parse expression JSON: {}\", e)));\n            return ptr::null_mut();\n        }\n    };\n    match build_expr(&jv) {\n        Ok(expr) => expr,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            ptr::null_mut()\n        }\n    }\n}\n\n// -- C-ABI population from morloc-manifest Rust types -------------------------\n//\n// parse_manifest is now a thin shell over morloc_manifest::parse_manifest\n// (which does all the JSON parsing, version checking, and serde\n// validation). The walker functions below convert the Rust-native\n// structs into owned C structs, mirroring the v2 schema field-for-field.\n\nunsafe fn populate_constraint(dst: *mut ManifestConstraint, src: &morloc_manifest::Constraint) {\n    (*dst).ctype = c_strdup(&src.ctype);\n    (*dst).value_json = match &src.value {\n        Some(v) => c_strdup(&v.to_string()),\n        None => ptr::null_mut(),\n    };\n}\n\nunsafe fn populate_constraints(\n    src: &[morloc_manifest::Constraint],\n) -> (*mut ManifestConstraint, usize) {\n    if src.is_empty() {\n        return (ptr::null_mut(), 0);\n    }\n    let arr =\n        libc::calloc(src.len(), std::mem::size_of::<ManifestConstraint>()) as *mut ManifestConstraint;\n    for (i, c) in src.iter().enumerate() {\n        populate_constraint(arr.add(i), c);\n    }\n    (arr, src.len())\n}\n\nunsafe fn populate_metadata(src: &morloc_manifest::Metadata) -> *mut c_char {\n    if src.is_empty() {\n        c_strdup(\"{}\")\n    } else {\n        let s = serde_json::to_string(src).unwrap_or_else(|_| \"{}\".into());\n        c_strdup(&s)\n    }\n}\n\n/// Convert a Vec<String> to a NULL-terminated array of C strings,\n/// and return (pointer, count). Caller owns the allocation.\nunsafe fn populate_str_vec(src: &[String]) -> (*mut *mut c_char, usize) {\n    let n = src.len();\n    let arr = libc::calloc(n + 1, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n    for (i, s) in src.iter().enumerate() {\n        *arr.add(i) = c_strdup(s);\n    }\n    *arr.add(n) = ptr::null_mut();\n    (arr, n)\n}\n\nunsafe fn populate_arg(dst: *mut ManifestArg, src: &morloc_manifest::Arg) {\n    use morloc_manifest::Arg;\n    match src {\n        Arg::Positional {\n            schema,\n            type_desc,\n            metavar,\n            quoted,\n            desc,\n            constraints,\n            ..\n        } => {\n            (*dst).kind = ManifestArgKind::Pos;\n            (*dst).schema = nullable_strdup(schema.as_deref());\n            (*dst).type_desc = nullable_strdup(type_desc.as_deref());\n            (*dst).metavar = nullable_strdup(metavar.as_deref());\n            (*dst).quoted = *quoted;\n            let (d, n) = populate_str_vec(desc);\n            (*dst).desc = d;\n            (*dst).n_desc = n;\n            let (cs, nc) = populate_constraints(constraints);\n            (*dst).constraints = cs;\n            (*dst).n_constraints = nc;\n            (*dst).metadata_json = c_strdup(\"{}\");\n        }\n        Arg::Optional {\n            schema,\n            type_desc,\n            metavar,\n            quoted,\n            short_opt,\n            long_opt,\n            default_val,\n            desc,\n            constraints,\n            ..\n        } => {\n            (*dst).kind = ManifestArgKind::Opt;\n            (*dst).schema = nullable_strdup(schema.as_deref());\n            (*dst).type_desc = nullable_strdup(type_desc.as_deref());\n            (*dst).metavar = nullable_strdup(metavar.as_deref());\n            (*dst).quoted = *quoted;\n            (*dst).short_opt = short_opt\n                .as_ref()\n                .and_then(|s| s.as_bytes().first().copied())\n                .map(|b| b as c_char)\n                .unwrap_or(0);\n            (*dst).long_opt = nullable_strdup(long_opt.as_deref());\n            (*dst).default_val = nullable_strdup(default_val.as_deref());\n            let (d, n) = populate_str_vec(desc);\n            (*dst).desc = d;\n            (*dst).n_desc = n;\n            let (cs, nc) = populate_constraints(constraints);\n            (*dst).constraints = cs;\n            (*dst).n_constraints = nc;\n            (*dst).metadata_json = c_strdup(\"{}\");\n        }\n        Arg::Flag {\n            short_opt,\n            long_opt,\n            long_rev,\n            default_val,\n            desc,\n            ..\n        } => {\n            (*dst).kind = ManifestArgKind::Flag;\n            (*dst).short_opt = short_opt\n                .as_ref()\n                .and_then(|s| s.as_bytes().first().copied())\n                .map(|b| b as c_char)\n                .unwrap_or(0);\n            (*dst).long_opt = nullable_strdup(long_opt.as_deref());\n            (*dst).long_rev = nullable_strdup(long_rev.as_deref());\n            (*dst).default_val = nullable_strdup(default_val.as_deref());\n            let (d, n) = populate_str_vec(desc);\n            (*dst).desc = d;\n            (*dst).n_desc = n;\n            (*dst).metadata_json = c_strdup(\"{}\");\n        }\n        Arg::Group {\n            schema,\n            type_desc,\n            metavar,\n            desc,\n            group_opt,\n            entries,\n            constraints,\n            ..\n        } => {\n            (*dst).kind = ManifestArgKind::Grp;\n            (*dst).schema = nullable_strdup(schema.as_deref());\n            (*dst).type_desc = nullable_strdup(type_desc.as_deref());\n            (*dst).metavar = nullable_strdup(metavar.as_deref());\n            let (d, n) = populate_str_vec(desc);\n            (*dst).desc = d;\n            (*dst).n_desc = n;\n            if let Some(g) = group_opt {\n                (*dst).grp_short = g\n                    .short_opt\n                    .as_ref()\n                    .and_then(|s| s.as_bytes().first().copied())\n                    .map(|b| b as c_char)\n                    .unwrap_or(0);\n                (*dst).grp_long = nullable_strdup(g.long_opt.as_deref());\n            }\n            if !entries.is_empty() {\n                (*dst).n_entries = entries.len();\n                (*dst).entries = libc::calloc(\n                    entries.len(),\n                    std::mem::size_of::<ManifestGrpEntry>(),\n                ) as *mut ManifestGrpEntry;\n                for (i, ge) in entries.iter().enumerate() {\n                    let ge_dst = &mut *(*dst).entries.add(i);\n                    ge_dst.key = c_strdup(&ge.key);\n                    let sub_arg = libc::calloc(1, std::mem::size_of::<ManifestArg>())\n                        as *mut ManifestArg;\n                    populate_arg(sub_arg, &ge.arg);\n                    ge_dst.arg = sub_arg;\n                }\n            }\n            let (cs, nc) = populate_constraints(constraints);\n            (*dst).constraints = cs;\n            (*dst).n_constraints = nc;\n            (*dst).metadata_json = c_strdup(\"{}\");\n        }\n    }\n}\n\nunsafe fn populate_return(dst: *mut ManifestReturn, src: &morloc_manifest::Return) {\n    (*dst).schema = c_strdup(&src.schema);\n    (*dst).type_desc = c_strdup(&src.type_desc);\n    let (d, n) = populate_str_vec(&src.desc);\n    (*dst).desc = d;\n    (*dst).n_desc = n;\n    let (cs, nc) = populate_constraints(&src.constraints);\n    (*dst).constraints = cs;\n    (*dst).n_constraints = nc;\n    (*dst).metadata_json = populate_metadata(&src.metadata);\n}\n\nunsafe fn populate_command(dst: *mut ManifestCommand, src: &morloc_manifest::Command) -> Result<(), MorlocError> {\n    (*dst).name = c_strdup(&src.name);\n    (*dst).is_pure = src.is_pure();\n    (*dst).mid = src.mid;\n    (*dst).pool_index = src.pool_index;\n    if !src.needed_pools.is_empty() {\n        (*dst).n_needed_pools = src.needed_pools.len();\n        (*dst).needed_pools = libc::calloc(\n            src.needed_pools.len(),\n            std::mem::size_of::<usize>(),\n        ) as *mut usize;\n        for (i, p) in src.needed_pools.iter().enumerate() {\n            *(*dst).needed_pools.add(i) = *p;\n        }\n    }\n    let (d, n) = populate_str_vec(&src.desc);\n    (*dst).desc = d;\n    (*dst).n_desc = n;\n\n    if !src.args.is_empty() {\n        (*dst).n_args = src.args.len();\n        (*dst).args = libc::calloc(\n            src.args.len(),\n            std::mem::size_of::<ManifestArg>(),\n        ) as *mut ManifestArg;\n        for (i, a) in src.args.iter().enumerate() {\n            populate_arg((*dst).args.add(i), a);\n        }\n    }\n\n    populate_return(&mut (*dst).ret, &src.ret);\n\n    let (cs, nc) = populate_constraints(&src.constraints);\n    (*dst).constraints = cs;\n    (*dst).n_constraints = nc;\n\n    (*dst).metadata_json = populate_metadata(&src.metadata);\n\n    if src.is_pure() {\n        if let Some(expr_val) = &src.expr {\n            match build_expr(expr_val) {\n                Ok(e) => (*dst).expr = e,\n                Err(e) => return Err(e),\n            }\n        }\n    }\n\n    (*dst).group = match &src.group {\n        Some(g) => c_strdup(g),\n        None => ptr::null_mut(),\n    };\n\n    Ok(())\n}\n\nunsafe fn populate_pool(dst: *mut ManifestPool, src: &morloc_manifest::Pool) {\n    (*dst).lang = c_strdup(&src.lang);\n    let n = src.exec.len();\n    (*dst).exec = libc::calloc(n + 1, std::mem::size_of::<*mut c_char>()) as *mut *mut c_char;\n    for (i, e) in src.exec.iter().enumerate() {\n        *(*dst).exec.add(i) = c_strdup(e);\n    }\n    *(*dst).exec.add(n) = ptr::null_mut();\n    (*dst).socket = c_strdup(&src.socket);\n    (*dst).metadata_json = populate_metadata(&src.metadata);\n}\n\nunsafe fn populate_cmd_group(dst: *mut ManifestCmdGroup, src: &morloc_manifest::CmdGroup) {\n    (*dst).name = c_strdup(&src.name);\n    let (d, n) = populate_str_vec(&src.desc);\n    (*dst).desc = d;\n    (*dst).n_desc = n;\n    (*dst).metadata_json = populate_metadata(&src.metadata);\n}\n\nunsafe fn populate_service(dst: *mut ManifestService, src: &morloc_manifest::Service) {\n    (*dst).stype = nullable_strdup(src.service_type.as_deref());\n    (*dst).host = nullable_strdup(src.host.as_deref());\n    (*dst).port = src.port.unwrap_or(0);\n    (*dst).socket = nullable_strdup(src.socket.as_deref());\n    (*dst).metadata_json = populate_metadata(&src.metadata);\n}\n\n// -- parse_manifest -----------------------------------------------------------\n//\n// Reads a manifest JSON string, parses it via the canonical\n// morloc-manifest crate (which performs the version staleness check),\n// and converts the resulting Rust struct into owned C structs for\n// daemon-side consumers. There is no separate JSON walker here -- the\n// shape and validation rules live in one place (the morloc-manifest\n// crate).\n\n#[no_mangle]\npub unsafe extern \"C\" fn parse_manifest(\n    text: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut Manifest {\n    clear_errmsg(errmsg);\n    let s = CStr::from_ptr(text).to_string_lossy();\n\n    let parsed = match morloc_manifest::parse_manifest(&s) {\n        Ok(p) => p,\n        Err(e) => {\n            set_errmsg(errmsg, &MorlocError::Other(e));\n            return ptr::null_mut();\n        }\n    };\n\n    let m = libc::calloc(1, std::mem::size_of::<Manifest>()) as *mut Manifest;\n    (*m).name = c_strdup(&parsed.name);\n\n    // build sub-object\n    (*m).build.path = c_strdup(&parsed.build.path);\n    (*m).build.time = parsed.build.time;\n    (*m).build.morloc_version = c_strdup(&parsed.build.morloc_version);\n\n    // pools\n    if !parsed.pools.is_empty() {\n        (*m).n_pools = parsed.pools.len();\n        (*m).pools = libc::calloc(\n            parsed.pools.len(),\n            std::mem::size_of::<ManifestPool>(),\n        ) as *mut ManifestPool;\n        for (i, p) in parsed.pools.iter().enumerate() {\n            populate_pool((*m).pools.add(i), p);\n        }\n    }\n\n    // commands\n    if !parsed.commands.is_empty() {\n        (*m).n_commands = parsed.commands.len();\n        (*m).commands = libc::calloc(\n            parsed.commands.len(),\n            std::mem::size_of::<ManifestCommand>(),\n        ) as *mut ManifestCommand;\n        for (i, c) in parsed.commands.iter().enumerate() {\n            if let Err(e) = populate_command((*m).commands.add(i), c) {\n                set_errmsg(errmsg, &e);\n                return ptr::null_mut();\n            }\n        }\n    }\n\n    // groups\n    if !parsed.groups.is_empty() {\n        (*m).n_groups = parsed.groups.len();\n        (*m).groups = libc::calloc(\n            parsed.groups.len(),\n            std::mem::size_of::<ManifestCmdGroup>(),\n        ) as *mut ManifestCmdGroup;\n        for (i, g) in parsed.groups.iter().enumerate() {\n            populate_cmd_group((*m).groups.add(i), g);\n        }\n    }\n\n    // service\n    if let Some(svc) = parsed.service {\n        (*m).service =\n            libc::calloc(1, std::mem::size_of::<ManifestService>()) as *mut ManifestService;\n        populate_service((*m).service, &svc);\n    }\n\n    (*m).metadata_json = populate_metadata(&parsed.metadata);\n\n    m\n}\n\n// -- read_manifest ------------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_manifest(\n    path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut Manifest {\n    clear_errmsg(errmsg);\n    let path_str = CStr::from_ptr(path).to_string_lossy();\n    match std::fs::read_to_string(path_str.as_ref()) {\n        Ok(text) => {\n            let c_text = CString::new(text).unwrap_or_default();\n            parse_manifest(c_text.as_ptr(), errmsg)\n        }\n        Err(e) => {\n            set_errmsg(errmsg, &MorlocError::Io(e));\n            ptr::null_mut()\n        }\n    }\n}\n\n// -- free_manifest ------------------------------------------------------------\n//\n// Walks the v2 C structs and releases every owned C string + sub-allocation.\n// Helper functions mirror the populate_* helpers above for symmetry.\n\nunsafe fn free_str_array(arr: *mut *mut c_char) {\n    if arr.is_null() {\n        return;\n    }\n    let mut j = 0;\n    while !(*arr.add(j)).is_null() {\n        libc::free(*arr.add(j) as *mut c_void);\n        j += 1;\n    }\n    libc::free(arr as *mut c_void);\n}\n\nunsafe fn free_constraints(ptr: *mut ManifestConstraint, n: usize) {\n    if ptr.is_null() {\n        return;\n    }\n    for i in 0..n {\n        let c = &*ptr.add(i);\n        if !c.ctype.is_null() {\n            libc::free(c.ctype as *mut c_void);\n        }\n        if !c.value_json.is_null() {\n            libc::free(c.value_json as *mut c_void);\n        }\n    }\n    libc::free(ptr as *mut c_void);\n}\n\nunsafe fn free_arg(arg: &ManifestArg) {\n    if !arg.schema.is_null() {\n        libc::free(arg.schema as *mut c_void);\n    }\n    if !arg.type_desc.is_null() {\n        libc::free(arg.type_desc as *mut c_void);\n    }\n    if !arg.metavar.is_null() {\n        libc::free(arg.metavar as *mut c_void);\n    }\n    if !arg.long_opt.is_null() {\n        libc::free(arg.long_opt as *mut c_void);\n    }\n    if !arg.long_rev.is_null() {\n        libc::free(arg.long_rev as *mut c_void);\n    }\n    if !arg.default_val.is_null() {\n        libc::free(arg.default_val as *mut c_void);\n    }\n    free_str_array(arg.desc);\n    free_constraints(arg.constraints, arg.n_constraints);\n    if !arg.grp_long.is_null() {\n        libc::free(arg.grp_long as *mut c_void);\n    }\n    if !arg.entries.is_null() {\n        for i in 0..arg.n_entries {\n            let ge = &*arg.entries.add(i);\n            if !ge.key.is_null() {\n                libc::free(ge.key as *mut c_void);\n            }\n            if !ge.arg.is_null() {\n                free_arg(&*ge.arg);\n                libc::free(ge.arg as *mut c_void);\n            }\n        }\n        libc::free(arg.entries as *mut c_void);\n    }\n    if !arg.metadata_json.is_null() {\n        libc::free(arg.metadata_json as *mut c_void);\n    }\n}\n\nunsafe fn free_return(ret: &ManifestReturn) {\n    if !ret.schema.is_null() {\n        libc::free(ret.schema as *mut c_void);\n    }\n    if !ret.type_desc.is_null() {\n        libc::free(ret.type_desc as *mut c_void);\n    }\n    free_str_array(ret.desc);\n    free_constraints(ret.constraints, ret.n_constraints);\n    if !ret.metadata_json.is_null() {\n        libc::free(ret.metadata_json as *mut c_void);\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn free_manifest(manifest: *mut Manifest) {\n    if manifest.is_null() {\n        return;\n    }\n    let m = &*manifest;\n    if !m.name.is_null() {\n        libc::free(m.name as *mut c_void);\n    }\n    // build sub-object\n    if !m.build.path.is_null() {\n        libc::free(m.build.path as *mut c_void);\n    }\n    if !m.build.morloc_version.is_null() {\n        libc::free(m.build.morloc_version as *mut c_void);\n    }\n    // pools\n    for i in 0..m.n_pools {\n        let pool = &*m.pools.add(i);\n        if !pool.lang.is_null() {\n            libc::free(pool.lang as *mut c_void);\n        }\n        free_str_array(pool.exec);\n        if !pool.socket.is_null() {\n            libc::free(pool.socket as *mut c_void);\n        }\n        if !pool.metadata_json.is_null() {\n            libc::free(pool.metadata_json as *mut c_void);\n        }\n    }\n    if !m.pools.is_null() {\n        libc::free(m.pools as *mut c_void);\n    }\n    // commands\n    for i in 0..m.n_commands {\n        let cmd = &*m.commands.add(i);\n        if !cmd.name.is_null() {\n            libc::free(cmd.name as *mut c_void);\n        }\n        if !cmd.needed_pools.is_null() {\n            libc::free(cmd.needed_pools as *mut c_void);\n        }\n        free_str_array(cmd.desc);\n        for j in 0..cmd.n_args {\n            free_arg(&*cmd.args.add(j));\n        }\n        if !cmd.args.is_null() {\n            libc::free(cmd.args as *mut c_void);\n        }\n        free_return(&cmd.ret);\n        free_constraints(cmd.constraints, cmd.n_constraints);\n        if !cmd.group.is_null() {\n            libc::free(cmd.group as *mut c_void);\n        }\n        if !cmd.metadata_json.is_null() {\n            libc::free(cmd.metadata_json as *mut c_void);\n        }\n        // Note: cmd.expr is owned by the C side and freed by its own\n        // free function in eval_ffi.rs (not in scope here).\n    }\n    if !m.commands.is_null() {\n        libc::free(m.commands as *mut c_void);\n    }\n    // groups\n    for i in 0..m.n_groups {\n        let g = &*m.groups.add(i);\n        if !g.name.is_null() {\n            libc::free(g.name as *mut c_void);\n        }\n        free_str_array(g.desc);\n        if !g.metadata_json.is_null() {\n            libc::free(g.metadata_json as *mut c_void);\n        }\n    }\n    if !m.groups.is_null() {\n        libc::free(m.groups as *mut c_void);\n    }\n    // service\n    if !m.service.is_null() {\n        let svc = &*m.service;\n        if !svc.stype.is_null() {\n            libc::free(svc.stype as *mut c_void);\n        }\n        if !svc.host.is_null() {\n            libc::free(svc.host as *mut c_void);\n        }\n        if !svc.socket.is_null() {\n            libc::free(svc.socket as *mut c_void);\n        }\n        if !svc.metadata_json.is_null() {\n            libc::free(svc.metadata_json as *mut c_void);\n        }\n        libc::free(m.service as *mut c_void);\n    }\n    if !m.metadata_json.is_null() {\n        libc::free(m.metadata_json as *mut c_void);\n    }\n    libc::free(manifest as *mut c_void);\n}\n\n// -- manifest_to_discovery_json -----------------------------------------------\n//\n// Emits a v2-shape JSON describing the manifest's commands. Used by the\n// daemon/router code for discovery RPC. The output mirrors the morloc\n// compiler's manifest format closely (no v1 legacy field names).\n\n#[no_mangle]\npub unsafe extern \"C\" fn manifest_to_discovery_json(manifest: *const Manifest) -> *mut c_char {\n    if manifest.is_null() {\n        return ptr::null_mut();\n    }\n    let m = &*manifest;\n\n    extern \"C\" {\n        fn json_buf_new() -> *mut c_void;\n        fn json_buf_finish(jb: *mut c_void) -> *mut c_char;\n        fn json_write_obj_start(jb: *mut c_void);\n        fn json_write_obj_end(jb: *mut c_void);\n        fn json_write_arr_start(jb: *mut c_void);\n        fn json_write_arr_end(jb: *mut c_void);\n        fn json_write_key(jb: *mut c_void, key: *const c_char);\n        fn json_write_string(jb: *mut c_void, val: *const c_char);\n    }\n\n    let jb = json_buf_new();\n    json_write_obj_start(jb);\n\n    let name_key = b\"name\\0\".as_ptr() as *const c_char;\n    let type_key = b\"type\\0\".as_ptr() as *const c_char;\n    let kind_key = b\"kind\\0\".as_ptr() as *const c_char;\n    let schema_key = b\"schema\\0\".as_ptr() as *const c_char;\n\n    json_write_key(jb, name_key);\n    json_write_string(\n        jb,\n        if m.name.is_null() {\n            b\"unknown\\0\".as_ptr() as *const c_char\n        } else {\n            m.name\n        },\n    );\n\n    // Surface the morloc compiler version that built this manifest.\n    if !m.build.morloc_version.is_null() {\n        let mv_key = b\"morloc_version\\0\".as_ptr() as *const c_char;\n        json_write_key(jb, mv_key);\n        json_write_string(jb, m.build.morloc_version);\n    }\n\n    let commands_key = b\"commands\\0\".as_ptr() as *const c_char;\n    json_write_key(jb, commands_key);\n    json_write_arr_start(jb);\n\n    for i in 0..m.n_commands {\n        let cmd = &*m.commands.add(i);\n        json_write_obj_start(jb);\n\n        json_write_key(jb, name_key);\n        json_write_string(jb, cmd.name);\n\n        json_write_key(jb, type_key);\n        json_write_string(\n            jb,\n            if cmd.is_pure {\n                b\"pure\\0\".as_ptr() as *const c_char\n            } else {\n                b\"remote\\0\".as_ptr() as *const c_char\n            },\n        );\n\n        // Return descriptor (nested return object).\n        if !cmd.ret.type_desc.is_null() || !cmd.ret.schema.is_null() {\n            let ret_key = b\"return\\0\".as_ptr() as *const c_char;\n            json_write_key(jb, ret_key);\n            json_write_obj_start(jb);\n            if !cmd.ret.type_desc.is_null() {\n                json_write_key(jb, type_key);\n                json_write_string(jb, cmd.ret.type_desc);\n            }\n            if !cmd.ret.schema.is_null() {\n                json_write_key(jb, schema_key);\n                json_write_string(jb, cmd.ret.schema);\n            }\n            json_write_obj_end(jb);\n        }\n\n        // Args. Each arg's schema (if any) is on the arg itself; no\n        // parallel array, no flag-skipping bug.\n        let args_key = b\"args\\0\".as_ptr() as *const c_char;\n        json_write_key(jb, args_key);\n        json_write_arr_start(jb);\n        for a in 0..cmd.n_args {\n            let arg = &*cmd.args.add(a);\n            json_write_obj_start(jb);\n\n            json_write_key(jb, kind_key);\n            match arg.kind {\n                ManifestArgKind::Pos => json_write_string(jb, b\"pos\\0\".as_ptr() as *const c_char),\n                ManifestArgKind::Opt => json_write_string(jb, b\"opt\\0\".as_ptr() as *const c_char),\n                ManifestArgKind::Flag => json_write_string(jb, b\"flag\\0\".as_ptr() as *const c_char),\n                ManifestArgKind::Grp => json_write_string(jb, b\"grp\\0\".as_ptr() as *const c_char),\n            }\n\n            if !arg.metavar.is_null() {\n                json_write_key(jb, b\"metavar\\0\".as_ptr() as *const c_char);\n                json_write_string(jb, arg.metavar);\n            }\n            if !arg.type_desc.is_null() {\n                json_write_key(jb, type_key);\n                json_write_string(jb, arg.type_desc);\n            }\n            if !arg.schema.is_null() {\n                json_write_key(jb, schema_key);\n                json_write_string(jb, arg.schema);\n            }\n            if !arg.default_val.is_null() {\n                json_write_key(jb, b\"default\\0\".as_ptr() as *const c_char);\n                json_write_string(jb, arg.default_val);\n            }\n            if !arg.long_opt.is_null() {\n                json_write_key(jb, b\"long\\0\".as_ptr() as *const c_char);\n                json_write_string(jb, arg.long_opt);\n            }\n            if arg.short_opt != 0 {\n                let short_str = [arg.short_opt as u8, 0];\n                json_write_key(jb, b\"short\\0\".as_ptr() as *const c_char);\n                json_write_string(jb, short_str.as_ptr() as *const c_char);\n            }\n            if arg.n_desc > 0 && !arg.desc.is_null() && !(*arg.desc).is_null() {\n                let first = *arg.desc;\n                if *first != 0 {\n                    let desc_key = b\"desc\\0\".as_ptr() as *const c_char;\n                    json_write_key(jb, desc_key);\n                    json_write_string(jb, first);\n                }\n            }\n\n            json_write_obj_end(jb);\n        }\n        json_write_arr_end(jb);\n\n        if cmd.n_desc > 0 && !cmd.desc.is_null() && !(*cmd.desc).is_null() {\n            let first = *cmd.desc;\n            if *first != 0 {\n                json_write_key(jb, b\"desc\\0\".as_ptr() as *const c_char);\n                json_write_string(jb, first);\n            }\n        }\n\n        if !cmd.group.is_null() {\n            json_write_key(jb, b\"group\\0\".as_ptr() as *const c_char);\n            json_write_string(jb, cmd.group);\n        }\n\n        json_write_obj_end(jb);\n    }\n\n    json_write_arr_end(jb);\n\n    if m.n_groups > 0 {\n        let groups_key = b\"groups\\0\".as_ptr() as *const c_char;\n        json_write_key(jb, groups_key);\n        json_write_arr_start(jb);\n        for i in 0..m.n_groups {\n            let g = &*m.groups.add(i);\n            json_write_obj_start(jb);\n            json_write_key(jb, name_key);\n            json_write_string(jb, g.name);\n            if g.n_desc > 0 && !g.desc.is_null() && !(*g.desc).is_null() {\n                json_write_key(jb, b\"desc\\0\".as_ptr() as *const c_char);\n                json_write_string(jb, *g.desc);\n            }\n            json_write_obj_end(jb);\n        }\n        json_write_arr_end(jb);\n    }\n\n    json_write_obj_end(jb);\n    json_buf_finish(jb)\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/mpack.rs",
    "content": "//! MessagePack <-> Voidstar conversion.\n//!\n//! Replaces serialize.c + mpack.c. Uses the `rmp` crate for MessagePack I/O.\n//! The voidstar binary format is morloc-specific (Array/Tensor structs with relptrs).\n\nuse crate::error::MorlocError;\nuse crate::schema::{Schema, SerialType};\nuse crate::shm::{self, AbsPtr, Array, RELNULL};\n\n// ── Voidstar -> MessagePack ────────────────────────────────────────────────\n\n/// Serialize voidstar data to MessagePack bytes.\npub fn pack_with_schema(ptr: AbsPtr, schema: &Schema) -> Result<Vec<u8>, MorlocError> {\n    let mut buf = Vec::with_capacity(256);\n    pack_data(ptr, schema, &mut buf)?;\n    Ok(buf)\n}\n\nfn pack_data(ptr: AbsPtr, schema: &Schema, buf: &mut Vec<u8>) -> Result<(), MorlocError> {\n    // SAFETY: ptr points to voidstar data in SHM with layout described by schema.\n    // All reads are within bounds defined by schema.width, Array headers, etc.\n    unsafe {\n        match schema.serial_type {\n            SerialType::Nil => {\n                rmp::encode::write_nil(buf)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack nil: {}\", e)))?;\n            }\n            SerialType::Bool => {\n                let v = *ptr != 0;\n                rmp::encode::write_bool(buf, v)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack bool: {}\", e)))?;\n            }\n            SerialType::Uint8 => {\n                rmp::encode::write_uint(buf, *ptr as u64)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack uint: {}\", e)))?;\n            }\n            SerialType::Uint16 => {\n                rmp::encode::write_uint(buf, *(ptr as *const u16) as u64)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack uint: {}\", e)))?;\n            }\n            SerialType::Uint32 => {\n                rmp::encode::write_uint(buf, *(ptr as *const u32) as u64)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack uint: {}\", e)))?;\n            }\n            SerialType::Uint64 => {\n                rmp::encode::write_uint(buf, *(ptr as *const u64))\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack uint: {}\", e)))?;\n            }\n            SerialType::Sint8 => {\n                rmp::encode::write_sint(buf, *(ptr as *const i8) as i64)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack sint: {}\", e)))?;\n            }\n            SerialType::Sint16 => {\n                rmp::encode::write_sint(buf, *(ptr as *const i16) as i64)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack sint: {}\", e)))?;\n            }\n            SerialType::Sint32 => {\n                rmp::encode::write_sint(buf, *(ptr as *const i32) as i64)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack sint: {}\", e)))?;\n            }\n            SerialType::Sint64 => {\n                rmp::encode::write_sint(buf, *(ptr as *const i64))\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack sint: {}\", e)))?;\n            }\n            SerialType::Float32 => {\n                let f = *(ptr as *const f32) as f64;\n                rmp::encode::write_f64(buf, f)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack float: {}\", e)))?;\n            }\n            SerialType::Float64 => {\n                let f = *(ptr as *const f64);\n                rmp::encode::write_f64(buf, f)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack float: {}\", e)))?;\n            }\n            SerialType::String => {\n                let arr = &*(ptr as *const Array);\n                let data = shm::rel2abs(arr.data)?;\n                let bytes = std::slice::from_raw_parts(data, arr.size);\n                rmp::encode::write_str_len(buf, arr.size as u32)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack str: {}\", e)))?;\n                buf.extend_from_slice(bytes);\n            }\n            SerialType::Array => {\n                let arr = &*(ptr as *const Array);\n                let elem_schema = &schema.parameters[0];\n                let elem_width = elem_schema.width;\n\n                rmp::encode::write_array_len(buf, arr.size as u32)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack array: {}\", e)))?;\n\n                if arr.size > 0 && arr.data != RELNULL {\n                    let data = shm::rel2abs(arr.data)?;\n                    for i in 0..arr.size {\n                        let elem_ptr = data.add(i * elem_width);\n                        pack_data(elem_ptr, elem_schema, buf)?;\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                rmp::encode::write_array_len(buf, schema.parameters.len() as u32)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack tuple: {}\", e)))?;\n\n                for (i, field_schema) in schema.parameters.iter().enumerate() {\n                    let field_ptr = ptr.add(schema.offsets[i]);\n                    pack_data(field_ptr, field_schema, buf)?;\n                }\n            }\n            SerialType::Optional => {\n                let tag = *ptr;\n                if tag == 0 {\n                    rmp::encode::write_nil(buf)\n                        .map_err(|e| MorlocError::Serialization(format!(\"msgpack nil: {}\", e)))?;\n                } else {\n                    let inner_schema = &schema.parameters[0];\n                    let inner_offset = schema.offsets.first().copied()\n                        .unwrap_or_else(|| shm::align_up(1, inner_schema.alignment().max(1)));\n                    let inner_ptr = ptr.add(inner_offset);\n                    pack_data(inner_ptr, inner_schema, buf)?;\n                }\n            }\n            SerialType::Tensor => {\n                return Err(MorlocError::Serialization(\n                    \"MessagePack serialization of tensors not yet supported\".into(),\n                ));\n            }\n        }\n    }\n    Ok(())\n}\n\n// ── MessagePack -> Voidstar ────────────────────────────────────────────────\n\n/// Deserialize MessagePack bytes into voidstar format in SHM.\npub fn unpack_with_schema(\n    data: &[u8],\n    schema: &Schema,\n) -> Result<AbsPtr, MorlocError> {\n    // Two-pass: first calculate size, then deserialize\n    let size = calc_unpack_size(data, schema)?;\n    let base = shm::shmalloc(size)?;\n    // SAFETY: base is freshly allocated with `size` bytes.\n    unsafe { std::ptr::write_bytes(base, 0, size) };\n\n    // SAFETY: cursor starts at base + schema.width, within the allocated region.\n    let mut cursor = unsafe { base.add(schema.width) };\n    let mut reader = &data[..];\n    unpack_obj(base, schema, &mut cursor, &mut reader)?;\n    Ok(base)\n}\n\nfn unpack_obj(\n    ptr: AbsPtr,\n    schema: &Schema,\n    cursor: &mut AbsPtr,\n    reader: &mut &[u8],\n) -> Result<(), MorlocError> {\n    use rmp::decode;\n\n    // SAFETY: ptr and cursor point into a single contiguous SHM allocation\n    // sized by calc_unpack_size. Each write respects schema.width bounds.\n    unsafe {\n        match schema.serial_type {\n            SerialType::Nil => {\n                decode::read_nil(reader)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack nil: {}\", e)))?;\n                *ptr = 0;\n            }\n            SerialType::Bool => {\n                let v = decode::read_bool(reader)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack bool: {}\", e)))?;\n                *ptr = v as u8;\n            }\n            SerialType::Uint8 | SerialType::Uint16 | SerialType::Uint32 | SerialType::Uint64\n            | SerialType::Sint8 | SerialType::Sint16 | SerialType::Sint32 | SerialType::Sint64 => {\n                unpack_int(ptr, schema.serial_type, reader)?;\n            }\n            SerialType::Float32 => {\n                let f = read_float(reader)?;\n                *(ptr as *mut f32) = f as f32;\n            }\n            SerialType::Float64 => {\n                let f = read_float(reader)?;\n                *(ptr as *mut f64) = f;\n            }\n            SerialType::String => {\n                let len = decode::read_str_len(reader)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack str len: {}\", e)))?\n                    as usize;\n                let arr = &mut *(ptr as *mut Array);\n                arr.size = len;\n                arr.data = shm::abs2rel(*cursor)?;\n\n                // Read string bytes directly\n                if len > 0 {\n                    if reader.len() < len {\n                        return Err(MorlocError::Serialization(\"msgpack str truncated\".into()));\n                    }\n                    std::ptr::copy_nonoverlapping(reader.as_ptr(), *cursor, len);\n                    *reader = &reader[len..];\n                }\n                *cursor = cursor.add(len);\n            }\n            SerialType::Array => {\n                let n = decode::read_array_len(reader)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack array len: {}\", e)))?\n                    as usize;\n                let elem_schema = &schema.parameters[0];\n                let elem_width = elem_schema.width;\n\n                let arr = &mut *(ptr as *mut Array);\n                arr.size = n;\n\n                // Align cursor for element data\n                let align = elem_schema.alignment();\n                let aligned = shm::align_up(*cursor as usize, align);\n                *cursor = aligned as AbsPtr;\n\n                arr.data = shm::abs2rel(*cursor)?;\n                let data_start = *cursor;\n                *cursor = cursor.add(n * elem_width);\n\n                for i in 0..n {\n                    let elem_ptr = data_start.add(i * elem_width);\n                    unpack_obj(elem_ptr, elem_schema, cursor, reader)?;\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                let _n = decode::read_array_len(reader)\n                    .map_err(|e| MorlocError::Serialization(format!(\"msgpack tuple len: {}\", e)))?;\n                for (i, field_schema) in schema.parameters.iter().enumerate() {\n                    let field_ptr = ptr.add(schema.offsets[i]);\n                    unpack_obj(field_ptr, field_schema, cursor, reader)?;\n                }\n            }\n            SerialType::Optional => {\n                let inner_schema = &schema.parameters[0];\n                let inner_offset = schema.offsets.first().copied()\n                    .unwrap_or_else(|| shm::align_up(1, inner_schema.alignment().max(1)));\n\n                // Peek at the next byte to detect nil\n                if !reader.is_empty() && reader[0] == 0xc0 {\n                    // Null: consume nil byte, set tag = 0\n                    decode::read_nil(reader)\n                        .map_err(|e| MorlocError::Serialization(format!(\"msgpack nil: {}\", e)))?;\n                    *ptr = 0;\n                } else {\n                    // Present: set tag = 1, parse inner\n                    *ptr = 1;\n                    let inner_ptr = ptr.add(inner_offset);\n                    unpack_obj(inner_ptr, inner_schema, cursor, reader)?;\n                }\n            }\n            SerialType::Tensor => {\n                return Err(MorlocError::Serialization(\n                    \"MessagePack tensor deserialization not yet supported\".into(),\n                ));\n            }\n        }\n    }\n    Ok(())\n}\n\nfn unpack_int(ptr: AbsPtr, st: SerialType, reader: &mut &[u8]) -> Result<(), MorlocError> {\n    // Use rmp's generic read_int which handles all integer markers\n    let val: i64 = rmp::decode::read_int(reader)\n        .map_err(|e| MorlocError::Serialization(format!(\"msgpack int: {}\", e)))?;\n\n    // SAFETY: ptr points to schema.width bytes in SHM; each cast writes exactly that width.\n    unsafe {\n        match st {\n            SerialType::Sint8 => *(ptr as *mut i8) = val as i8,\n            SerialType::Sint16 => *(ptr as *mut i16) = val as i16,\n            SerialType::Sint32 => *(ptr as *mut i32) = val as i32,\n            SerialType::Sint64 => *(ptr as *mut i64) = val,\n            SerialType::Uint8 => *ptr = val as u8,\n            SerialType::Uint16 => *(ptr as *mut u16) = val as u16,\n            SerialType::Uint32 => *(ptr as *mut u32) = val as u32,\n            SerialType::Uint64 => *(ptr as *mut u64) = val as u64,\n            _ => {}\n        }\n    }\n    Ok(())\n}\n\nfn read_float(reader: &mut &[u8]) -> Result<f64, MorlocError> {\n    let marker = rmp::decode::read_marker(reader)\n        .map_err(|_| MorlocError::Serialization(\"msgpack float: unexpected EOF\".into()))?;\n    match marker {\n        rmp::Marker::F32 => {\n            let bits = read_be_u32(reader)?;\n            Ok(f32::from_bits(bits) as f64)\n        }\n        rmp::Marker::F64 => {\n            let bits = read_be_u64(reader)?;\n            Ok(f64::from_bits(bits))\n        }\n        // Integer markers can appear for integer-valued floats - read the data manually\n        rmp::Marker::FixPos(v) => Ok(v as f64),\n        rmp::Marker::FixNeg(v) => Ok(v as f64),\n        _ => {\n            // For other integer encodings, read bytes manually\n            let n = match marker {\n                rmp::Marker::U8 => { read_byte(reader)? as f64 }\n                rmp::Marker::U16 => { read_be_u16(reader)? as f64 }\n                rmp::Marker::U32 => { read_be_u32(reader)? as f64 }\n                rmp::Marker::U64 => { read_be_u64(reader)? as f64 }\n                rmp::Marker::I8 => { read_byte(reader)? as i8 as f64 }\n                rmp::Marker::I16 => { read_be_u16(reader)? as i16 as f64 }\n                rmp::Marker::I32 => { read_be_u32(reader)? as i32 as f64 }\n                rmp::Marker::I64 => { read_be_u64(reader)? as i64 as f64 }\n                _ => {\n                    return Err(MorlocError::Serialization(format!(\n                        \"unexpected msgpack marker {:?} for float\", marker\n                    )));\n                }\n            };\n            Ok(n)\n        }\n    }\n}\n\n// Also fix read_f32/read_f64 - rmp's functions include the marker, but we already consumed it\n// So we need to read the raw data bytes directly.\n\nfn read_byte(reader: &mut &[u8]) -> Result<u8, MorlocError> {\n    if reader.is_empty() { return Err(MorlocError::Serialization(\"unexpected EOF\".into())); }\n    let v = reader[0];\n    *reader = &reader[1..];\n    Ok(v)\n}\n\nfn read_be_u16(reader: &mut &[u8]) -> Result<u16, MorlocError> {\n    if reader.len() < 2 { return Err(MorlocError::Serialization(\"unexpected EOF\".into())); }\n    let v = u16::from_be_bytes([reader[0], reader[1]]);\n    *reader = &reader[2..];\n    Ok(v)\n}\n\nfn read_be_u32(reader: &mut &[u8]) -> Result<u32, MorlocError> {\n    if reader.len() < 4 { return Err(MorlocError::Serialization(\"unexpected EOF\".into())); }\n    let v = u32::from_be_bytes([reader[0], reader[1], reader[2], reader[3]]);\n    *reader = &reader[4..];\n    Ok(v)\n}\n\nfn read_be_u64(reader: &mut &[u8]) -> Result<u64, MorlocError> {\n    if reader.len() < 8 { return Err(MorlocError::Serialization(\"unexpected EOF\".into())); }\n    let v = u64::from_be_bytes([reader[0], reader[1], reader[2], reader[3], reader[4], reader[5], reader[6], reader[7]]);\n    *reader = &reader[8..];\n    Ok(v)\n}\n\n// ── Size calculation for unpack ────────────────────────────────────────────\n\nfn calc_unpack_size(data: &[u8], schema: &Schema) -> Result<usize, MorlocError> {\n    let mut reader = data;\n    calc_size_r(schema, &mut reader)\n}\n\nfn calc_size_r(schema: &Schema, reader: &mut &[u8]) -> Result<usize, MorlocError> {\n    match schema.serial_type {\n        SerialType::Nil => {\n            rmp::decode::read_nil(reader).ok();\n            Ok(1)\n        }\n        SerialType::Bool => {\n            rmp::decode::read_bool(reader).ok();\n            Ok(1)\n        }\n        SerialType::Sint8 | SerialType::Uint8 => { skip_int(reader)?; Ok(1) }\n        SerialType::Sint16 | SerialType::Uint16 => { skip_int(reader)?; Ok(2) }\n        SerialType::Sint32 | SerialType::Uint32 | SerialType::Float32 => { skip_int(reader)?; Ok(4) }\n        SerialType::Sint64 | SerialType::Uint64 | SerialType::Float64 => { skip_int(reader)?; Ok(8) }\n        SerialType::String => {\n            let len = rmp::decode::read_str_len(reader)\n                .map_err(|e| MorlocError::Serialization(format!(\"size calc str: {}\", e)))?\n                as usize;\n            if reader.len() >= len { *reader = &reader[len..]; }\n            Ok(std::mem::size_of::<Array>() + len)\n        }\n        SerialType::Array => {\n            let n = rmp::decode::read_array_len(reader)\n                .map_err(|e| MorlocError::Serialization(format!(\"size calc array: {}\", e)))?\n                as usize;\n            let elem_schema = &schema.parameters[0];\n            let mut total = std::mem::size_of::<Array>();\n            // Alignment padding\n            total = shm::align_up(total, elem_schema.alignment());\n            for _ in 0..n {\n                total += calc_size_r(elem_schema, reader)?;\n            }\n            Ok(total)\n        }\n        SerialType::Tuple | SerialType::Map => {\n            let _n = rmp::decode::read_array_len(reader).ok();\n            let mut total = schema.width;\n            for field_schema in &schema.parameters {\n                if !field_schema.is_fixed_width() {\n                    total += calc_size_r(field_schema, reader)?;\n                } else {\n                    calc_size_r(field_schema, reader)?;\n                }\n            }\n            Ok(total)\n        }\n        SerialType::Optional => {\n            let inner_schema = &schema.parameters[0];\n            if !reader.is_empty() && reader[0] == 0xc0 {\n                rmp::decode::read_nil(reader).ok();\n                Ok(schema.width.max(1 + inner_schema.width))\n            } else {\n                let inner_size = calc_size_r(inner_schema, reader)?;\n                let align = inner_schema.alignment().max(1);\n                let offset = shm::align_up(1, align);\n                Ok(offset + inner_size)\n            }\n        }\n        SerialType::Tensor => Ok(0),\n    }\n}\n\nfn skip_int(reader: &mut &[u8]) -> Result<(), MorlocError> {\n    let marker = rmp::decode::read_marker(reader)\n        .map_err(|_| MorlocError::Serialization(\"skip int: unexpected EOF\".into()))?;\n    let skip = match marker {\n        rmp::Marker::FixPos(_) | rmp::Marker::FixNeg(_) => 0,\n        rmp::Marker::U8 | rmp::Marker::I8 => 1,\n        rmp::Marker::U16 | rmp::Marker::I16 => 2,\n        rmp::Marker::U32 | rmp::Marker::I32 | rmp::Marker::F32 => 4,\n        rmp::Marker::U64 | rmp::Marker::I64 | rmp::Marker::F64 => 8,\n        _ => 0,\n    };\n    if reader.len() >= skip {\n        *reader = &reader[skip..];\n    }\n    Ok(())\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n    use crate::schema::parse_schema;\n    use crate::json;\n\n    fn setup_shm() {\n        crate::init_test_shm();\n    }\n\n    #[test]\n    fn test_roundtrip_int_via_msgpack() {\n        setup_shm();\n        let schema = parse_schema(\"i4\").unwrap();\n        // JSON -> voidstar -> msgpack -> voidstar -> JSON\n        let ptr1 = json::read_json_with_schema(\"42\", &schema).unwrap();\n        let mpk = pack_with_schema(ptr1, &schema).unwrap();\n        let ptr2 = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr2, &schema).unwrap();\n        assert_eq!(json_out, \"42\");\n    }\n\n    #[test]\n    fn test_roundtrip_string_via_msgpack() {\n        setup_shm();\n        let schema = parse_schema(\"s\").unwrap();\n        let ptr1 = json::read_json_with_schema(\"\\\"hello world\\\"\", &schema).unwrap();\n        let mpk = pack_with_schema(ptr1, &schema).unwrap();\n        let ptr2 = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr2, &schema).unwrap();\n        assert_eq!(json_out, \"\\\"hello world\\\"\");\n    }\n\n    #[test]\n    fn test_roundtrip_array_via_msgpack() {\n        setup_shm();\n        let schema = parse_schema(\"ai4\").unwrap();\n        let ptr1 = json::read_json_with_schema(\"[10,20,30]\", &schema).unwrap();\n        let mpk = pack_with_schema(ptr1, &schema).unwrap();\n        let ptr2 = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr2, &schema).unwrap();\n        assert_eq!(json_out, \"[10,20,30]\");\n    }\n\n    #[test]\n    fn test_roundtrip_bool_via_msgpack() {\n        setup_shm();\n        let schema = parse_schema(\"b\").unwrap();\n        let ptr1 = json::read_json_with_schema(\"true\", &schema).unwrap();\n        let mpk = pack_with_schema(ptr1, &schema).unwrap();\n        let ptr2 = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr2, &schema).unwrap();\n        assert_eq!(json_out, \"true\");\n    }\n\n    #[test]\n    fn test_roundtrip_optional_null_via_msgpack() {\n        setup_shm();\n        let schema = parse_schema(\"?i4\").unwrap();\n        let ptr1 = json::read_json_with_schema(\"null\", &schema).unwrap();\n        let mpk = pack_with_schema(ptr1, &schema).unwrap();\n        let ptr2 = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr2, &schema).unwrap();\n        assert_eq!(json_out, \"null\");\n    }\n\n    #[test]\n    fn test_pack_only_string() {\n        setup_shm();\n        let schema = parse_schema(\"s\").unwrap();\n        let ptr1 = json::read_json_with_schema(\"\\\"hi\\\"\", &schema).unwrap();\n        let mpk = pack_with_schema(ptr1, &schema).unwrap();\n        assert!(!mpk.is_empty());\n        assert_eq!(mpk.len(), 3);\n    }\n\n    #[test]\n    fn test_unpack_only_int() {\n        setup_shm();\n        let schema = parse_schema(\"i4\").unwrap();\n        // msgpack for 42 = [42] (fixint)\n        let mpk = vec![42u8];\n        let ptr = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr, &schema).unwrap();\n        assert_eq!(json_out, \"42\");\n    }\n\n    #[test]\n    fn test_unpack_only_string() {\n        setup_shm();\n        let schema = parse_schema(\"s\").unwrap();\n        // msgpack for \"hi\" = [0xa2, 0x68, 0x69]\n        let mpk = vec![0xa2, 0x68, 0x69];\n        let size = calc_unpack_size(&mpk, &schema).unwrap();\n        eprintln!(\"unpack size for string: {} (Array={}, total={})\", size, std::mem::size_of::<shm::Array>(), size);\n        let ptr = unpack_with_schema(&mpk, &schema).unwrap();\n        let json_out = json::voidstar_to_json_string(ptr, &schema).unwrap();\n        assert_eq!(json_out, \"\\\"hi\\\"\");\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/packet.rs",
    "content": "use crate::error::MorlocError;\n\n// ── Magic & version constants ──────────────────────────────────────────────\n\npub const PACKET_MAGIC: u32 = 0x0707_f86d;\npub const THIS_PLAIN: u16 = 0;\npub const THIS_VERSION: u16 = 0;\npub const DEFAULT_FLAVOR: u16 = 0;\npub const DEFAULT_MODE: u16 = 0;\n\n// ── Command type discriminants ─────────────────────────────────────────────\n\npub const PACKET_TYPE_DATA: u8 = 0;\npub const PACKET_TYPE_CALL: u8 = 1;\npub const PACKET_TYPE_PING: u8 = 2;\n\n// ── Data source ────────────────────────────────────────────────────────────\n\npub const PACKET_SOURCE_MESG: u8 = 0x00;\npub const PACKET_SOURCE_FILE: u8 = 0x01;\npub const PACKET_SOURCE_RPTR: u8 = 0x02;\n\n// ── Data format ────────────────────────────────────────────────────────────\n\npub const PACKET_FORMAT_JSON: u8 = 0x00;\npub const PACKET_FORMAT_MSGPACK: u8 = 0x01;\npub const PACKET_FORMAT_TEXT: u8 = 0x02;\npub const PACKET_FORMAT_DATA: u8 = 0x03;\npub const PACKET_FORMAT_VOIDSTAR: u8 = 0x04;\npub const PACKET_FORMAT_ARROW: u8 = 0x05;\n\n// ── Compression / encryption ───────────────────────────────────────────────\n\npub const PACKET_COMPRESSION_NONE: u8 = 0x00;\npub const PACKET_ENCRYPTION_NONE: u8 = 0x00;\n\n// ── Status ─────────────────────────────────────────────────────────────────\n\npub const PACKET_STATUS_PASS: u8 = 0x00;\npub const PACKET_STATUS_FAIL: u8 = 0x01;\n\n// ── Entrypoint ────────────────────���────────────────────────────────────────\n\npub const PACKET_ENTRYPOINT_LOCAL: u8 = 0x00;\npub const PACKET_ENTRYPOINT_REMOTE_SFS: u8 = 0x01;\n\n// ── Inline threshold ─────────────────────────────────────��─────────────────\n\npub const MORLOC_INLINE_THRESHOLD: usize = 64 * 1024;\n\n// ── Metadata ─────────��─────────────────────────────────────────────────────\n\npub const METADATA_TYPE_SCHEMA_STRING: u8 = 0x01;\npub const METADATA_TYPE_XXHASH: u8 = 0x02;\npub const METADATA_HEADER_MAGIC: [u8; 3] = *b\"mmh\";\n\n// ── Packed structs matching the C binary layout ────���───────────────────────\n\n/// 8-byte command union. We represent each variant as its own struct and\n/// transmute at the boundary.\n#[derive(Debug, Clone, Copy)]\n#[repr(C, packed)]\npub struct CommandType {\n    pub cmd_type: u8,\n    pub padding: [u8; 7],\n}\n\n#[derive(Debug, Clone, Copy)]\n#[repr(C, packed)]\npub struct CommandCall {\n    pub cmd_type: u8,\n    pub entrypoint: u8,\n    pub padding: [u8; 2],\n    pub midx: u32,\n}\n\n#[derive(Debug, Clone, Copy)]\n#[repr(C, packed)]\npub struct CommandData {\n    pub cmd_type: u8,\n    pub source: u8,\n    pub format: u8,\n    pub compression: u8,\n    pub encryption: u8,\n    pub status: u8,\n    pub padding: [u8; 2],\n}\n\n#[derive(Debug, Clone, Copy)]\n#[repr(C, packed)]\npub struct CommandPing {\n    pub cmd_type: u8,\n    pub padding: [u8; 7],\n}\n\n/// The 8-byte command field stored as raw bytes. Interpreted based on the\n/// first byte (cmd_type discriminant).\n#[derive(Clone, Copy)]\n#[repr(C, packed)]\npub union PacketCommand {\n    pub cmd_type: CommandType,\n    pub call: CommandCall,\n    pub data: CommandData,\n    pub ping: CommandPing,\n    pub raw: [u8; 8],\n}\n\nimpl std::fmt::Debug for PacketCommand {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        let tag = unsafe { self.cmd_type.cmd_type };\n        match tag {\n            PACKET_TYPE_DATA => write!(f, \"Command::Data({:?})\", unsafe { self.data }),\n            PACKET_TYPE_CALL => write!(f, \"Command::Call({:?})\", unsafe { self.call }),\n            PACKET_TYPE_PING => write!(f, \"Command::Ping\"),\n            _ => write!(f, \"Command::Unknown({tag})\"),\n        }\n    }\n}\n\n/// 32-byte packet header. Must match morloc_packet_header_t exactly.\n#[derive(Debug, Clone, Copy)]\n#[repr(C, packed)]\npub struct PacketHeader {\n    pub magic: u32,\n    pub plain: u16,\n    pub version: u16,\n    pub flavor: u16,\n    pub mode: u16,\n    pub command: PacketCommand,\n    pub offset: u32,\n    pub length: u64,\n}\n\nconst _: () = assert!(std::mem::size_of::<PacketHeader>() == 32);\nconst _: () = assert!(std::mem::size_of::<PacketCommand>() == 8);\nconst _: () = assert!(std::mem::size_of::<CommandCall>() == 8);\nconst _: () = assert!(std::mem::size_of::<CommandData>() == 8);\n\n/// 8-byte metadata header.\n#[derive(Debug, Clone, Copy)]\n#[repr(C, packed)]\npub struct MetadataHeader {\n    pub magic: [u8; 3],\n    pub metadata_type: u8,\n    pub size: u32,\n}\n\nconst _: () = assert!(std::mem::size_of::<MetadataHeader>() == 8);\n\n// ── Packet construction helpers ────────────────────────────────────────────\n\nimpl PacketHeader {\n    fn new(command: PacketCommand, offset: u32, length: u64) -> Self {\n        PacketHeader {\n            magic: PACKET_MAGIC,\n            plain: THIS_PLAIN,\n            version: THIS_VERSION,\n            flavor: DEFAULT_FLAVOR,\n            mode: DEFAULT_MODE,\n            command,\n            offset,\n            length,\n        }\n    }\n\n    /// Create a ping packet (no payload).\n    pub fn ping() -> Self {\n        Self::new(\n            PacketCommand {\n                ping: CommandPing {\n                    cmd_type: PACKET_TYPE_PING,\n                    padding: [0; 7],\n                },\n            },\n            0, // no metadata\n            0, // no payload\n        )\n    }\n\n    /// Create a local call packet header.\n    pub fn local_call(midx: u32, payload_len: u64) -> Self {\n        Self::new(\n            PacketCommand {\n                call: CommandCall {\n                    cmd_type: PACKET_TYPE_CALL,\n                    entrypoint: PACKET_ENTRYPOINT_LOCAL,\n                    padding: [0; 2],\n                    midx,\n                },\n            },\n            0, // no metadata between header and arg packets\n            payload_len,\n        )\n    }\n\n    /// Create a remote call packet header.\n    pub fn remote_call(midx: u32, payload_len: u64) -> Self {\n        Self::new(\n            PacketCommand {\n                call: CommandCall {\n                    cmd_type: PACKET_TYPE_CALL,\n                    entrypoint: PACKET_ENTRYPOINT_REMOTE_SFS,\n                    padding: [0; 2],\n                    midx,\n                },\n            },\n            0,\n            payload_len,\n        )\n    }\n\n    /// Create a data packet header for inline message data.\n    pub fn data_mesg(format: u8, payload_len: u64) -> Self {\n        Self::new(\n            PacketCommand {\n                data: CommandData {\n                    cmd_type: PACKET_TYPE_DATA,\n                    source: PACKET_SOURCE_MESG,\n                    format,\n                    compression: PACKET_COMPRESSION_NONE,\n                    encryption: PACKET_ENCRYPTION_NONE,\n                    status: PACKET_STATUS_PASS,\n                    padding: [0; 2],\n                },\n            },\n            0, // metadata size set separately when building full packet\n            payload_len,\n        )\n    }\n\n    /// Create a data packet header for relative pointer (shared memory).\n    pub fn data_rptr(format: u8, payload_len: u64) -> Self {\n        Self::new(\n            PacketCommand {\n                data: CommandData {\n                    cmd_type: PACKET_TYPE_DATA,\n                    source: PACKET_SOURCE_RPTR,\n                    format,\n                    compression: PACKET_COMPRESSION_NONE,\n                    encryption: PACKET_ENCRYPTION_NONE,\n                    status: PACKET_STATUS_PASS,\n                    padding: [0; 2],\n                },\n            },\n            0, // metadata size set separately when building full packet\n            payload_len,\n        )\n    }\n\n    /// Create a fail packet with an error message.\n    pub fn fail(error_msg_len: u64) -> Self {\n        Self::new(\n            PacketCommand {\n                data: CommandData {\n                    cmd_type: PACKET_TYPE_DATA,\n                    source: PACKET_SOURCE_MESG,\n                    format: PACKET_FORMAT_TEXT,\n                    compression: PACKET_COMPRESSION_NONE,\n                    encryption: PACKET_ENCRYPTION_NONE,\n                    status: PACKET_STATUS_FAIL,\n                    padding: [0; 2],\n                },\n            },\n            0,\n            error_msg_len,\n        )\n    }\n\n    /// Total packet size (header + payload).\n    pub fn total_size(&self) -> u64 {\n        self.offset as u64 + self.length\n    }\n\n    /// Check if this is a valid morloc packet.\n    pub fn is_valid(&self) -> bool {\n        self.magic == PACKET_MAGIC\n    }\n\n    /// Get the command type.\n    pub fn command_type(&self) -> u8 {\n        unsafe { self.command.cmd_type.cmd_type }\n    }\n\n    pub fn is_ping(&self) -> bool {\n        self.command_type() == PACKET_TYPE_PING\n    }\n\n    pub fn is_call(&self) -> bool {\n        self.command_type() == PACKET_TYPE_CALL\n    }\n\n    pub fn is_data(&self) -> bool {\n        self.command_type() == PACKET_TYPE_DATA\n    }\n\n    pub fn is_local_call(&self) -> bool {\n        self.is_call() && unsafe { self.command.call.entrypoint } == PACKET_ENTRYPOINT_LOCAL\n    }\n\n    pub fn is_remote_call(&self) -> bool {\n        self.is_call() && unsafe { self.command.call.entrypoint } == PACKET_ENTRYPOINT_REMOTE_SFS\n    }\n\n    pub fn is_fail(&self) -> bool {\n        self.is_data() && unsafe { self.command.data.status } == PACKET_STATUS_FAIL\n    }\n\n    /// Serialize the header to bytes.\n    pub fn to_bytes(&self) -> [u8; 32] {\n        unsafe { std::mem::transmute(*self) }\n    }\n\n    /// Deserialize a header from bytes.\n    pub fn from_bytes(bytes: &[u8; 32]) -> Result<Self, MorlocError> {\n        let header: PacketHeader = unsafe { std::mem::transmute(*bytes) };\n        if !header.is_valid() {\n            let magic = { header.magic };\n            return Err(MorlocError::Packet(format!(\n                \"invalid magic: 0x{magic:08x}\"\n            )));\n        }\n        Ok(header)\n    }\n}\n\n// ── Full packet construction (header + metadata + payload) ─────────────────\n\n/// Build a complete data packet with schema metadata and relptr payload.\npub fn make_standard_data_packet(relptr: crate::shm::RelPtr, schema: &crate::Schema) -> Vec<u8> {\n    let schema_str = crate::schema::schema_to_string(schema);\n    let schema_bytes = schema_str.as_bytes();\n    let schema_len = schema_bytes.len() + 1; // +1 for null terminator\n\n    // Metadata: header (8 bytes) + schema string (null-terminated), padded to 32-byte boundary\n    let meta_header_size = std::mem::size_of::<MetadataHeader>();\n    let raw_meta_len = meta_header_size + schema_len;\n    let padded_meta_len = ((raw_meta_len + 31) / 32) * 32;\n\n    // Payload: relptr\n    let payload_len = std::mem::size_of::<crate::shm::RelPtr>();\n\n    let total = 32 + padded_meta_len + payload_len;\n    let mut packet = vec![0u8; total];\n\n    // Write header\n    let header = PacketHeader::data_rptr(PACKET_FORMAT_VOIDSTAR, payload_len as u64);\n    let mut hdr = header;\n    // Override offset to include metadata\n    unsafe {\n        let hdr_ptr = &mut hdr as *mut PacketHeader as *mut u8;\n        // Set offset field (at byte 20 in packed struct)\n        let offset_ptr = hdr_ptr.add(20) as *mut u32;\n        *offset_ptr = padded_meta_len as u32;\n    }\n    let hdr_bytes = hdr.to_bytes();\n    packet[..32].copy_from_slice(&hdr_bytes);\n\n    // Write metadata header\n    let meta_start = 32;\n    packet[meta_start] = b'm';\n    packet[meta_start + 1] = b'm';\n    packet[meta_start + 2] = b'h';\n    packet[meta_start + 3] = METADATA_TYPE_SCHEMA_STRING;\n    let meta_size_bytes = (schema_len as u32).to_le_bytes();\n    packet[meta_start + 4..meta_start + 8].copy_from_slice(&meta_size_bytes);\n\n    // Write schema string (null-terminated)\n    let schema_data_start = meta_start + meta_header_size;\n    packet[schema_data_start..schema_data_start + schema_bytes.len()].copy_from_slice(schema_bytes);\n    // Null terminator already there from vec![0u8]\n\n    // Write relptr payload\n    let payload_start = 32 + padded_meta_len;\n    let relptr_bytes = relptr.to_ne_bytes();\n    packet[payload_start..payload_start + relptr_bytes.len()].copy_from_slice(&relptr_bytes);\n\n    packet\n}\n\n/// Build an inline MESG+MSGPACK data packet with schema metadata.\npub fn make_mesg_data_packet(mpk_data: &[u8], schema: &crate::Schema) -> Vec<u8> {\n    let schema_str = crate::schema::schema_to_string(schema);\n    let schema_bytes = schema_str.as_bytes();\n    let schema_len = schema_bytes.len() + 1; // +1 for null terminator\n\n    let meta_header_size = std::mem::size_of::<MetadataHeader>();\n    let raw_meta_len = meta_header_size + schema_len;\n    let padded_meta_len = ((raw_meta_len + 31) / 32) * 32;\n\n    let total = 32 + padded_meta_len + mpk_data.len();\n    let mut packet = vec![0u8; total];\n\n    // Write header\n    let mut header = PacketHeader::data_mesg(PACKET_FORMAT_MSGPACK, mpk_data.len() as u64);\n    // Set offset to metadata size\n    unsafe {\n        let hdr_ptr = &mut header as *mut PacketHeader as *mut u8;\n        let offset_ptr = hdr_ptr.add(20) as *mut u32;\n        *offset_ptr = padded_meta_len as u32;\n    }\n    let hdr_bytes = header.to_bytes();\n    packet[..32].copy_from_slice(&hdr_bytes);\n\n    // Write metadata header\n    let meta_start = 32;\n    packet[meta_start] = b'm';\n    packet[meta_start + 1] = b'm';\n    packet[meta_start + 2] = b'h';\n    packet[meta_start + 3] = METADATA_TYPE_SCHEMA_STRING;\n    let meta_size_bytes = (schema_len as u32).to_le_bytes();\n    packet[meta_start + 4..meta_start + 8].copy_from_slice(&meta_size_bytes);\n\n    // Write schema string\n    let schema_data_start = meta_start + meta_header_size;\n    packet[schema_data_start..schema_data_start + schema_bytes.len()].copy_from_slice(schema_bytes);\n\n    // Write msgpack payload\n    let payload_start = 32 + padded_meta_len;\n    packet[payload_start..payload_start + mpk_data.len()].copy_from_slice(mpk_data);\n\n    packet\n}\n\n/// Build a call packet from argument data packets.\npub fn make_local_call_packet(midx: u32, arg_packets: &[Vec<u8>]) -> Vec<u8> {\n    let data_length: usize = arg_packets.iter().map(|p| p.len()).sum();\n    let total = 32 + data_length;\n    let mut packet = vec![0u8; total];\n\n    // Write call header\n    let header = PacketHeader::local_call(midx, data_length as u64);\n    let hdr_bytes = header.to_bytes();\n    packet[..32].copy_from_slice(&hdr_bytes);\n\n    // Concatenate argument packets\n    let mut pos = 32;\n    for arg in arg_packets {\n        packet[pos..pos + arg.len()].copy_from_slice(arg);\n        pos += arg.len();\n    }\n\n    packet\n}\n\n/// Build a fail packet with an error message string.\npub fn make_fail_packet_bytes(error_msg: &str) -> Vec<u8> {\n    let msg_bytes = error_msg.as_bytes();\n    let total = 32 + msg_bytes.len();\n    let mut packet = vec![0u8; total];\n\n    let header = PacketHeader::fail(msg_bytes.len() as u64);\n    let hdr_bytes = header.to_bytes();\n    packet[..32].copy_from_slice(&hdr_bytes);\n    packet[32..].copy_from_slice(msg_bytes);\n\n    packet\n}\n\n/// Extract the payload from a data packet (bytes after header + metadata offset).\npub fn get_data_payload(packet: &[u8]) -> Result<&[u8], MorlocError> {\n    if packet.len() < 32 {\n        return Err(MorlocError::Packet(\"packet too small\".into()));\n    }\n    let header = PacketHeader::from_bytes(packet[..32].try_into().unwrap())?;\n    let offset = { header.offset } as usize;\n    let length = { header.length } as usize;\n    let start = 32 + offset;\n    let end = start + length;\n    if end > packet.len() {\n        return Err(MorlocError::Packet(\"payload extends past packet end\".into()));\n    }\n    Ok(&packet[start..end])\n}\n\n/// Extract error message from a fail packet.\npub fn get_error_message(packet: &[u8]) -> Result<Option<String>, MorlocError> {\n    if packet.len() < 32 {\n        return Err(MorlocError::Packet(\"packet too small\".into()));\n    }\n    let header = PacketHeader::from_bytes(packet[..32].try_into().unwrap())?;\n    if !header.is_fail() {\n        return Ok(None);\n    }\n    let payload = get_data_payload(packet)?;\n    Ok(Some(String::from_utf8_lossy(payload).into_owned()))\n}\n\n/// Read the schema string from packet metadata section.\npub fn read_schema_from_meta(packet: &[u8]) -> Result<Option<String>, MorlocError> {\n    if packet.len() < 32 {\n        return Err(MorlocError::Packet(\"packet too small\".into()));\n    }\n    let header = PacketHeader::from_bytes(packet[..32].try_into().unwrap())?;\n    let offset = { header.offset } as usize;\n    if offset == 0 {\n        return Ok(None);\n    }\n\n    // Scan metadata headers\n    let meta_start = 32;\n    let meta_end = meta_start + offset;\n    let mut pos = meta_start;\n    while pos + 8 <= meta_end {\n        if packet[pos] == b'm' && packet[pos + 1] == b'm' && packet[pos + 2] == b'h' {\n            let meta_type = packet[pos + 3];\n            let meta_size = u32::from_le_bytes([\n                packet[pos + 4], packet[pos + 5], packet[pos + 6], packet[pos + 7],\n            ]) as usize;\n            if meta_type == METADATA_TYPE_SCHEMA_STRING {\n                let str_start = pos + 8;\n                let str_end = str_start + meta_size;\n                if str_end <= meta_end {\n                    let bytes = &packet[str_start..str_end];\n                    // Find null terminator\n                    let len = bytes.iter().position(|&b| b == 0).unwrap_or(bytes.len());\n                    return Ok(Some(String::from_utf8_lossy(&bytes[..len]).into_owned()));\n                }\n            }\n            pos += 8 + meta_size;\n        } else {\n            break;\n        }\n    }\n    Ok(None)\n}\n\n/// Get the voidstar value from a data packet (resolves relptr to absptr).\npub fn get_data_value(\n    packet: &[u8],\n    schema: &crate::Schema,\n) -> Result<crate::shm::AbsPtr, MorlocError> {\n    let header = PacketHeader::from_bytes(packet[..32].try_into().unwrap())?;\n    let source = unsafe { header.command.data.source };\n    let format = unsafe { header.command.data.format };\n\n    let payload = get_data_payload(packet)?;\n\n    match source {\n        PACKET_SOURCE_RPTR => {\n            // Payload is a relptr\n            if payload.len() < std::mem::size_of::<crate::shm::RelPtr>() {\n                return Err(MorlocError::Packet(\"relptr payload too small\".into()));\n            }\n            let relptr = crate::shm::RelPtr::from_ne_bytes(\n                payload[..std::mem::size_of::<crate::shm::RelPtr>()].try_into().unwrap()\n            );\n            crate::shm::rel2abs(relptr)\n        }\n        PACKET_SOURCE_MESG => {\n            match format {\n                PACKET_FORMAT_MSGPACK => {\n                    crate::mpack::unpack_with_schema(payload, schema)\n                }\n                PACKET_FORMAT_JSON => {\n                    let json_str = std::str::from_utf8(payload)\n                        .map_err(|e| MorlocError::Packet(format!(\"invalid UTF-8: {}\", e)))?;\n                    crate::json::read_json_with_schema(json_str, schema)\n                }\n                PACKET_FORMAT_VOIDSTAR => {\n                    read_voidstar_binary(payload, schema)\n                }\n                _ => {\n                    Err(MorlocError::Packet(format!(\n                        \"unsupported data format: {}\", format\n                    )))\n                }\n            }\n        }\n        _ => Err(MorlocError::Packet(format!(\"unsupported source: {}\", source))),\n    }\n}\n\n// ── Inline voidstar deserialization ─────────────────────────────────────────\n\n/// Read a flat voidstar binary blob into shared memory, adjusting relptrs.\nfn read_voidstar_binary(\n    blob: &[u8],\n    schema: &crate::Schema,\n) -> Result<crate::shm::AbsPtr, MorlocError> {\n    use crate::shm;\n\n    let base = shm::shmalloc(blob.len())?;\n    unsafe { std::ptr::copy_nonoverlapping(blob.as_ptr(), base, blob.len()) };\n\n    let base_rel = shm::abs2rel(base)?;\n    adjust_voidstar_relptrs(base, schema, base_rel)?;\n    Ok(base)\n}\n\n/// Adjust relptrs in a voidstar blob that was copied into SHM.\n/// The blob's internal relptrs are offsets from position 0 of the blob.\n/// Adding `base_rel` converts them to valid SHM relptrs.\nfn adjust_voidstar_relptrs(\n    data: crate::shm::AbsPtr,\n    schema: &crate::Schema,\n    base_rel: crate::shm::RelPtr,\n) -> Result<(), MorlocError> {\n    use crate::schema::SerialType;\n    use crate::shm::{self, Array, Tensor};\n\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let arr = &mut *(data as *mut Array);\n                arr.data += base_rel;\n                // Recurse into elements if variable-width (strings are always fixed-width bytes)\n                if !schema.parameters.is_empty() && !schema.parameters[0].is_fixed_width() {\n                    let arr_data = shm::rel2abs(arr.data)?;\n                    let elem_width = schema.parameters[0].width;\n                    for i in 0..arr.size {\n                        let elem = arr_data.add(i * elem_width);\n                        adjust_voidstar_relptrs(elem, &schema.parameters[0], base_rel)?;\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                for i in 0..schema.parameters.len() {\n                    let child = data.add(schema.offsets[i]);\n                    adjust_voidstar_relptrs(child, &schema.parameters[i], base_rel)?;\n                }\n            }\n            SerialType::Optional => {\n                let tag = *data;\n                if tag != 0 {\n                    let inner_offset = schema.offsets.first().copied().unwrap_or(\n                        shm::align_up(1, schema.parameters[0].alignment().max(1)),\n                    );\n                    let child = data.add(inner_offset);\n                    adjust_voidstar_relptrs(child, &schema.parameters[0], base_rel)?;\n                }\n            }\n            SerialType::Tensor => {\n                let tensor = &mut *(data as *mut Tensor);\n                if tensor.total_elements > 0 {\n                    tensor.shape += base_rel;\n                    tensor.data += base_rel;\n                }\n            }\n            _ => {} // Fixed-width primitives: no relptrs to adjust\n        }\n    }\n    Ok(())\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_header_size() {\n        assert_eq!(std::mem::size_of::<PacketHeader>(), 32);\n    }\n\n    #[test]\n    fn test_ping_roundtrip() {\n        let ping = PacketHeader::ping();\n        assert!(ping.is_valid());\n        assert!(ping.is_ping());\n        let bytes = ping.to_bytes();\n        let recovered = PacketHeader::from_bytes(&bytes).unwrap();\n        assert!(recovered.is_ping());\n    }\n\n    #[test]\n    fn test_call_packet() {\n        let call = PacketHeader::local_call(42, 128);\n        assert!(call.is_call());\n        assert!(call.is_local_call());\n        assert!(!call.is_remote_call());\n        let bytes = call.to_bytes();\n        let recovered = PacketHeader::from_bytes(&bytes).unwrap();\n        assert!(recovered.is_local_call());\n        let midx = unsafe { recovered.command.call.midx };\n        assert_eq!(midx, 42);\n        let len = { recovered.length };\n        assert_eq!(len, 128);\n    }\n\n    #[test]\n    fn test_fail_packet() {\n        let fail = PacketHeader::fail(100);\n        assert!(fail.is_data());\n        assert!(fail.is_fail());\n    }\n\n    #[test]\n    fn test_data_mesg() {\n        let data = PacketHeader::data_mesg(PACKET_FORMAT_MSGPACK, 256);\n        assert!(data.is_data());\n        assert!(!data.is_fail());\n        let fmt = unsafe { data.command.data.format };\n        assert_eq!(fmt, PACKET_FORMAT_MSGPACK);\n        let len = { data.length };\n        assert_eq!(len, 256);\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/packet_ffi.rs",
    "content": "//! C ABI wrappers for packet functions.\n//! Replaces packet.c with calls to Rust packet.rs + voidstar.rs.\n\nuse std::ffi::{c_char, c_void, CStr};\nuse std::ptr;\n\nuse crate::cschema::CSchema;\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::packet::*;\nuse crate::shm::{self, AbsPtr, RelPtr};\n\n// ── morloc_call_t ────────────────────────────────────────────────────────────\n\n/// Matches C `morloc_call_t` layout.\n#[repr(C)]\npub struct MorlocCall {\n    pub midx: u32,\n    pub args: *mut *mut u8,\n    pub nargs: usize,\n    pub owns_args: i32,\n}\n\n// ── Header reading ───────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_morloc_packet_header(\n    msg: *const u8,\n    errmsg: *mut *mut c_char,\n) -> *mut PacketHeader {\n    clear_errmsg(errmsg);\n    if msg.is_null() {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Cannot make packet from NULL pointer\".into()));\n        return ptr::null_mut();\n    }\n    // Validate magic\n    let header = &*(msg as *const PacketHeader);\n    if !header.is_valid() {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Malformed morloc packet\".into()));\n        return ptr::null_mut();\n    }\n    msg as *mut PacketHeader\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn packet_is_ping(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return false; }\n    (*header).is_ping()\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn packet_is_local_call(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return false; }\n    (*header).is_local_call()\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn packet_is_remote_call(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return false; }\n    (*header).is_remote_call()\n}\n\n// ── Packet size ──────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn morloc_packet_size_from_header(\n    header: *const PacketHeader,\n) -> usize {\n    if header.is_null() { return 0; }\n    32 + (*header).offset as usize + (*header).length as usize\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn morloc_packet_size(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> usize {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return 0; }\n    morloc_packet_size_from_header(header)\n}\n\n// ── Ping ─────────────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn return_ping(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    if !packet_is_ping(packet, errmsg) {\n        if (*errmsg).is_null() {\n            set_errmsg(errmsg, &MorlocError::Packet(\"Not a ping packet\".into()));\n        }\n        return ptr::null_mut();\n    }\n    let size = morloc_packet_size(packet, errmsg);\n    if size == 0 { return ptr::null_mut(); }\n    let buf = libc::calloc(size, 1) as *mut u8;\n    if buf.is_null() {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Failed to allocate ping response\".into()));\n        return ptr::null_mut();\n    }\n    ptr::copy_nonoverlapping(packet, buf, size);\n    buf\n}\n\n#[no_mangle]\npub extern \"C\" fn make_ping_packet() -> *mut u8 {\n    // SAFETY: calloc returns null or a valid pointer to 32 zeroed bytes.\n    let buf = unsafe { libc::calloc(32, 1) as *mut u8 };\n    if buf.is_null() { return ptr::null_mut(); }\n    let header = PacketHeader::ping();\n    let bytes = header.to_bytes();\n    // SAFETY: buf points to 32 bytes; bytes is exactly 32 bytes from to_bytes().\n    unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), buf, 32) };\n    buf\n}\n\n// ── Data packet construction helpers ─────────────────────────────────────────\n\n/// Build metadata section: metadata header + schema string, padded to 32-byte boundary.\n/// Returns (metadata_buffer, padded_length). Returns (null, 0) if schema is null.\n///\n/// # Safety\n/// `schema` must be null or a valid CSchema pointer.\nunsafe fn build_schema_metadata(schema: *const CSchema) -> (*mut u8, usize) {\n    if schema.is_null() {\n        return (ptr::null_mut(), 0);\n    }\n    let rs = CSchema::to_rust(schema);\n    let schema_str = crate::schema::schema_to_string(&rs);\n    let schema_bytes = schema_str.as_bytes();\n    let schema_len = schema_bytes.len() + 1; // +1 for null terminator\n    let meta_header_size = 8; // sizeof(morloc_metadata_header_t)\n    let raw_meta_len = meta_header_size + schema_len;\n    let padded_meta_len = ((raw_meta_len + 31) / 32) * 32;\n\n    let metadata = libc::calloc(padded_meta_len, 1) as *mut u8;\n    if metadata.is_null() {\n        return (ptr::null_mut(), 0);\n    }\n\n    // Write metadata header\n    *metadata = b'm';\n    *metadata.add(1) = b'm';\n    *metadata.add(2) = b'h';\n    *metadata.add(3) = METADATA_TYPE_SCHEMA_STRING;\n    *(metadata.add(4) as *mut u32) = schema_len as u32;\n\n    // Write schema string\n    ptr::copy_nonoverlapping(schema_bytes.as_ptr(), metadata.add(meta_header_size), schema_bytes.len());\n    // Null terminator already zeroed by calloc\n\n    (metadata, padded_meta_len)\n}\n\n/// Generic data packet builder matching C's make_morloc_data_packet.\n///\n/// # Safety\n/// If non-null, `data` must point to `data_length` readable bytes.\n/// If non-null, `metadata` must point to `metadata_length` readable bytes.\nunsafe fn make_data_packet_raw(\n    data: *const u8,\n    data_length: usize,\n    metadata: *const u8,\n    metadata_length: usize,\n    src: u8,\n    fmt: u8,\n    cmpr: u8,\n    encr: u8,\n    status: u8,\n) -> *mut u8 {\n    let total = 32 + metadata_length + data_length;\n    let packet = libc::calloc(total, 1) as *mut u8;\n    if packet.is_null() { return ptr::null_mut(); }\n\n    // Build command\n    let cmd = CommandData {\n        cmd_type: PACKET_TYPE_DATA,\n        source: src,\n        format: fmt,\n        compression: cmpr,\n        encryption: encr,\n        status,\n        padding: [0; 2],\n    };\n    let header = PacketHeader {\n        magic: PACKET_MAGIC,\n        plain: THIS_PLAIN,\n        version: THIS_VERSION,\n        flavor: DEFAULT_FLAVOR,\n        mode: DEFAULT_MODE,\n        command: PacketCommand { data: cmd },\n        offset: metadata_length as u32,\n        length: data_length as u64,\n    };\n    let hdr_bytes = header.to_bytes();\n    ptr::copy_nonoverlapping(hdr_bytes.as_ptr(), packet, 32);\n\n    if !metadata.is_null() && metadata_length > 0 {\n        ptr::copy_nonoverlapping(metadata, packet.add(32), metadata_length);\n    }\n    if !data.is_null() && data_length > 0 {\n        ptr::copy_nonoverlapping(data, packet.add(32 + metadata_length), data_length);\n    }\n\n    packet\n}\n\n/// Generic data packet with schema metadata.\nunsafe fn make_data_packet_with_schema(\n    data: *const u8,\n    data_length: usize,\n    schema: *const CSchema,\n    src: u8,\n    fmt: u8,\n    cmpr: u8,\n    encr: u8,\n    status: u8,\n) -> *mut u8 {\n    let (metadata, metadata_length) = build_schema_metadata(schema);\n    let result = make_data_packet_raw(\n        data, data_length, metadata, metadata_length, src, fmt, cmpr, encr, status,\n    );\n    if !metadata.is_null() {\n        libc::free(metadata as *mut c_void);\n    }\n    result\n}\n\n// ── Standard data packet (RPTR + VOIDSTAR) ──────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_standard_data_packet(\n    relptr: RelPtr,\n    schema: *const CSchema,\n) -> *mut u8 {\n    let packet = make_data_packet_with_schema(\n        ptr::null(),\n        std::mem::size_of::<RelPtr>(),\n        schema,\n        PACKET_SOURCE_RPTR,\n        PACKET_FORMAT_VOIDSTAR,\n        PACKET_COMPRESSION_NONE,\n        PACKET_ENCRYPTION_NONE,\n        PACKET_STATUS_PASS,\n    );\n    if packet.is_null() { return ptr::null_mut(); }\n\n    // Write the relptr into the payload area\n    let header = &*(packet as *const PacketHeader);\n    let payload_offset = 32 + header.offset as usize;\n    *(packet.add(payload_offset) as *mut RelPtr) = relptr;\n\n    packet\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_arrow_data_packet(\n    relptr: RelPtr,\n    schema: *const CSchema,\n) -> *mut u8 {\n    let packet = make_data_packet_with_schema(\n        ptr::null(),\n        std::mem::size_of::<RelPtr>(),\n        schema,\n        PACKET_SOURCE_RPTR,\n        PACKET_FORMAT_ARROW,\n        PACKET_COMPRESSION_NONE,\n        PACKET_ENCRYPTION_NONE,\n        PACKET_STATUS_PASS,\n    );\n    if packet.is_null() { return ptr::null_mut(); }\n\n    let header = &*(packet as *const PacketHeader);\n    let payload_offset = 32 + header.offset as usize;\n    *(packet.add(payload_offset) as *mut RelPtr) = relptr;\n\n    packet\n}\n\n// ── Msgpack packets ──────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_mpk_data_packet(\n    mpk_filename: *const c_char,\n    schema: *const CSchema,\n) -> *mut u8 {\n    if mpk_filename.is_null() { return ptr::null_mut(); }\n    let filename = CStr::from_ptr(mpk_filename);\n    let bytes = filename.to_bytes();\n    make_data_packet_with_schema(\n        bytes.as_ptr(),\n        bytes.len(),\n        schema,\n        PACKET_SOURCE_FILE,\n        PACKET_FORMAT_MSGPACK,\n        PACKET_COMPRESSION_NONE,\n        PACKET_ENCRYPTION_NONE,\n        PACKET_STATUS_PASS,\n    )\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_data_packet_from_mpk(\n    mpk: *const c_char,\n    mpk_size: usize,\n    schema: *const CSchema,\n) -> *mut u8 {\n    make_data_packet_with_schema(\n        mpk as *const u8,\n        mpk_size,\n        schema,\n        PACKET_SOURCE_MESG,\n        PACKET_FORMAT_MSGPACK,\n        PACKET_COMPRESSION_NONE,\n        PACKET_ENCRYPTION_NONE,\n        PACKET_STATUS_PASS,\n    )\n}\n\n// ── get_data_packet_as_mpk ───────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn get_data_packet_as_mpk(\n    packet: *const u8,\n    schema: *const CSchema,\n    mpk_out: *mut *mut c_char,\n    mpk_size_out: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    *mpk_out = ptr::null_mut();\n    *mpk_size_out = 0;\n\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return 0; }\n\n    if (*header).command_type() != PACKET_TYPE_DATA {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Expected a data packet\".into()));\n        return 0;\n    }\n\n    // Check for error\n    if (*header).is_fail() {\n        let payload_start = 32 + (*header).offset as usize;\n        let payload_len = (*header).length as usize;\n        let msg = std::str::from_utf8_unchecked(\n            std::slice::from_raw_parts(packet.add(payload_start), payload_len)\n        );\n        set_errmsg(errmsg, &MorlocError::Packet(format!(\"\\n{}\", msg)));\n        return 0;\n    }\n\n    let rs = CSchema::to_rust(schema);\n    let source = (*header).command.data.source;\n    let format = (*header).command.data.format;\n    let payload_start = 32 + (*header).offset as usize;\n    let payload_len = (*header).length as usize;\n    let payload = std::slice::from_raw_parts(packet.add(payload_start), payload_len);\n\n    if source == PACKET_SOURCE_MESG && format == PACKET_FORMAT_MSGPACK {\n        // Inline msgpack: copy directly\n        let buf = libc::malloc(payload_len) as *mut c_char;\n        if buf.is_null() {\n            set_errmsg(errmsg, &MorlocError::Packet(\"malloc failed\".into()));\n            return 0;\n        }\n        ptr::copy_nonoverlapping(payload.as_ptr(), buf as *mut u8, payload_len);\n        *mpk_out = buf;\n        *mpk_size_out = payload_len;\n    } else if source == PACKET_SOURCE_MESG && format == PACKET_FORMAT_VOIDSTAR {\n        // Inline voidstar: load into SHM then convert to msgpack\n        match crate::voidstar::read_binary(payload, &rs) {\n            Ok(abs) => {\n                match crate::mpack::pack_with_schema(abs, &rs) {\n                    Ok(data) => {\n                        let buf = libc::malloc(data.len()) as *mut u8;\n                        if buf.is_null() {\n                            set_errmsg(errmsg, &MorlocError::Packet(\"malloc failed\".into()));\n                            return 0;\n                        }\n                        ptr::copy_nonoverlapping(data.as_ptr(), buf, data.len());\n                        *mpk_out = buf as *mut c_char;\n                        *mpk_size_out = data.len();\n                    }\n                    Err(e) => { set_errmsg(errmsg, &e); return 0; }\n                }\n                // Free SHM\n                let _ = crate::voidstar::free_by_schema(abs, &rs);\n                let _ = shm::shfree(abs);\n            }\n            Err(e) => { set_errmsg(errmsg, &e); return 0; }\n        }\n    } else if source == PACKET_SOURCE_FILE && format == PACKET_FORMAT_MSGPACK {\n        // File-based msgpack: read the file\n        let filename_bytes = &payload[..payload_len.min(4096)];\n        let filename = std::str::from_utf8(filename_bytes).unwrap_or(\"\");\n        let filename = filename.trim_end_matches('\\0');\n        match std::fs::read(filename) {\n            Ok(data) => {\n                let buf = libc::malloc(data.len()) as *mut u8;\n                if buf.is_null() {\n                    set_errmsg(errmsg, &MorlocError::Packet(\"malloc failed\".into()));\n                    return 0;\n                }\n                ptr::copy_nonoverlapping(data.as_ptr(), buf, data.len());\n                *mpk_out = buf as *mut c_char;\n                *mpk_size_out = data.len();\n            }\n            Err(e) => {\n                set_errmsg(errmsg, &MorlocError::Io(e));\n                return 0;\n            }\n        }\n    } else if source == PACKET_SOURCE_RPTR && format == PACKET_FORMAT_VOIDSTAR {\n        // Voidstar via relptr: convert to msgpack\n        let relptr = *(payload.as_ptr() as *const RelPtr);\n        match shm::rel2abs(relptr) {\n            Ok(abs) => {\n                match crate::mpack::pack_with_schema(abs, &rs) {\n                    Ok(data) => {\n                        let buf = libc::malloc(data.len()) as *mut u8;\n                        if buf.is_null() {\n                            set_errmsg(errmsg, &MorlocError::Packet(\"malloc failed\".into()));\n                            return 0;\n                        }\n                        ptr::copy_nonoverlapping(data.as_ptr(), buf, data.len());\n                        *mpk_out = buf as *mut c_char;\n                        *mpk_size_out = data.len();\n                    }\n                    Err(e) => { set_errmsg(errmsg, &e); return 0; }\n                }\n            }\n            Err(e) => { set_errmsg(errmsg, &e); return 0; }\n        }\n    } else {\n        set_errmsg(errmsg, &MorlocError::Packet(\n            format!(\"Unsupported packet source/format: 0x{:02x}/0x{:02x}\", source, format)\n        ));\n        return 0;\n    }\n\n    1 // true\n}\n\n// ── Schema from metadata ─────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_schema_from_packet_meta(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return ptr::null_mut(); }\n\n    let offset = (*header).offset as usize;\n    if offset < 8 { return ptr::null_mut(); } // no room for metadata header\n\n    let meta_start = 32usize;\n    let meta_end = meta_start + offset;\n    let mut pos = meta_start;\n    while pos + 8 <= meta_end {\n        if *packet.add(pos) == b'm' && *packet.add(pos + 1) == b'm' && *packet.add(pos + 2) == b'h' {\n            let meta_type = *packet.add(pos + 3);\n            let meta_size = *(packet.add(pos + 4) as *const u32) as usize;\n            if meta_type == METADATA_TYPE_SCHEMA_STRING {\n                // Return pointer into the packet buffer (matches C behavior)\n                return packet.add(pos + 8) as *mut c_char;\n            }\n            pos += 8 + meta_size;\n        } else {\n            break;\n        }\n    }\n    ptr::null_mut()\n}\n\n// ── Fail packet ──────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_fail_packet(\n    failure_message: *const c_char,\n) -> *mut u8 {\n    if failure_message.is_null() { return ptr::null_mut(); }\n    let msg = CStr::from_ptr(failure_message).to_bytes();\n    make_data_packet_raw(\n        msg.as_ptr(),\n        msg.len(),\n        ptr::null(),\n        0,\n        PACKET_SOURCE_MESG,\n        PACKET_FORMAT_TEXT,\n        PACKET_COMPRESSION_NONE,\n        PACKET_ENCRYPTION_NONE,\n        PACKET_STATUS_FAIL,\n    )\n}\n\n// ── Error message extraction ─────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn get_morloc_data_packet_error_message(\n    data: *const u8,\n    errmsg: *mut *mut c_char,\n) -> *mut c_char {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(data, errmsg);\n    if header.is_null() { return ptr::null_mut(); }\n\n    if (*header).is_fail() {\n        let payload_start = 32 + (*header).offset as usize;\n        let payload_len = (*header).length as usize;\n        let buf = libc::calloc(payload_len + 1, 1) as *mut c_char;\n        if buf.is_null() {\n            set_errmsg(errmsg, &MorlocError::Packet(\"Failed to allocate error message\".into()));\n            return ptr::null_mut();\n        }\n        ptr::copy_nonoverlapping(data.add(payload_start), buf as *mut u8, payload_len);\n        return buf;\n    }\n\n    ptr::null_mut()\n}\n\n// ── get_morloc_data_packet_value ─────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn get_morloc_data_packet_value(\n    data: *const u8,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(data, errmsg);\n    if header.is_null() { return ptr::null_mut(); }\n\n    if (*header).command_type() != PACKET_TYPE_DATA {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Expected a data packet\".into()));\n        return ptr::null_mut();\n    }\n\n    // Check for error\n    let packet_error = get_morloc_data_packet_error_message(data, errmsg);\n    if !packet_error.is_null() {\n        let err_str = CStr::from_ptr(packet_error).to_string_lossy().into_owned();\n        libc::free(packet_error as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Packet(format!(\"\\n{}\", err_str)));\n        return ptr::null_mut();\n    }\n    // Clear any errmsg from get_morloc_data_packet_error_message\n    clear_errmsg(errmsg);\n\n    let rs = CSchema::to_rust(schema);\n    let source = (*header).command.data.source;\n    let format = (*header).command.data.format;\n    let payload_start = 32 + (*header).offset as usize;\n    let payload_len = (*header).length as usize;\n\n    match source {\n        PACKET_SOURCE_MESG => {\n            if format == PACKET_FORMAT_MSGPACK {\n                let payload = std::slice::from_raw_parts(data.add(payload_start), payload_len);\n                match crate::mpack::unpack_with_schema(payload, &rs) {\n                    Ok(abs) => abs,\n                    Err(e) => { set_errmsg(errmsg, &e); ptr::null_mut() }\n                }\n            } else if format == PACKET_FORMAT_VOIDSTAR {\n                let payload = std::slice::from_raw_parts(data.add(payload_start), payload_len);\n                match crate::voidstar::read_binary(payload, &rs) {\n                    Ok(abs) => abs,\n                    Err(e) => { set_errmsg(errmsg, &e); ptr::null_mut() }\n                }\n            } else {\n                set_errmsg(errmsg, &MorlocError::Packet(\n                    format!(\"Invalid format from mesg: 0x{:02x}\", format)\n                ));\n                ptr::null_mut()\n            }\n        }\n        PACKET_SOURCE_FILE => {\n            if format == PACKET_FORMAT_MSGPACK {\n                let filename_bytes = std::slice::from_raw_parts(data.add(payload_start), payload_len.min(4096));\n                let filename = std::str::from_utf8(filename_bytes).unwrap_or(\"\");\n                let filename = filename.trim_end_matches('\\0');\n                match std::fs::read(filename) {\n                    Ok(file_data) => {\n                        match crate::mpack::unpack_with_schema(&file_data, &rs) {\n                            Ok(abs) => abs,\n                            Err(e) => { set_errmsg(errmsg, &e); ptr::null_mut() }\n                        }\n                    }\n                    Err(e) => {\n                        set_errmsg(errmsg, &MorlocError::Io(e));\n                        ptr::null_mut()\n                    }\n                }\n            } else {\n                set_errmsg(errmsg, &MorlocError::Packet(\n                    format!(\"Invalid format from file: 0x{:02x}\", format)\n                ));\n                ptr::null_mut()\n            }\n        }\n        PACKET_SOURCE_RPTR => {\n            if format == PACKET_FORMAT_VOIDSTAR || format == PACKET_FORMAT_ARROW {\n                let relptr = *(data.add(payload_start) as *const RelPtr);\n                match shm::rel2abs(relptr) {\n                    Ok(abs) => abs,\n                    Err(e) => { set_errmsg(errmsg, &e); ptr::null_mut() }\n                }\n            } else {\n                set_errmsg(errmsg, &MorlocError::Packet(\n                    format!(\"For RPTR source, expected voidstar or arrow format, found: 0x{:02x}\", format)\n                ));\n                ptr::null_mut()\n            }\n        }\n        _ => {\n            set_errmsg(errmsg, &MorlocError::Packet(\"Invalid source\".into()));\n            ptr::null_mut()\n        }\n    }\n}\n\n// ── Call packet construction ─────────────────────────────────────────────────\n\nunsafe fn make_call_packet_gen(\n    midx: u32,\n    entrypoint: u8,\n    arg_packets: *const *const u8,\n    nargs: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n\n    // Calculate total data length\n    let mut data_length: usize = 0;\n    for i in 0..nargs {\n        let arg = read_morloc_packet_header(*arg_packets.add(i), errmsg);\n        if arg.is_null() { return ptr::null_mut(); }\n        data_length += morloc_packet_size_from_header(arg);\n    }\n\n    let total = 32 + data_length;\n    let packet = libc::calloc(total, 1) as *mut u8;\n    if packet.is_null() {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Failed to allocate call packet\".into()));\n        return ptr::null_mut();\n    }\n\n    let cmd = CommandCall {\n        cmd_type: PACKET_TYPE_CALL,\n        entrypoint,\n        padding: [0; 2],\n        midx,\n    };\n    let header = PacketHeader {\n        magic: PACKET_MAGIC,\n        plain: THIS_PLAIN,\n        version: THIS_VERSION,\n        flavor: DEFAULT_FLAVOR,\n        mode: DEFAULT_MODE,\n        command: PacketCommand { call: cmd },\n        offset: 0,\n        length: data_length as u64,\n    };\n    let hdr_bytes = header.to_bytes();\n    ptr::copy_nonoverlapping(hdr_bytes.as_ptr(), packet, 32);\n\n    let mut pos = 32;\n    for i in 0..nargs {\n        let arg = read_morloc_packet_header(*arg_packets.add(i), errmsg);\n        if arg.is_null() {\n            libc::free(packet as *mut c_void);\n            return ptr::null_mut();\n        }\n        let arg_size = morloc_packet_size_from_header(arg);\n        ptr::copy_nonoverlapping(*arg_packets.add(i), packet.add(pos), arg_size);\n        pos += arg_size;\n    }\n\n    packet\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_morloc_local_call_packet(\n    midx: u32,\n    arg_packets: *const *const u8,\n    nargs: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    make_call_packet_gen(midx, PACKET_ENTRYPOINT_LOCAL, arg_packets, nargs, errmsg)\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_morloc_remote_call_packet(\n    midx: u32,\n    arg_packets: *const *const u8,\n    nargs: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    make_call_packet_gen(midx, PACKET_ENTRYPOINT_REMOTE_SFS, arg_packets, nargs, errmsg)\n}\n\n// ── Call packet reading ──────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_morloc_call_packet(\n    packet: *const u8,\n    errmsg: *mut *mut c_char,\n) -> *mut MorlocCall {\n    clear_errmsg(errmsg);\n\n    let call = libc::calloc(1, std::mem::size_of::<MorlocCall>()) as *mut MorlocCall;\n    if call.is_null() {\n        set_errmsg(errmsg, &MorlocError::Packet(\"calloc failed\".into()));\n        return ptr::null_mut();\n    }\n\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() {\n        libc::free(call as *mut c_void);\n        return ptr::null_mut();\n    }\n    if (*header).command_type() != PACKET_TYPE_CALL {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Expected packet to be a call\".into()));\n        libc::free(call as *mut c_void);\n        return ptr::null_mut();\n    }\n\n    (*call).midx = (*header).command.call.midx;\n    (*call).nargs = 0;\n    (*call).args = ptr::null_mut();\n    (*call).owns_args = 0; // borrowing pointers into packet\n\n    let start_pos = 32 + (*header).offset as usize;\n    let end_pos = start_pos + (*header).length as usize;\n\n    // First pass: count args\n    let mut pos = start_pos;\n    while pos < end_pos {\n        let arg_size = morloc_packet_size(packet.add(pos), errmsg);\n        if arg_size == 0 {\n            free_morloc_call(call);\n            return ptr::null_mut();\n        }\n        pos += arg_size;\n        (*call).nargs += 1;\n    }\n\n    // Allocate args array\n    (*call).args = libc::calloc((*call).nargs, std::mem::size_of::<*mut u8>()) as *mut *mut u8;\n    if (*call).args.is_null() {\n        set_errmsg(errmsg, &MorlocError::Packet(\"calloc failed for args\".into()));\n        libc::free(call as *mut c_void);\n        return ptr::null_mut();\n    }\n\n    // Second pass: fill pointers (borrowing into original packet)\n    pos = start_pos;\n    for i in 0..(*call).nargs {\n        let arg_header = read_morloc_packet_header(packet.add(pos), errmsg);\n        if arg_header.is_null() {\n            free_morloc_call(call);\n            return ptr::null_mut();\n        }\n        if (*arg_header).command_type() != PACKET_TYPE_DATA {\n            set_errmsg(errmsg, &MorlocError::Packet(\n                format!(\"Argument #{} is not a DATA packet (type={})\", i, (*arg_header).command_type())\n            ));\n            free_morloc_call(call);\n            return ptr::null_mut();\n        }\n        *(*call).args.add(i) = packet.add(pos) as *mut u8;\n        pos += morloc_packet_size_from_header(arg_header);\n    }\n\n    call\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn free_morloc_call(call: *mut MorlocCall) {\n    if call.is_null() { return; }\n    let c = &*call;\n    if !c.args.is_null() {\n        if c.owns_args != 0 {\n            for i in 0..c.nargs {\n                let arg = *c.args.add(i);\n                if !arg.is_null() {\n                    libc::free(arg as *mut c_void);\n                }\n            }\n        }\n        libc::free(c.args as *mut c_void);\n    }\n    libc::free(call as *mut c_void);\n}\n\n// adjust_voidstar_relptrs: still provided by cli.c (will move to Rust when cli.c is ported)\n// read_voidstar_binary: still provided by cli.c (will move to Rust when cli.c is ported)\n\n// ── write_voidstar_binary (for intrinsics.c) ─────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn write_voidstar_binary(\n    fd: i32,\n    data: *const c_void,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> RelPtr {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n    match crate::voidstar::write_binary_to_fd(fd, data as AbsPtr, &rs) {\n        Ok(n) => n as RelPtr,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            -1isize as RelPtr\n        }\n    }\n}\n\n// ── flatten_voidstar_to_buffer ───────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn flatten_voidstar_to_buffer(\n    data: *const c_void,\n    schema: *const CSchema,\n    out_buf: *mut *mut u8,\n    out_size: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    *out_buf = ptr::null_mut();\n    *out_size = 0;\n\n    let rs = CSchema::to_rust(schema);\n    match crate::voidstar::flatten_to_buffer(data as AbsPtr, &rs) {\n        Ok(buf) => {\n            let len = buf.len();\n            let c_buf = libc::malloc(len) as *mut u8;\n            if c_buf.is_null() {\n                set_errmsg(errmsg, &MorlocError::Packet(\"malloc failed\".into()));\n                return 1;\n            }\n            ptr::copy_nonoverlapping(buf.as_ptr(), c_buf, len);\n            *out_buf = c_buf;\n            *out_size = len;\n            0\n        }\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            1\n        }\n    }\n}\n\n// read_voidstar_binary: still provided by cli.c (will move to Rust when cli.c is ported)\n\n// ── make_data_packet_auto ────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn make_data_packet_auto(\n    voidstar: *mut c_void,\n    relptr: RelPtr,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    let rs = CSchema::to_rust(schema);\n\n    let flat_size = match crate::ffi::calc_voidstar_size_inner(voidstar as *const u8, &rs) {\n        Ok(s) => s,\n        Err(e) => {\n            set_errmsg(errmsg, &e);\n            return ptr::null_mut();\n        }\n    };\n\n    if flat_size <= MORLOC_INLINE_THRESHOLD {\n        match crate::voidstar::flatten_to_buffer(voidstar as AbsPtr, &rs) {\n            Ok(blob) => {\n                let packet = make_data_packet_with_schema(\n                    blob.as_ptr(),\n                    blob.len(),\n                    schema,\n                    PACKET_SOURCE_MESG,\n                    PACKET_FORMAT_VOIDSTAR,\n                    PACKET_COMPRESSION_NONE,\n                    PACKET_ENCRYPTION_NONE,\n                    PACKET_STATUS_PASS,\n                );\n                if packet.is_null() {\n                    set_errmsg(errmsg, &MorlocError::Packet(\"Failed to create inline data packet\".into()));\n                }\n                return packet;\n            }\n            Err(e) => {\n                set_errmsg(errmsg, &e);\n                return ptr::null_mut();\n            }\n        }\n    }\n\n    make_standard_data_packet(relptr, schema)\n}\n\n// ── print_morloc_data_packet ─────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn print_morloc_data_packet(\n    packet: *const u8,\n    schema: *const CSchema,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    let header = read_morloc_packet_header(packet, errmsg);\n    if header.is_null() { return 1; }\n\n    if (*header).command_type() != PACKET_TYPE_DATA {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Expected a data packet\".into()));\n        return 1;\n    }\n\n    // Check for error\n    let packet_error = get_morloc_data_packet_error_message(packet, errmsg);\n    if !packet_error.is_null() {\n        let err_str = CStr::from_ptr(packet_error).to_string_lossy().into_owned();\n        libc::free(packet_error as *mut c_void);\n        set_errmsg(errmsg, &MorlocError::Packet(format!(\"\\n{}\", err_str)));\n        return 1;\n    }\n    clear_errmsg(errmsg);\n\n    let rs = CSchema::to_rust(schema);\n    let source = (*header).command.data.source;\n    let format = (*header).command.data.format;\n    let packet_size = morloc_packet_size_from_header(header);\n\n    match source {\n        PACKET_SOURCE_MESG | PACKET_SOURCE_FILE => {\n            // Print the raw packet bytes\n            if print_binary(packet, packet_size, errmsg) != 0 {\n                return 1;\n            }\n        }\n        PACKET_SOURCE_RPTR => {\n            match format {\n                PACKET_FORMAT_VOIDSTAR => {\n                    let payload_start = 32 + (*header).offset as usize;\n                    let relptr = *(packet.add(payload_start) as *const RelPtr);\n                    let voidstar_ptr = match shm::rel2abs(relptr) {\n                        Ok(p) => p,\n                        Err(e) => { set_errmsg(errmsg, &e); return 1; }\n                    };\n\n                    // Build modified header with flat size\n                    let flat_size = match crate::ffi::calc_voidstar_size_inner(voidstar_ptr, &rs) {\n                        Ok(s) => s,\n                        Err(e) => { set_errmsg(errmsg, &e); return 1; }\n                    };\n\n                    let mut new_header = *header;\n                    new_header.command.data.format = PACKET_FORMAT_VOIDSTAR;\n                    // Safely set length (packed struct)\n                    let new_hdr_ptr = &mut new_header as *mut PacketHeader as *mut u8;\n                    *(new_hdr_ptr.add(24) as *mut u64) = flat_size as u64;\n\n                    // Print header\n                    if print_binary(&new_header as *const PacketHeader as *const u8, 32, errmsg) != 0 {\n                        return 1;\n                    }\n\n                    // Print metadata\n                    let offset = (*header).offset as usize;\n                    if offset > 0 {\n                        if print_binary(packet.add(32), offset, errmsg) != 0 {\n                            return 1;\n                        }\n                    }\n\n                    // Write flattened voidstar data to stdout\n                    match crate::voidstar::write_binary_to_fd(libc::STDOUT_FILENO, voidstar_ptr, &rs) {\n                        Ok(_) => {}\n                        Err(e) => { set_errmsg(errmsg, &e); return 1; }\n                    }\n                }\n                _ => {\n                    // Other formats: print raw packet\n                    if print_binary(packet, packet_size, errmsg) != 0 {\n                        return 1;\n                    }\n                }\n            }\n        }\n        _ => {\n            set_errmsg(errmsg, &MorlocError::Packet(\"Invalid source\".into()));\n            return 1;\n        }\n    }\n\n    0 // EXIT_PASS\n}\n\n/// Write binary data to stdout.\n///\n/// # Safety\n/// `buf` must point to at least `count` readable bytes.\nunsafe fn print_binary(\n    buf: *const u8,\n    count: usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    let mut written: usize = 0;\n    while written < count {\n        let n = libc::write(\n            libc::STDOUT_FILENO,\n            buf.add(written) as *const c_void,\n            count - written,\n        );\n        if n < 0 {\n            set_errmsg(errmsg, &MorlocError::Io(std::io::Error::last_os_error()));\n            return 1;\n        }\n        written += n as usize;\n    }\n    0\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/pool_ffi.rs",
    "content": "//! Pool server lifecycle: accept connections, dispatch packets, manage workers.\n//! Replaces pool.c. Uses std::thread instead of raw pthreads for thread mode.\n\nuse std::ffi::{c_char, c_void};\nuse std::ptr;\nuse std::sync::atomic::{AtomicBool, AtomicI32, Ordering};\nuse std::sync::{Arc, Mutex, Condvar};\n\n// ── C-compatible types matching pool.h ───────────────────────────────────────\n\npub type PoolDispatchFn = unsafe extern \"C\" fn(\n    mid: u32, args: *const *const u8, nargs: usize, ctx: *mut c_void,\n) -> *mut u8;\n\n#[repr(C)]\n#[derive(Debug, Clone, Copy, PartialEq)]\npub enum PoolConcurrency {\n    Threads = 0,\n    Fork = 1,\n    Single = 2,\n}\n\n#[repr(C)]\npub struct PoolConfig {\n    pub local_dispatch: PoolDispatchFn,\n    pub remote_dispatch: PoolDispatchFn,\n    pub dispatch_ctx: *mut c_void,\n    pub concurrency: PoolConcurrency,\n    pub initial_workers: i32,\n    pub dynamic_scaling: bool,\n    pub post_fork_child: Option<unsafe extern \"C\" fn(*mut c_void)>,\n}\n\n// SAFETY: PoolConfig contains function pointers and a *mut c_void dispatch_ctx.\n// The function pointers are set once at startup and never mutated.\n// dispatch_ctx points to language-runtime state that is either thread-local\n// (fork mode) or protected by the runtime's own synchronization (thread mode).\n// The pool architecture guarantees dispatch_ctx is not concurrently mutated.\nunsafe impl Send for PoolConfig {}\nunsafe impl Sync for PoolConfig {}\n\n// ── Global state ─────────────────────────────────────────────────────────────\n\nstatic SHUTTING_DOWN: AtomicBool = AtomicBool::new(false);\nstatic BUSY_COUNT: AtomicI32 = AtomicI32::new(0);\nstatic TOTAL_WORKERS: AtomicI32 = AtomicI32::new(0);\n\n// SAFETY: SHARED_BUSY is set once in pool_main_fork (parent process) before\n// forking children. After fork, each process accesses the mmap'd AtomicI32\n// via atomic operations only. Reset to null during shutdown.\nstatic mut SHARED_BUSY: *mut AtomicI32 = ptr::null_mut();\n\n#[no_mangle]\npub extern \"C\" fn pool_mark_busy() {\n    // SAFETY: SHARED_BUSY is either null (thread mode, use local atomic) or a valid\n    // mmap'd AtomicI32 pointer set during pool_main_fork initialization.\n    unsafe {\n        if !SHARED_BUSY.is_null() {\n            (*SHARED_BUSY).fetch_add(1, Ordering::Relaxed);\n        } else {\n            BUSY_COUNT.fetch_add(1, Ordering::Relaxed);\n        }\n    }\n}\n\n#[no_mangle]\npub extern \"C\" fn pool_mark_idle() {\n    // SAFETY: Same as pool_mark_busy - SHARED_BUSY is null or a valid mmap'd pointer.\n    unsafe {\n        if !SHARED_BUSY.is_null() {\n            (*SHARED_BUSY).fetch_sub(1, Ordering::Relaxed);\n        } else {\n            BUSY_COUNT.fetch_sub(1, Ordering::Relaxed);\n        }\n    }\n}\n\nextern \"C\" fn pool_sigterm_handler(_sig: i32) {\n    SHUTTING_DOWN.store(true, Ordering::Relaxed);\n}\n\n// ── Packet dispatch ──────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn pool_dispatch_packet(\n    packet: *const u8,\n    local_dispatch: PoolDispatchFn,\n    remote_dispatch: PoolDispatchFn,\n    ctx: *mut c_void,\n) -> *mut u8 {\n    extern \"C\" {\n        fn make_fail_packet(msg: *const c_char) -> *mut u8;\n        fn packet_is_ping(packet: *const u8, errmsg: *mut *mut c_char) -> bool;\n        fn return_ping(packet: *const u8, errmsg: *mut *mut c_char) -> *mut u8;\n        fn packet_is_local_call(packet: *const u8, errmsg: *mut *mut c_char) -> bool;\n        fn packet_is_remote_call(packet: *const u8, errmsg: *mut *mut c_char) -> bool;\n        fn read_morloc_call_packet(packet: *const u8, errmsg: *mut *mut c_char) -> *mut crate::packet_ffi::MorlocCall;\n        fn free_morloc_call(call: *mut crate::packet_ffi::MorlocCall);\n    }\n\n    if packet.is_null() {\n        return make_fail_packet(b\"NULL packet in pool dispatch\\0\".as_ptr() as *const c_char);\n    }\n\n    let mut errmsg: *mut c_char = ptr::null_mut();\n\n    if packet_is_ping(packet, &mut errmsg) {\n        if !errmsg.is_null() { return fail_from_errmsg(errmsg); }\n        let pong = return_ping(packet, &mut errmsg);\n        if !errmsg.is_null() { return fail_from_errmsg(errmsg); }\n        return pong;\n    }\n    if !errmsg.is_null() { return fail_from_errmsg(errmsg); }\n\n    let is_local = packet_is_local_call(packet, &mut errmsg);\n    if !errmsg.is_null() { return fail_from_errmsg(errmsg); }\n    let is_remote = packet_is_remote_call(packet, &mut errmsg);\n    if !errmsg.is_null() { return fail_from_errmsg(errmsg); }\n\n    if is_local || is_remote {\n        let call = read_morloc_call_packet(packet, &mut errmsg);\n        if !errmsg.is_null() { return fail_from_errmsg(errmsg); }\n\n        let mid = (*call).midx;\n        let args = (*call).args as *const *const u8;\n        let nargs = (*call).nargs;\n\n        let dispatch_fn = if is_local { local_dispatch } else { remote_dispatch };\n        let result = dispatch_fn(mid, args, nargs, ctx);\n\n        free_morloc_call(call);\n\n        if result.is_null() {\n            return make_fail_packet(b\"dispatch callback returned NULL\\0\".as_ptr() as *const c_char);\n        }\n        return result;\n    }\n\n    make_fail_packet(b\"Unexpected packet type in pool dispatch\\0\".as_ptr() as *const c_char)\n}\n\nunsafe fn fail_from_errmsg(errmsg: *mut c_char) -> *mut u8 {\n    extern \"C\" { fn make_fail_packet(msg: *const c_char) -> *mut u8; }\n    let pkt = make_fail_packet(errmsg);\n    libc::free(errmsg as *mut c_void);\n    pkt\n}\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nunsafe fn try_send_fail(client_fd: i32, msg: *const c_char) {\n    extern \"C\" {\n        fn make_fail_packet(msg: *const c_char) -> *mut u8;\n        fn send_packet_to_foreign_server(fd: i32, packet: *mut u8, errmsg: *mut *mut c_char) -> usize;\n    }\n    let fail = make_fail_packet(if msg.is_null() { b\"Unknown error\\0\".as_ptr() as *const c_char } else { msg });\n    if !fail.is_null() {\n        let mut err: *mut c_char = ptr::null_mut();\n        send_packet_to_foreign_server(client_fd, fail, &mut err);\n        libc::free(fail as *mut c_void);\n        if !err.is_null() { libc::free(err as *mut c_void); }\n    }\n}\n\n// ── Thread mode job queue ────────────────────────────────────────────────────\n\nstruct JobQueue {\n    jobs: Mutex<Vec<i32>>,\n    cond: Condvar,\n}\n\nimpl JobQueue {\n    fn new() -> Self {\n        JobQueue { jobs: Mutex::new(Vec::new()), cond: Condvar::new() }\n    }\n\n    fn push(&self, fd: i32) {\n        let mut jobs = self.jobs.lock().unwrap();\n        jobs.push(fd);\n        self.cond.notify_one();\n    }\n\n    fn pop(&self) -> Option<i32> {\n        let mut jobs = self.jobs.lock().unwrap();\n        loop {\n            if SHUTTING_DOWN.load(Ordering::Relaxed) { return None; }\n            if let Some(fd) = jobs.pop() { return Some(fd); }\n            let result = self.cond.wait_timeout(jobs, std::time::Duration::from_millis(100)).unwrap();\n            jobs = result.0;\n        }\n    }\n}\n\n// ── Worker thread ────────────────────────────────────────────────────────────\n\nunsafe fn worker_loop(queue: &JobQueue, config: &PoolConfig) {\n    extern \"C\" {\n        fn stream_from_client(fd: i32, errmsg: *mut *mut c_char) -> *mut u8;\n        fn send_packet_to_foreign_server(fd: i32, packet: *mut u8, errmsg: *mut *mut c_char) -> usize;\n        fn close_socket(fd: i32);\n    }\n\n    while !SHUTTING_DOWN.load(Ordering::Relaxed) {\n        let client_fd = match queue.pop() {\n            Some(fd) => fd,\n            None => break,\n        };\n\n        let mut errmsg: *mut c_char = ptr::null_mut();\n        let data = stream_from_client(client_fd, &mut errmsg);\n        if data.is_null() || !errmsg.is_null() {\n            if !errmsg.is_null() {\n                try_send_fail(client_fd, errmsg);\n                libc::free(errmsg as *mut c_void);\n            }\n            libc::free(data as *mut c_void);\n            close_socket(client_fd);\n            continue;\n        }\n\n        // Track busy state so the accept loop can spawn new workers if needed\n        pool_mark_busy();\n        let result = pool_dispatch_packet(data, config.local_dispatch, config.remote_dispatch, config.dispatch_ctx);\n        pool_mark_idle();\n        libc::free(data as *mut c_void);\n\n        if !result.is_null() {\n            send_packet_to_foreign_server(client_fd, result, &mut errmsg);\n            libc::free(result as *mut c_void);\n            if !errmsg.is_null() { libc::free(errmsg as *mut c_void); }\n        }\n\n        libc::fflush(ptr::null_mut()); // flush stdout\n        close_socket(client_fd);\n    }\n}\n\n// ── Pool main: threads mode ──────────────────────────────────────────────────\n\nunsafe fn pool_main_threads(config: &PoolConfig, socket_path: *const c_char, tmpdir: *const c_char, shm_basename: *const c_char) -> i32 {\n    extern \"C\" {\n        fn start_daemon(socket_path: *const c_char, tmpdir: *const c_char, shm_basename: *const c_char, size: usize, errmsg: *mut *mut c_char) -> *mut c_void;\n        fn close_daemon(daemon: *mut *mut c_void);\n        fn wait_for_client_with_timeout(daemon: *mut c_void, timeout_us: i32, errmsg: *mut *mut c_char) -> i32;\n    }\n\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let mut daemon = start_daemon(socket_path, tmpdir, shm_basename, 0xffff, &mut errmsg);\n    if !errmsg.is_null() {\n        libc::fprintf(libc::fdopen(2, b\"w\\0\".as_ptr() as *const c_char),\n            b\"Failed to start language server:\\n%s\\n\\0\".as_ptr() as *const c_char, errmsg);\n        libc::free(errmsg as *mut c_void);\n        return 1;\n    }\n\n    let queue = Arc::new(JobQueue::new());\n    let nthreads = config.initial_workers.max(1) as usize;\n    TOTAL_WORKERS.store(nthreads as i32, Ordering::Relaxed);\n\n    let mut handles = Vec::with_capacity(nthreads);\n    for _ in 0..nthreads {\n        let q = Arc::clone(&queue);\n        let cfg = ptr::read(config); // Copy config for thread\n        handles.push(std::thread::spawn(move || {\n            worker_loop(&q, &cfg);\n        }));\n    }\n\n    while !SHUTTING_DOWN.load(Ordering::Relaxed) {\n        let client_fd = wait_for_client_with_timeout(daemon, 10000, &mut errmsg);\n        if !errmsg.is_null() { libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n        if client_fd > 0 {\n            queue.push(client_fd);\n        }\n\n        // Dynamic scaling: spawn a new worker if all are busy\n        if config.dynamic_scaling {\n            let busy = BUSY_COUNT.load(Ordering::Relaxed);\n            let total = TOTAL_WORKERS.load(Ordering::Relaxed);\n            if busy >= total {\n                let q = Arc::clone(&queue);\n                let cfg = ptr::read(config);\n                handles.push(std::thread::spawn(move || {\n                    worker_loop(&q, &cfg);\n                }));\n                TOTAL_WORKERS.fetch_add(1, Ordering::Relaxed);\n            }\n        }\n    }\n\n    SHUTTING_DOWN.store(true, Ordering::Relaxed);\n    queue.cond.notify_all();\n\n    for h in handles { let _ = h.join(); }\n\n    close_daemon(&mut daemon);\n    0\n}\n\n// ── Pool main: single mode ───────────────────────────────────────────────────\n\nunsafe fn pool_main_single(config: &PoolConfig, socket_path: *const c_char, tmpdir: *const c_char, shm_basename: *const c_char) -> i32 {\n    extern \"C\" {\n        fn start_daemon(socket_path: *const c_char, tmpdir: *const c_char, shm_basename: *const c_char, size: usize, errmsg: *mut *mut c_char) -> *mut c_void;\n        fn close_daemon(daemon: *mut *mut c_void);\n        fn wait_for_client_with_timeout(daemon: *mut c_void, timeout_us: i32, errmsg: *mut *mut c_char) -> i32;\n        fn stream_from_client(fd: i32, errmsg: *mut *mut c_char) -> *mut u8;\n        fn send_packet_to_foreign_server(fd: i32, packet: *mut u8, errmsg: *mut *mut c_char) -> usize;\n        fn close_socket(fd: i32);\n    }\n\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let mut daemon = start_daemon(socket_path, tmpdir, shm_basename, 0xffff, &mut errmsg);\n    if !errmsg.is_null() {\n        libc::fprintf(libc::fdopen(2, b\"w\\0\".as_ptr() as *const c_char),\n            b\"Failed to start language server:\\n%s\\n\\0\".as_ptr() as *const c_char, errmsg);\n        libc::free(errmsg as *mut c_void);\n        return 1;\n    }\n\n    while !SHUTTING_DOWN.load(Ordering::Relaxed) {\n        let client_fd = wait_for_client_with_timeout(daemon, 10000, &mut errmsg);\n        if !errmsg.is_null() { libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n        if client_fd <= 0 { continue; }\n\n        let data = stream_from_client(client_fd, &mut errmsg);\n        if data.is_null() || !errmsg.is_null() {\n            if !errmsg.is_null() { try_send_fail(client_fd, errmsg); libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n            libc::free(data as *mut c_void);\n            close_socket(client_fd);\n            continue;\n        }\n\n        let result = pool_dispatch_packet(data, config.local_dispatch, config.remote_dispatch, config.dispatch_ctx);\n        libc::free(data as *mut c_void);\n\n        if !result.is_null() {\n            send_packet_to_foreign_server(client_fd, result, &mut errmsg);\n            libc::free(result as *mut c_void);\n            if !errmsg.is_null() { libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n        }\n\n        libc::fflush(ptr::null_mut());\n        close_socket(client_fd);\n    }\n\n    close_daemon(&mut daemon);\n    0\n}\n\n// ── Pool main: fork mode ─────────────────────────────────────────────────────\n\nunsafe fn pool_main_fork(config: &PoolConfig, socket_path: *const c_char, tmpdir: *const c_char, shm_basename: *const c_char) -> i32 {\n    extern \"C\" {\n        fn start_daemon(socket_path: *const c_char, tmpdir: *const c_char, shm_basename: *const c_char, size: usize, errmsg: *mut *mut c_char) -> *mut c_void;\n        fn close_daemon(daemon: *mut *mut c_void);\n        fn wait_for_client_with_timeout(daemon: *mut c_void, timeout_us: i32, errmsg: *mut *mut c_char) -> i32;\n        fn stream_from_client(fd: i32, errmsg: *mut *mut c_char) -> *mut u8;\n        fn send_packet_to_foreign_server(fd: i32, packet: *mut u8, errmsg: *mut *mut c_char) -> usize;\n        fn close_socket(fd: i32);\n        fn shinit(basename: *const c_char, volume: usize, size: usize, errmsg: *mut *mut c_char) -> *mut c_void;\n    }\n\n    let mut errmsg: *mut c_char = ptr::null_mut();\n    let mut daemon = start_daemon(socket_path, tmpdir, shm_basename, 0xffff, &mut errmsg);\n    if !errmsg.is_null() {\n        libc::fprintf(libc::fdopen(2, b\"w\\0\".as_ptr() as *const c_char),\n            b\"Failed to start language server:\\n%s\\n\\0\".as_ptr() as *const c_char, errmsg);\n        libc::free(errmsg as *mut c_void);\n        return 1;\n    }\n\n    // Create socketpair for fd passing\n    let mut sv = [0i32; 2];\n    if libc::socketpair(libc::AF_UNIX, libc::SOCK_STREAM, 0, sv.as_mut_ptr()) < 0 {\n        close_daemon(&mut daemon);\n        return 1;\n    }\n\n    // Shared busy counter via mmap\n    let shared_counter = libc::mmap(\n        ptr::null_mut(), std::mem::size_of::<AtomicI32>(),\n        libc::PROT_READ | libc::PROT_WRITE,\n        libc::MAP_SHARED | libc::MAP_ANONYMOUS, -1, 0,\n    ) as *mut AtomicI32;\n    if shared_counter == libc::MAP_FAILED as *mut AtomicI32 {\n        libc::close(sv[0]); libc::close(sv[1]);\n        close_daemon(&mut daemon);\n        return 1;\n    }\n    (*shared_counter).store(0, Ordering::Relaxed);\n    SHARED_BUSY = shared_counter;\n\n    let nworkers = config.initial_workers.max(1);\n    let mut child_pids: Vec<i32> = Vec::new();\n\n    for i in 0..nworkers {\n        let pid = libc::fork();\n        if pid < 0 { break; }\n        if pid == 0 {\n            // Child\n            libc::close(sv[1]); // close write end\n            // Get daemon server_fd from opaque pointer and close it\n            // (we can't access the struct fields directly since daemon is *mut c_void,\n            //  but the child doesn't need to accept connections)\n            if let Some(pfk) = config.post_fork_child {\n                pfk(config.dispatch_ctx);\n            }\n\n            shinit(shm_basename, (i + 1) as usize, 0xffff, &mut errmsg);\n            if !errmsg.is_null() {\n                // Print the error to stderr before exiting so the nexus can\n                // capture it via the pool's redirected stderr file. Without\n                // this, a failed shinit in a forked worker child leaves no\n                // diagnostic trace anywhere.\n                libc::fprintf(\n                    libc::fdopen(2, b\"w\\0\".as_ptr() as *const c_char),\n                    b\"Worker %d shinit failed: %s\\n\\0\".as_ptr() as *const c_char,\n                    i as i32,\n                    errmsg,\n                );\n                libc::free(errmsg as *mut c_void);\n                libc::_exit(1);\n            }\n\n            // Worker loop: receive fds and process\n            loop {\n                if SHUTTING_DOWN.load(Ordering::Relaxed) { break; }\n                let mut pfd = libc::pollfd { fd: sv[0], events: libc::POLLIN, revents: 0 };\n                let ready = libc::poll(&mut pfd, 1, 100);\n                if ready <= 0 { continue; }\n\n                let client_fd = recv_fd(sv[0]);\n                if client_fd < 0 { break; }\n\n                let data = stream_from_client(client_fd, &mut errmsg);\n                if data.is_null() || !errmsg.is_null() {\n                    if !errmsg.is_null() { try_send_fail(client_fd, errmsg); libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n                    libc::free(data as *mut c_void);\n                    close_socket(client_fd);\n                    continue;\n                }\n\n                let result = pool_dispatch_packet(data, config.local_dispatch, config.remote_dispatch, config.dispatch_ctx);\n                libc::free(data as *mut c_void);\n\n                if !result.is_null() {\n                    send_packet_to_foreign_server(client_fd, result, &mut errmsg);\n                    libc::free(result as *mut c_void);\n                    if !errmsg.is_null() { libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n                }\n                libc::fflush(ptr::null_mut());\n                close_socket(client_fd);\n            }\n            libc::close(sv[0]);\n            libc::_exit(0);\n        }\n        child_pids.push(pid);\n    }\n    TOTAL_WORKERS.store(child_pids.len() as i32, Ordering::Relaxed);\n\n    // Parent: accept loop\n    while !SHUTTING_DOWN.load(Ordering::Relaxed) {\n        let client_fd = wait_for_client_with_timeout(daemon, 10000, &mut errmsg);\n        if !errmsg.is_null() { libc::free(errmsg as *mut c_void); errmsg = ptr::null_mut(); }\n        if client_fd > 0 {\n            send_fd(sv[1], client_fd);\n            close_socket(client_fd);\n        }\n\n        // Reap dead children\n        for pid in child_pids.iter_mut() {\n            if *pid > 0 {\n                let mut wstatus: i32 = 0;\n                if libc::waitpid(*pid, &mut wstatus, libc::WNOHANG) > 0 {\n                    *pid = -1;\n                }\n            }\n        }\n    }\n\n    // Shutdown\n    for &pid in &child_pids {\n        if pid > 0 { libc::kill(pid, libc::SIGTERM); }\n    }\n    for &pid in &child_pids {\n        if pid > 0 { libc::waitpid(pid, ptr::null_mut(), 0); }\n    }\n\n    libc::close(sv[0]); libc::close(sv[1]);\n    libc::munmap(shared_counter as *mut c_void, std::mem::size_of::<AtomicI32>());\n    SHARED_BUSY = ptr::null_mut();\n\n    close_daemon(&mut daemon);\n    0\n}\n\n// fd-passing helpers\nunsafe fn send_fd(sock: i32, fd: i32) -> i32 {\n    let mut buf = [0u8; 1];\n    let mut iov = libc::iovec { iov_base: buf.as_mut_ptr() as *mut c_void, iov_len: 1 };\n    let cmsg_space = libc::CMSG_SPACE(std::mem::size_of::<i32>() as u32) as usize;\n    let mut cmsg_buf = vec![0u8; cmsg_space];\n\n    let mut msg: libc::msghdr = std::mem::zeroed();\n    msg.msg_iov = &mut iov;\n    msg.msg_iovlen = 1;\n    msg.msg_control = cmsg_buf.as_mut_ptr() as *mut c_void;\n    msg.msg_controllen = cmsg_space as _;\n\n    let cmsg = libc::CMSG_FIRSTHDR(&msg);\n    (*cmsg).cmsg_level = libc::SOL_SOCKET;\n    (*cmsg).cmsg_type = libc::SCM_RIGHTS;\n    (*cmsg).cmsg_len = libc::CMSG_LEN(std::mem::size_of::<i32>() as u32) as _;\n    ptr::copy_nonoverlapping(&fd as *const i32 as *const u8, libc::CMSG_DATA(cmsg), std::mem::size_of::<i32>());\n\n    if libc::sendmsg(sock, &msg, 0) >= 0 { 0 } else { -1 }\n}\n\nunsafe fn recv_fd(sock: i32) -> i32 {\n    let mut buf = [0u8; 1];\n    let mut iov = libc::iovec { iov_base: buf.as_mut_ptr() as *mut c_void, iov_len: 1 };\n    let cmsg_space = libc::CMSG_SPACE(std::mem::size_of::<i32>() as u32) as usize;\n    let mut cmsg_buf = vec![0u8; cmsg_space];\n\n    let mut msg: libc::msghdr = std::mem::zeroed();\n    msg.msg_iov = &mut iov;\n    msg.msg_iovlen = 1;\n    msg.msg_control = cmsg_buf.as_mut_ptr() as *mut c_void;\n    msg.msg_controllen = cmsg_space as _;\n\n    let n = libc::recvmsg(sock, &mut msg, 0);\n    if n <= 0 { return -1; }\n\n    let cmsg = libc::CMSG_FIRSTHDR(&msg);\n    if cmsg.is_null() || (*cmsg).cmsg_level != libc::SOL_SOCKET || (*cmsg).cmsg_type != libc::SCM_RIGHTS {\n        return -1;\n    }\n\n    let mut fd: i32 = 0;\n    ptr::copy_nonoverlapping(libc::CMSG_DATA(cmsg), &mut fd as *mut i32 as *mut u8, std::mem::size_of::<i32>());\n    fd\n}\n\n// ── Entry point ──────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn pool_main(\n    argc: i32,\n    argv: *mut *mut c_char,\n    config: *mut PoolConfig,\n) -> i32 {\n    if argc != 4 {\n        libc::fprintf(libc::fdopen(2, b\"w\\0\".as_ptr() as *const c_char),\n            b\"Usage: %s <socket_path> <tmpdir> <shm_basename>\\n\\0\".as_ptr() as *const c_char,\n            if argc > 0 { *argv } else { b\"pool\\0\".as_ptr() as *const c_char });\n        return 1;\n    }\n\n    let cfg = &mut *config;\n    if cfg.initial_workers <= 0 { cfg.initial_workers = 1; }\n\n    SHUTTING_DOWN.store(false, Ordering::Relaxed);\n    BUSY_COUNT.store(0, Ordering::Relaxed);\n\n    // SIGTERM handler\n    let mut sa: libc::sigaction = std::mem::zeroed();\n    sa.sa_sigaction = pool_sigterm_handler as *const () as usize;\n    libc::sigemptyset(&mut sa.sa_mask);\n    libc::sigaction(libc::SIGTERM, &sa, ptr::null_mut());\n\n    let socket_path = *argv.add(1);\n    let tmpdir = *argv.add(2);\n    let shm_basename = *argv.add(3);\n\n    match cfg.concurrency {\n        PoolConcurrency::Threads => pool_main_threads(cfg, socket_path, tmpdir, shm_basename),\n        PoolConcurrency::Fork => pool_main_fork(cfg, socket_path, tmpdir, shm_basename),\n        PoolConcurrency::Single => pool_main_single(cfg, socket_path, tmpdir, shm_basename),\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/router_ffi.rs",
    "content": "//! C ABI wrappers for router subsystems.\n//! Replaces router.c. Routes requests to per-program daemons.\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::time::Instant;\n\nuse crate::daemon_ffi::{\n    DaemonConfig, DaemonResponse, MorlocSocket,\n};\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\nuse crate::http_ffi::{DaemonMethod, DaemonRequest, HttpMethod, HttpRequest};\n\n// -- Constants ----------------------------------------------------------------\n\n/// Max size of sun_path in sockaddr_un (108 on Linux)\nconst SUN_PATH_LEN: usize = 108;\n\n// Daemon startup polling (exponential backoff, ~5s total).\n// Sum of 100 * 1.25^i for i in 0..16 is ~4650ms.\nconst DAEMON_POLL_INITIAL_MS: f64 = 100.0;\nconst DAEMON_POLL_MULTIPLIER: f64 = 1.25;\nconst DAEMON_POLL_MAX_RETRIES: usize = 16;\n\n// -- Global state -------------------------------------------------------------\n\nstatic ROUTER_SHUTDOWN_REQUESTED: AtomicBool = AtomicBool::new(false);\n\nextern \"C\" fn router_signal_handler_fn(_sig: i32) {\n    ROUTER_SHUTDOWN_REQUESTED.store(true, Ordering::Relaxed);\n}\n\n// -- C-compatible types -------------------------------------------------------\n\n#[repr(C)]\npub struct RouterProgram {\n    pub name: *mut c_char,\n    pub manifest_path: *mut c_char,\n    pub manifest: *mut c_void, // manifest_t*\n    pub daemon_pid: libc::pid_t,\n    pub daemon_socket: [c_char; SUN_PATH_LEN],\n}\n\n#[repr(C)]\npub struct Router {\n    pub programs: *mut RouterProgram,\n    pub n_programs: usize,\n    pub fdb_path: *mut c_char,\n}\n\n// -- router_init --------------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn router_init(\n    fdb_path: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut Router {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn read_manifest(path: *const c_char, errmsg: *mut *mut c_char) -> *mut c_void;\n    }\n\n    let dir = libc::opendir(fdb_path);\n    if dir.is_null() {\n        let errno_msg = CStr::from_ptr(libc::strerror(crate::utility::errno_val()))\n            .to_string_lossy();\n        let path_str = CStr::from_ptr(fdb_path).to_string_lossy();\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\n                \"Cannot open fdb directory '{}': {}\",\n                path_str, errno_msg\n            )),\n        );\n        return ptr::null_mut();\n    }\n\n    let router = libc::calloc(1, std::mem::size_of::<Router>()) as *mut Router;\n    (*router).fdb_path = libc::strdup(fdb_path);\n\n    let mut cap: usize = 8;\n    (*router).programs =\n        libc::calloc(cap, std::mem::size_of::<RouterProgram>()) as *mut RouterProgram;\n    (*router).n_programs = 0;\n\n    loop {\n        let entry = libc::readdir(dir);\n        if entry.is_null() {\n            break;\n        }\n\n        let name = CStr::from_ptr((*entry).d_name.as_ptr());\n        let name_str = name.to_string_lossy();\n\n        if name_str.len() < 10 || !name_str.ends_with(\".manifest\") {\n            continue;\n        }\n\n        // Grow array if needed\n        if (*router).n_programs >= cap {\n            cap *= 2;\n            (*router).programs = libc::realloc(\n                (*router).programs as *mut c_void,\n                cap * std::mem::size_of::<RouterProgram>(),\n            ) as *mut RouterProgram;\n        }\n\n        let prog = &mut *(*router).programs.add((*router).n_programs);\n        ptr::write_bytes(prog as *mut RouterProgram, 0, 1);\n\n        // Extract program name (filename without .manifest)\n        let prog_name_len = name_str.len() - 9;\n        let prog_name = &name_str[..prog_name_len];\n        let c_prog_name = CString::new(prog_name).unwrap_or_default();\n        prog.name = libc::strdup(c_prog_name.as_ptr());\n\n        // Build full path\n        let fdb_str = CStr::from_ptr(fdb_path).to_string_lossy();\n        let full_path = format!(\"{}/{}\", fdb_str, name_str);\n        let c_path = CString::new(full_path).unwrap_or_default();\n        prog.manifest_path = libc::strdup(c_path.as_ptr());\n\n        // Read and parse manifest\n        let mut child_err: *mut c_char = ptr::null_mut();\n        prog.manifest = read_manifest(prog.manifest_path, &mut child_err);\n        if !child_err.is_null() {\n            let err_str = CStr::from_ptr(child_err).to_string_lossy();\n            let path_str = CStr::from_ptr(prog.manifest_path).to_string_lossy();\n            eprintln!(\"router: warning: failed to parse {}: {}\", path_str, err_str);\n            libc::free(child_err as *mut c_void);\n            libc::free(prog.name as *mut c_void);\n            libc::free(prog.manifest_path as *mut c_void);\n            continue;\n        }\n\n        prog.daemon_pid = 0;\n        // Set socket path\n        let socket_path = format!(\"/tmp/morloc-router-{}.sock\", prog_name);\n        let c_socket = CString::new(socket_path).unwrap_or_default();\n        let socket_bytes = c_socket.as_bytes_with_nul();\n        let copy_len = socket_bytes.len().min(SUN_PATH_LEN);\n        ptr::copy_nonoverlapping(\n            socket_bytes.as_ptr() as *const c_char,\n            prog.daemon_socket.as_mut_ptr(),\n            copy_len,\n        );\n\n        (*router).n_programs += 1;\n    }\n\n    libc::closedir(dir);\n\n    // Empty fdb is fine -- programs can be added while the router is running\n\n    router\n}\n\n// -- router_free --------------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn router_free(router: *mut Router) {\n    if router.is_null() {\n        return;\n    }\n\n    extern \"C\" {\n        fn free_manifest(manifest: *mut c_void);\n    }\n\n    for i in 0..(*router).n_programs {\n        let prog = &mut *(*router).programs.add(i);\n        libc::free(prog.name as *mut c_void);\n        libc::free(prog.manifest_path as *mut c_void);\n        if !prog.manifest.is_null() {\n            free_manifest(prog.manifest);\n        }\n        if prog.daemon_pid > 0 {\n            libc::kill(prog.daemon_pid, libc::SIGTERM);\n        }\n    }\n    libc::free((*router).programs as *mut c_void);\n    libc::free((*router).fdb_path as *mut c_void);\n    libc::free(router as *mut c_void);\n}\n\n// -- morloc-nexus path resolution ---------------------------------------------\n\n/// Locate the morloc-nexus executable.\n///\n/// Tries, in order:\n///   1. `$MORLOC_NEXUS` (explicit override)\n///   2. `$MORLOC_HOME/bin/morloc-nexus` (deploy convention)\n///   3. `morloc-nexus` on `$PATH`\n///   4. `$HOME/.local/bin/morloc-nexus` (bare-metal developer install)\n///\n/// Returns the path on the first candidate whose `access(_, X_OK)` succeeds,\n/// or the list of attempted paths on failure.\nunsafe fn find_morloc_nexus() -> Result<String, Vec<String>> {\n    fn is_executable(path: &str) -> bool {\n        if let Ok(c) = CString::new(path) {\n            unsafe { libc::access(c.as_ptr(), libc::X_OK) == 0 }\n        } else {\n            false\n        }\n    }\n\n    fn getenv_str(name: &str) -> Option<String> {\n        let c_name = CString::new(name).ok()?;\n        let p = unsafe { libc::getenv(c_name.as_ptr()) };\n        if p.is_null() {\n            None\n        } else {\n            Some(unsafe { CStr::from_ptr(p) }.to_string_lossy().into_owned())\n        }\n    }\n\n    let mut tried: Vec<String> = Vec::new();\n\n    // 1. $MORLOC_NEXUS\n    if let Some(p) = getenv_str(\"MORLOC_NEXUS\") {\n        if is_executable(&p) {\n            return Ok(p);\n        }\n        tried.push(format!(\"$MORLOC_NEXUS={}\", p));\n    }\n\n    // 2. $MORLOC_HOME/bin/morloc-nexus\n    if let Some(h) = getenv_str(\"MORLOC_HOME\") {\n        let p = format!(\"{}/bin/morloc-nexus\", h);\n        if is_executable(&p) {\n            return Ok(p);\n        }\n        tried.push(p);\n    }\n\n    // 3. Search $PATH\n    if let Some(path) = getenv_str(\"PATH\") {\n        for dir in path.split(':') {\n            if dir.is_empty() {\n                continue;\n            }\n            let p = format!(\"{}/morloc-nexus\", dir);\n            if is_executable(&p) {\n                return Ok(p);\n            }\n        }\n        tried.push(format!(\"$PATH ({})\", path));\n    }\n\n    // 4. $HOME/.local/bin/morloc-nexus\n    if let Some(h) = getenv_str(\"HOME\") {\n        let p = format!(\"{}/.local/bin/morloc-nexus\", h);\n        if is_executable(&p) {\n            return Ok(p);\n        }\n        tried.push(p);\n    }\n\n    Err(tried)\n}\n\n// -- router_start_program -----------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn router_start_program(\n    prog: *mut RouterProgram,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n\n    let nexus_path = match find_morloc_nexus() {\n        Ok(p) => p,\n        Err(tried) => {\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(format!(\n                    \"morloc-nexus binary not found; tried: {}\",\n                    tried.join(\", \")\n                )),\n            );\n            return false;\n        }\n    };\n    let c_nexus = CString::new(nexus_path.as_str()).unwrap_or_default();\n\n    let pid = libc::fork();\n    if pid == 0 {\n        // Child: exec morloc-nexus with --daemon\n        libc::setpgid(0, 0);\n        let arg_nexus = CString::new(\"morloc-nexus\").unwrap();\n        let arg_daemon = CString::new(\"--daemon\").unwrap();\n        let arg_socket = CString::new(\"--socket\").unwrap();\n        let socket_path = CStr::from_ptr((*prog).daemon_socket.as_ptr());\n        libc::execl(\n            c_nexus.as_ptr(),\n            arg_nexus.as_ptr(),\n            (*prog).manifest_path,\n            arg_daemon.as_ptr(),\n            arg_socket.as_ptr(),\n            socket_path.as_ptr(),\n            ptr::null::<c_char>(),\n        );\n        // If exec fails\n        let prog_name = CStr::from_ptr((*prog).name).to_string_lossy();\n        let errno_msg = CStr::from_ptr(libc::strerror(crate::utility::errno_val()))\n            .to_string_lossy();\n        eprintln!(\n            \"router: failed to exec morloc-nexus for {}: {}\",\n            prog_name, errno_msg\n        );\n        libc::_exit(1);\n    } else if pid > 0 {\n        (*prog).daemon_pid = pid;\n\n        // Poll until the daemon socket is connectable (exponential backoff)\n        let mut delay_ms = DAEMON_POLL_INITIAL_MS;\n        let mut connected = false;\n        for _attempt in 0..DAEMON_POLL_MAX_RETRIES {\n            let ts = libc::timespec {\n                tv_sec: 0,\n                tv_nsec: (delay_ms * 1_000_000.0) as i64,\n            };\n            libc::nanosleep(&ts, ptr::null_mut());\n\n            // Check if child died during startup\n            let mut status: i32 = 0;\n            let result = libc::waitpid(pid, &mut status, libc::WNOHANG);\n            if result == pid {\n                (*prog).daemon_pid = 0;\n                let prog_name = CStr::from_ptr((*prog).name).to_string_lossy();\n                set_errmsg(\n                    errmsg,\n                    &MorlocError::Other(format!(\n                        \"Daemon for '{}' exited during startup (status {})\",\n                        prog_name, status\n                    )),\n                );\n                return false;\n            }\n\n            // Try connecting to the daemon socket\n            let test_sock = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n            if test_sock >= 0 {\n                let mut addr: libc::sockaddr_un = std::mem::zeroed();\n                addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n                let socket_path = (*prog).daemon_socket.as_ptr();\n                let path_bytes = CStr::from_ptr(socket_path).to_bytes();\n                let copy_len = path_bytes.len().min(addr.sun_path.len() - 1);\n                ptr::copy_nonoverlapping(\n                    path_bytes.as_ptr() as *const c_char,\n                    addr.sun_path.as_mut_ptr(),\n                    copy_len,\n                );\n                let rc = libc::connect(\n                    test_sock,\n                    &addr as *const libc::sockaddr_un as *const libc::sockaddr,\n                    std::mem::size_of::<libc::sockaddr_un>() as libc::socklen_t,\n                );\n                libc::close(test_sock);\n                if rc == 0 {\n                    connected = true;\n                    break;\n                }\n            }\n\n            delay_ms *= DAEMON_POLL_MULTIPLIER;\n        }\n\n        if !connected {\n            // Final check: did the daemon die?\n            let mut status: i32 = 0;\n            let result = libc::waitpid(pid, &mut status, libc::WNOHANG);\n            if result == pid {\n                (*prog).daemon_pid = 0;\n                let prog_name = CStr::from_ptr((*prog).name).to_string_lossy();\n                set_errmsg(\n                    errmsg,\n                    &MorlocError::Other(format!(\n                        \"Daemon for '{}' exited during startup (status {})\",\n                        prog_name, status\n                    )),\n                );\n                return false;\n            }\n            // Daemon alive but socket not yet connectable -- proceed anyway,\n            // router_forward() will retry on connect failure.\n        }\n\n        true\n    } else {\n        let errno_msg = CStr::from_ptr(libc::strerror(crate::utility::errno_val()))\n            .to_string_lossy();\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\"fork failed: {}\", errno_msg)),\n        );\n        false\n    }\n}\n\n// -- router_forward -----------------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn router_forward(\n    router: *mut Router,\n    program: *const c_char,\n    request: *mut DaemonRequest,\n    errmsg: *mut *mut c_char,\n) -> *mut DaemonResponse {\n    clear_errmsg(errmsg);\n\n    extern \"C\" {\n        fn daemon_parse_response(\n            json: *const c_char,\n            len: usize,\n            errmsg: *mut *mut c_char,\n        ) -> *mut DaemonResponse;\n    }\n\n    // Find program\n    let program_name = CStr::from_ptr(program);\n    let mut prog: *mut RouterProgram = ptr::null_mut();\n    for i in 0..(*router).n_programs {\n        let p = (*router).programs.add(i);\n        if CStr::from_ptr((*p).name) == program_name {\n            prog = p;\n            break;\n        }\n    }\n\n    if prog.is_null() {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\n                \"Unknown program: {}\",\n                program_name.to_string_lossy()\n            )),\n        );\n        return ptr::null_mut();\n    }\n\n    // Check if a previously-started daemon has exited (crash recovery)\n    if (*prog).daemon_pid > 0 {\n        let mut status: i32 = 0;\n        let result = libc::waitpid((*prog).daemon_pid, &mut status, libc::WNOHANG);\n        if result == (*prog).daemon_pid || result < 0 {\n            let prog_name = CStr::from_ptr((*prog).name).to_string_lossy();\n            eprintln!(\n                \"router: daemon for '{}' exited (status {}), will restart\",\n                prog_name, status\n            );\n            (*prog).daemon_pid = 0;\n        }\n    }\n\n    // Start daemon if not running\n    if (*prog).daemon_pid <= 0 {\n        let mut child_err: *mut c_char = ptr::null_mut();\n        if !router_start_program(prog, &mut child_err) {\n            if !child_err.is_null() {\n                *errmsg = child_err;\n            } else {\n                set_errmsg(\n                    errmsg,\n                    &MorlocError::Other(\"Failed to start program daemon\".into()),\n                );\n            }\n            return ptr::null_mut();\n        }\n    }\n\n    // Serialize request to JSON\n    let req_json = serialize_request_to_json(request);\n    let c_req = CString::new(req_json.as_str()).unwrap_or_default();\n    let req_len = req_json.len();\n\n    // Try to connect, retry once on failure\n    let sock = connect_to_daemon(prog, errmsg);\n    let sock = if sock < 0 {\n        // Try restarting daemon\n        (*prog).daemon_pid = 0;\n        // Clear previous error\n        if !(*errmsg).is_null() {\n            libc::free(*errmsg as *mut c_void);\n            *errmsg = ptr::null_mut();\n        }\n        let mut child_err: *mut c_char = ptr::null_mut();\n        if !router_start_program(prog, &mut child_err) {\n            if !child_err.is_null() {\n                *errmsg = child_err;\n            }\n            return ptr::null_mut();\n        }\n        let sock2 = connect_to_daemon(prog, errmsg);\n        if sock2 < 0 {\n            return ptr::null_mut();\n        }\n        sock2\n    } else {\n        sock\n    };\n\n    // Send length-prefixed message\n    let len_buf: [u8; 4] = [\n        ((req_len >> 24) & 0xFF) as u8,\n        ((req_len >> 16) & 0xFF) as u8,\n        ((req_len >> 8) & 0xFF) as u8,\n        (req_len & 0xFF) as u8,\n    ];\n\n    let n = libc::send(\n        sock,\n        len_buf.as_ptr() as *const c_void,\n        4,\n        crate::utility::SEND_NOSIGNAL,\n    );\n    if n != 4 {\n        libc::close(sock);\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to send request length to daemon\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    let mut total_sent: usize = 0;\n    while total_sent < req_len {\n        let n = libc::send(\n            sock,\n            c_req.as_ptr().add(total_sent) as *const c_void,\n            req_len - total_sent,\n            crate::utility::SEND_NOSIGNAL,\n        );\n        if n <= 0 {\n            libc::close(sock);\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(\"Failed to send request body to daemon\".into()),\n            );\n            return ptr::null_mut();\n        }\n        total_sent += n as usize;\n    }\n\n    // Read response length\n    let mut resp_len_buf = [0u8; 4];\n    let n = libc::recv(\n        sock,\n        resp_len_buf.as_mut_ptr() as *mut c_void,\n        4,\n        libc::MSG_WAITALL,\n    );\n    if n != 4 {\n        libc::close(sock);\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to read response length from daemon\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    let resp_len = ((resp_len_buf[0] as u32) << 24)\n        | ((resp_len_buf[1] as u32) << 16)\n        | ((resp_len_buf[2] as u32) << 8)\n        | (resp_len_buf[3] as u32);\n\n    let resp_json = libc::malloc(resp_len as usize + 1) as *mut c_char;\n    if resp_json.is_null() {\n        libc::close(sock);\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to allocate response buffer\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    let mut total_recv: usize = 0;\n    while total_recv < resp_len as usize {\n        let n = libc::recv(\n            sock,\n            resp_json.add(total_recv) as *mut c_void,\n            resp_len as usize - total_recv,\n            0,\n        );\n        if n <= 0 {\n            libc::free(resp_json as *mut c_void);\n            libc::close(sock);\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(\"Failed to read response body from daemon\".into()),\n            );\n            return ptr::null_mut();\n        }\n        total_recv += n as usize;\n    }\n    *resp_json.add(resp_len as usize) = 0;\n    libc::close(sock);\n\n    let resp = daemon_parse_response(resp_json, resp_len as usize, errmsg);\n    libc::free(resp_json as *mut c_void);\n    resp\n}\n\n/// Helper: connect to a program daemon's unix socket with 60s timeouts.\nunsafe fn connect_to_daemon(\n    prog: *mut RouterProgram,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    let sock = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n    if sock < 0 {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to create socket\".into()),\n        );\n        return -1;\n    }\n    crate::utility::set_nosigpipe(sock);\n\n    let tv = libc::timeval {\n        tv_sec: 60,\n        tv_usec: 0,\n    };\n    libc::setsockopt(\n        sock,\n        libc::SOL_SOCKET,\n        libc::SO_RCVTIMEO,\n        &tv as *const libc::timeval as *const c_void,\n        std::mem::size_of::<libc::timeval>() as libc::socklen_t,\n    );\n    libc::setsockopt(\n        sock,\n        libc::SOL_SOCKET,\n        libc::SO_SNDTIMEO,\n        &tv as *const libc::timeval as *const c_void,\n        std::mem::size_of::<libc::timeval>() as libc::socklen_t,\n    );\n\n    let mut addr: libc::sockaddr_un = std::mem::zeroed();\n    addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n    let socket_path = (*prog).daemon_socket.as_ptr();\n    let path_bytes = CStr::from_ptr(socket_path).to_bytes();\n    let copy_len = path_bytes.len().min(addr.sun_path.len() - 1);\n    ptr::copy_nonoverlapping(\n        path_bytes.as_ptr() as *const c_char,\n        addr.sun_path.as_mut_ptr(),\n        copy_len,\n    );\n\n    if libc::connect(\n        sock,\n        &addr as *const libc::sockaddr_un as *const libc::sockaddr,\n        std::mem::size_of::<libc::sockaddr_un>() as libc::socklen_t,\n    ) < 0\n    {\n        libc::close(sock);\n        let prog_name = CStr::from_ptr((*prog).name).to_string_lossy();\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(format!(\n                \"Failed to connect to daemon for '{}'\",\n                prog_name\n            )),\n        );\n        return -1;\n    }\n\n    sock\n}\n\n/// Serialize a DaemonRequest to JSON using serde_json.\nunsafe fn serialize_request_to_json(request: *mut DaemonRequest) -> String {\n    let mut map = serde_json::Map::new();\n\n    if !(*request).id.is_null() {\n        let id = CStr::from_ptr((*request).id).to_string_lossy();\n        map.insert(\"id\".into(), serde_json::Value::String(id.into_owned()));\n    }\n\n    let method_str = match (*request).method {\n        DaemonMethod::Call => \"call\",\n        DaemonMethod::Discover => \"discover\",\n        DaemonMethod::Health => \"health\",\n        DaemonMethod::Eval => \"eval\",\n        DaemonMethod::Typecheck => \"typecheck\",\n        DaemonMethod::Bind => \"bind\",\n        DaemonMethod::Bindings => \"bindings\",\n        DaemonMethod::Unbind => \"unbind\",\n    };\n    map.insert(\n        \"method\".into(),\n        serde_json::Value::String(method_str.into()),\n    );\n\n    if !(*request).command.is_null() {\n        let cmd = CStr::from_ptr((*request).command).to_string_lossy();\n        map.insert(\n            \"command\".into(),\n            serde_json::Value::String(cmd.into_owned()),\n        );\n    }\n\n    if !(*request).args_json.is_null() {\n        let args_str = CStr::from_ptr((*request).args_json).to_string_lossy();\n        // Try to parse as JSON value to embed directly\n        if let Ok(v) = serde_json::from_str::<serde_json::Value>(&args_str) {\n            map.insert(\"args\".into(), v);\n        }\n    }\n\n    if !(*request).expr.is_null() {\n        let expr = CStr::from_ptr((*request).expr).to_string_lossy();\n        map.insert(\"expr\".into(), serde_json::Value::String(expr.into_owned()));\n    }\n\n    if !(*request).name.is_null() {\n        let name = CStr::from_ptr((*request).name).to_string_lossy();\n        map.insert(\"name\".into(), serde_json::Value::String(name.into_owned()));\n    }\n\n    serde_json::to_string(&map).unwrap_or_else(|_| \"{}\".into())\n}\n\n// -- router_build_discovery ---------------------------------------------------\n\n#[no_mangle]\npub unsafe extern \"C\" fn router_build_discovery(router: *mut Router) -> *mut c_char {\n    // Walk the canonical Manifest C struct from manifest_ffi.rs. No\n    // local mirror -- the in-memory layout is shared.\n    use crate::manifest_ffi::Manifest as ManifestC;\n\n    #[derive(serde::Serialize)]\n    struct CommandInfo {\n        name: String,\n        r#type: String,\n        return_type: String,\n    }\n\n    #[derive(serde::Serialize)]\n    struct ProgramInfo {\n        name: String,\n        running: bool,\n        #[serde(skip_serializing_if = \"Option::is_none\")]\n        commands: Option<Vec<CommandInfo>>,\n    }\n\n    #[derive(serde::Serialize)]\n    struct Discovery {\n        programs: Vec<ProgramInfo>,\n    }\n\n    let mut programs = Vec::with_capacity((*router).n_programs);\n\n    for i in 0..(*router).n_programs {\n        let prog = &*(*router).programs.add(i);\n        let name = CStr::from_ptr(prog.name).to_string_lossy().into_owned();\n        let running =\n            prog.daemon_pid > 0 && libc::kill(prog.daemon_pid, 0) == 0;\n\n        let commands = if !prog.manifest.is_null() {\n            let mv = prog.manifest as *const ManifestC;\n            let mut cmds = Vec::with_capacity((*mv).n_commands);\n            for c in 0..(*mv).n_commands {\n                let cmd = &*(*mv).commands.add(c);\n                let cmd_name = CStr::from_ptr(cmd.name).to_string_lossy().into_owned();\n                let cmd_type = if cmd.is_pure { \"pure\" } else { \"remote\" };\n                let ret_type = if !cmd.ret.type_desc.is_null() {\n                    CStr::from_ptr(cmd.ret.type_desc)\n                        .to_string_lossy()\n                        .into_owned()\n                } else {\n                    String::new()\n                };\n                cmds.push(CommandInfo {\n                    name: cmd_name,\n                    r#type: cmd_type.into(),\n                    return_type: ret_type,\n                });\n            }\n            Some(cmds)\n        } else {\n            None\n        };\n\n        programs.push(ProgramInfo {\n            name,\n            running,\n            commands,\n        });\n    }\n\n    let disco = Discovery { programs };\n    let json = serde_json::to_string(&disco).unwrap_or_else(|_| \"{}\".into());\n    let c = CString::new(json).unwrap_or_default();\n    libc::strdup(c.as_ptr())\n}\n\n// -- Router HTTP request routing ----------------------------------------------\n\n/// Route HTTP requests for the router. Sets *out_program to the target program\n/// name (caller-owned) for per-program requests, or NULL for router-level requests.\nunsafe fn router_http_to_request(\n    req: *mut HttpRequest,\n    out_program: *mut *mut c_char,\n    errmsg: *mut *mut c_char,\n) -> *mut DaemonRequest {\n    clear_errmsg(errmsg);\n\n    let dreq = libc::calloc(1, std::mem::size_of::<DaemonRequest>()) as *mut DaemonRequest;\n    if dreq.is_null() {\n        set_errmsg(\n            errmsg,\n            &MorlocError::Other(\"Failed to allocate daemon_request_t\".into()),\n        );\n        return ptr::null_mut();\n    }\n\n    *out_program = ptr::null_mut();\n\n    let path = CStr::from_ptr((*req).path.as_ptr())\n        .to_str()\n        .unwrap_or(\"\");\n    let method = (*req).method;\n\n    let body_str = if !(*req).body.is_null() && (*req).body_len > 0 {\n        std::str::from_utf8(std::slice::from_raw_parts(\n            (*req).body as *const u8,\n            (*req).body_len,\n        ))\n        .unwrap_or(\"\")\n    } else {\n        \"\"\n    };\n\n    // GET /health or GET /health/<program>\n    if method == HttpMethod::Get && (path == \"/health\" || path.starts_with(\"/health/\")) {\n        (*dreq).method = DaemonMethod::Health;\n        if path.starts_with(\"/health/\") {\n            let prog_name = &path[8..];\n            if !prog_name.is_empty() {\n                let c = CString::new(prog_name).unwrap_or_default();\n                *out_program = libc::strdup(c.as_ptr());\n            }\n        }\n        return dreq;\n    }\n\n    // GET /programs or GET /discover\n    if method == HttpMethod::Get && (path == \"/programs\" || path == \"/discover\") {\n        (*dreq).method = DaemonMethod::Discover;\n        return dreq;\n    }\n\n    // GET /discover/<program>\n    if method == HttpMethod::Get && path.starts_with(\"/discover/\") {\n        let prog_name = &path[10..];\n        if !prog_name.is_empty() {\n            let c = CString::new(prog_name).unwrap_or_default();\n            *out_program = libc::strdup(c.as_ptr());\n            (*dreq).method = DaemonMethod::Discover;\n            return dreq;\n        }\n    }\n\n    // POST /eval\n    if method == HttpMethod::Post && path == \"/eval\" {\n        (*dreq).method = DaemonMethod::Eval;\n        if !body_str.is_empty() {\n            if let Ok(v) = serde_json::from_str::<serde_json::Value>(body_str) {\n                if let Some(expr) = v.get(\"expr\").and_then(|e| e.as_str()) {\n                    let c = CString::new(expr).unwrap_or_default();\n                    (*dreq).expr = libc::strdup(c.as_ptr());\n                }\n            }\n        }\n        if (*dreq).expr.is_null() {\n            libc::free(dreq as *mut c_void);\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(\"Missing 'expr' field in /eval request body\".into()),\n            );\n            return ptr::null_mut();\n        }\n        return dreq;\n    }\n\n    // POST /call/<program>/<command>\n    if method == HttpMethod::Post && path.starts_with(\"/call/\") {\n        let rest = &path[6..];\n        let slash = rest.find('/');\n        match slash {\n            Some(pos) if pos + 1 < rest.len() => {\n                let prog_name = &rest[..pos];\n                let cmd_name = &rest[pos + 1..];\n                let c_prog = CString::new(prog_name).unwrap_or_default();\n                *out_program = libc::strdup(c_prog.as_ptr());\n                (*dreq).method = DaemonMethod::Call;\n                let c_cmd = CString::new(cmd_name).unwrap_or_default();\n                (*dreq).command = libc::strdup(c_cmd.as_ptr());\n\n                // Parse body for args\n                let trimmed = body_str.trim();\n                if trimmed.starts_with('[') {\n                    let c = CString::new(trimmed).unwrap_or_default();\n                    (*dreq).args_json = libc::strdup(c.as_ptr());\n                } else if trimmed.starts_with('{') {\n                    if let Ok(v) = serde_json::from_str::<serde_json::Value>(trimmed) {\n                        if let Some(args) = v.get(\"args\") {\n                            let args_str = serde_json::to_string(args).unwrap_or_default();\n                            let c = CString::new(args_str).unwrap_or_default();\n                            (*dreq).args_json = libc::strdup(c.as_ptr());\n                        }\n                    }\n                }\n                return dreq;\n            }\n            _ => {\n                libc::free(dreq as *mut c_void);\n                set_errmsg(\n                    errmsg,\n                    &MorlocError::Other(\"Expected /call/<program>/<command>\".into()),\n                );\n                return ptr::null_mut();\n            }\n        }\n    }\n\n    // OPTIONS (CORS)\n    if method == HttpMethod::Options {\n        (*dreq).method = DaemonMethod::Health;\n        return dreq;\n    }\n\n    libc::free(dreq as *mut c_void);\n    let method_str = match method {\n        HttpMethod::Get => \"GET\",\n        HttpMethod::Post => \"POST\",\n        HttpMethod::Delete => \"DELETE\",\n        HttpMethod::Options => \"OPTIONS\",\n    };\n    set_errmsg(\n        errmsg,\n        &MorlocError::Other(format!(\"Unknown router endpoint: {} {}\", method_str, path)),\n    );\n    ptr::null_mut()\n}\n\n// -- Router event loop --------------------------------------------------------\n\nconst ROUTER_MAX_LISTENERS: usize = 3;\n\n#[no_mangle]\npub unsafe extern \"C\" fn router_run(config: *mut DaemonConfig, router: *mut Router) {\n    extern \"C\" {\n        fn http_parse_request(fd: i32, errmsg: *mut *mut c_char) -> *mut HttpRequest;\n        fn http_free_request(req: *mut HttpRequest);\n        fn http_write_response(\n            fd: i32,\n            status: i32,\n            content_type: *const c_char,\n            body: *const c_char,\n            body_len: usize,\n        ) -> bool;\n        fn daemon_dispatch(\n            manifest: *mut c_void,\n            request: *mut DaemonRequest,\n            sockets: *mut MorlocSocket,\n            shm_basename: *const c_char,\n        ) -> *mut DaemonResponse;\n        fn daemon_serialize_response(\n            response: *mut DaemonResponse,\n            out_len: *mut usize,\n        ) -> *mut c_char;\n        fn daemon_free_request(req: *mut DaemonRequest);\n        fn daemon_free_response(resp: *mut DaemonResponse);\n        fn daemon_set_eval_timeout(timeout_sec: i32);\n        fn manifest_to_discovery_json(manifest: *const c_void) -> *mut c_char;\n    }\n\n    daemon_set_eval_timeout((*config).eval_timeout);\n\n    // Install signal handlers\n    ROUTER_SHUTDOWN_REQUESTED.store(false, Ordering::Relaxed);\n    let handler: libc::sighandler_t =\n        std::mem::transmute::<extern \"C\" fn(i32), libc::sighandler_t>(router_signal_handler_fn);\n    libc::signal(libc::SIGTERM, handler);\n    libc::signal(libc::SIGINT, handler);\n\n    let mut fds = [libc::pollfd {\n        fd: -1,\n        events: 0,\n        revents: 0,\n    }; ROUTER_MAX_LISTENERS];\n    let mut nfds: usize = 0;\n\n    let ct = b\"application/json\\0\";\n\n    // HTTP listener\n    if (*config).http_port > 0 {\n        let http_fd = libc::socket(libc::AF_INET, libc::SOCK_STREAM, 0);\n        if http_fd < 0 {\n            eprintln!(\"router: failed to create http socket\");\n            return;\n        }\n        let opt: i32 = 1;\n        libc::setsockopt(\n            http_fd,\n            libc::SOL_SOCKET,\n            libc::SO_REUSEADDR,\n            &opt as *const i32 as *const c_void,\n            std::mem::size_of::<i32>() as libc::socklen_t,\n        );\n        let mut addr: libc::sockaddr_in = std::mem::zeroed();\n        addr.sin_family = libc::AF_INET as libc::sa_family_t;\n        addr.sin_addr.s_addr = libc::INADDR_ANY;\n        addr.sin_port = ((*config).http_port as u16).to_be();\n        if libc::bind(\n            http_fd,\n            &addr as *const libc::sockaddr_in as *const libc::sockaddr,\n            std::mem::size_of::<libc::sockaddr_in>() as libc::socklen_t,\n        ) < 0\n        {\n            eprintln!(\n                \"router: failed to bind http port {}\",\n                (*config).http_port\n            );\n            libc::close(http_fd);\n            return;\n        }\n        libc::listen(http_fd, 16);\n        eprintln!(\"router: listening on http port {}\", (*config).http_port);\n        fds[nfds].fd = http_fd;\n        fds[nfds].events = libc::POLLIN as i16;\n        nfds += 1;\n    }\n\n    // Unix socket\n    if !(*config).unix_socket_path.is_null() {\n        let sock_fd = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n        if sock_fd < 0 {\n            eprintln!(\"router: failed to create unix socket\");\n            return;\n        }\n        let mut addr: libc::sockaddr_un = std::mem::zeroed();\n        addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n        let path_bytes = CStr::from_ptr((*config).unix_socket_path).to_bytes();\n        let copy_len = path_bytes.len().min(addr.sun_path.len() - 1);\n        ptr::copy_nonoverlapping(\n            path_bytes.as_ptr() as *const c_char,\n            addr.sun_path.as_mut_ptr(),\n            copy_len,\n        );\n        libc::unlink((*config).unix_socket_path);\n        if libc::bind(\n            sock_fd,\n            &addr as *const libc::sockaddr_un as *const libc::sockaddr,\n            std::mem::size_of::<libc::sockaddr_un>() as libc::socklen_t,\n        ) < 0\n        {\n            eprintln!(\"router: failed to bind unix socket\");\n            libc::close(sock_fd);\n            return;\n        }\n        libc::listen(sock_fd, 16);\n        eprintln!(\n            \"router: listening on unix socket {}\",\n            CStr::from_ptr((*config).unix_socket_path).to_string_lossy()\n        );\n        fds[nfds].fd = sock_fd;\n        fds[nfds].events = libc::POLLIN as i16;\n        nfds += 1;\n    }\n\n    if nfds == 0 {\n        eprintln!(\"router: no listeners configured\");\n        return;\n    }\n\n    // Eagerly start all program daemons so /health reports ok immediately\n    for i in 0..(*router).n_programs {\n        let prog = &mut *(*router).programs.add(i);\n        if (*prog).daemon_pid <= 0 {\n            let mut child_err: *mut c_char = ptr::null_mut();\n            if router_start_program(prog, &mut child_err) {\n                eprintln!(\n                    \"router: started daemon for '{}'\",\n                    CStr::from_ptr((*prog).name).to_string_lossy()\n                );\n            } else {\n                let err_msg = if !child_err.is_null() {\n                    let s = CStr::from_ptr(child_err).to_string_lossy().to_string();\n                    libc::free(child_err as *mut c_void);\n                    s\n                } else {\n                    \"unknown error\".to_string()\n                };\n                eprintln!(\n                    \"router: warning: failed to start daemon for '{}': {}\",\n                    CStr::from_ptr((*prog).name).to_string_lossy(),\n                    err_msg\n                );\n            }\n        }\n    }\n\n    while !ROUTER_SHUTDOWN_REQUESTED.load(Ordering::Relaxed) {\n        let ready = libc::poll(fds.as_mut_ptr(), nfds as libc::nfds_t, 1000);\n        if ready < 0 {\n            if crate::utility::errno_val() == libc::EINTR {\n                continue;\n            }\n            eprintln!(\"router: poll error\");\n            break;\n        }\n        if ready == 0 {\n            continue;\n        }\n\n        for i in 0..nfds {\n            if fds[i].revents & libc::POLLIN as i16 == 0 {\n                continue;\n            }\n\n            let client_fd = libc::accept(fds[i].fd, ptr::null_mut(), ptr::null_mut());\n            if client_fd < 0 {\n                continue;\n            }\n            let req_start = Instant::now();\n            crate::utility::set_nosigpipe(client_fd);\n\n            let tv = libc::timeval {\n                tv_sec: 30,\n                tv_usec: 0,\n            };\n            libc::setsockopt(\n                client_fd,\n                libc::SOL_SOCKET,\n                libc::SO_RCVTIMEO,\n                &tv as *const libc::timeval as *const c_void,\n                std::mem::size_of::<libc::timeval>() as libc::socklen_t,\n            );\n            libc::setsockopt(\n                client_fd,\n                libc::SOL_SOCKET,\n                libc::SO_SNDTIMEO,\n                &tv as *const libc::timeval as *const c_void,\n                std::mem::size_of::<libc::timeval>() as libc::socklen_t,\n            );\n\n            let mut err: *mut c_char = ptr::null_mut();\n\n            let http_req = http_parse_request(client_fd, &mut err);\n            if !err.is_null() {\n                let body = b\"{\\\"status\\\":\\\"error\\\",\\\"error\\\":\\\"Bad request\\\"}\\0\";\n                http_write_response(\n                    client_fd,\n                    400,\n                    ct.as_ptr() as *const c_char,\n                    body.as_ptr() as *const c_char,\n                    body.len() - 1,\n                );\n                libc::free(err as *mut c_void);\n                let elapsed = req_start.elapsed();\n                eprintln!(\"router: ??? ??? -> 400 ({:.1}ms)\", elapsed.as_secs_f64() * 1000.0);\n                libc::close(client_fd);\n                continue;\n            }\n\n            // Extract method and path for access logging before request is consumed\n            let log_method = match (*http_req).method {\n                HttpMethod::Get => \"GET\",\n                HttpMethod::Post => \"POST\",\n                HttpMethod::Delete => \"DELETE\",\n                HttpMethod::Options => \"OPTIONS\",\n            };\n            let log_path_cstr = CStr::from_ptr((*http_req).path.as_ptr());\n            let log_path = log_path_cstr.to_str().unwrap_or(\"???\").to_string();\n\n            let mut target_program: *mut c_char = ptr::null_mut();\n            let dreq = router_http_to_request(http_req, &mut target_program, &mut err);\n            http_free_request(http_req);\n\n            if !err.is_null() {\n                let err_json = make_error_json(&CStr::from_ptr(err).to_string_lossy());\n                let c = CString::new(err_json.as_str()).unwrap_or_default();\n                http_write_response(\n                    client_fd,\n                    404,\n                    ct.as_ptr() as *const c_char,\n                    c.as_ptr(),\n                    err_json.len(),\n                );\n                libc::free(err as *mut c_void);\n                let elapsed = req_start.elapsed();\n                eprintln!(\"router: {} {} -> 404 ({:.1}ms)\", log_method, log_path, elapsed.as_secs_f64() * 1000.0);\n                libc::close(client_fd);\n                continue;\n            }\n\n            // Track response status for access log\n            let mut resp_status: i32 = 200;\n\n            // Router-level requests\n            if target_program.is_null() {\n                if (*dreq).method == DaemonMethod::Health {\n                    // Aggregate per-program health\n                    let mut all_ok = true;\n                    let mut prog_entries = Vec::new();\n                    for i in 0..(*router).n_programs {\n                        let prog = &*(*router).programs.add(i);\n                        let name = CStr::from_ptr(prog.name).to_string_lossy();\n                        let alive =\n                            prog.daemon_pid > 0 && libc::kill(prog.daemon_pid, 0) == 0;\n                        if !alive {\n                            all_ok = false;\n                        }\n                        let status_str = if alive { \"ok\" } else { \"error\" };\n                        prog_entries.push(serde_json::json!({\n                            \"program\": name.as_ref(),\n                            \"status\": status_str,\n                        }));\n                    }\n                    let overall = if all_ok { \"ok\" } else { \"degraded\" };\n                    let body = serde_json::json!({\n                        \"status\": overall,\n                        \"programs\": prog_entries,\n                    }).to_string();\n                    let status_code = if all_ok { 200 } else { 503 };\n                    resp_status = status_code;\n                    let c = CString::new(body.as_str()).unwrap_or_default();\n                    http_write_response(\n                        client_fd,\n                        status_code,\n                        ct.as_ptr() as *const c_char,\n                        c.as_ptr(),\n                        body.len(),\n                    );\n                } else if (*dreq).method == DaemonMethod::Discover {\n                    let disco = router_build_discovery(router);\n                    let disco_len = libc::strlen(disco);\n                    http_write_response(\n                        client_fd,\n                        200,\n                        ct.as_ptr() as *const c_char,\n                        disco,\n                        disco_len,\n                    );\n                    libc::free(disco as *mut c_void);\n                } else if (*dreq).method == DaemonMethod::Eval {\n                    // daemon_dispatch takes manifest as first arg, NULL is fine for eval\n                    let resp = daemon_dispatch(ptr::null_mut(), dreq, ptr::null_mut(), ptr::null());\n                    let mut resp_len: usize = 0;\n                    let resp_json = daemon_serialize_response(resp, &mut resp_len);\n                    let status = if (*resp).success { 200 } else { 500 };\n                    resp_status = status;\n                    http_write_response(\n                        client_fd,\n                        status,\n                        ct.as_ptr() as *const c_char,\n                        resp_json,\n                        resp_len,\n                    );\n                    libc::free(resp_json as *mut c_void);\n                    daemon_free_response(resp);\n                }\n                daemon_free_request(dreq);\n                let elapsed = req_start.elapsed();\n                eprintln!(\"router: {} {} -> {} ({:.1}ms)\", log_method, log_path, resp_status, elapsed.as_secs_f64() * 1000.0);\n                libc::close(client_fd);\n                continue;\n            }\n\n            // Per-program request\n            if (*dreq).method == DaemonMethod::Health {\n                let mut found = false;\n                for p in 0..(*router).n_programs {\n                    let rprog = &*(*router).programs.add(p);\n                    if CStr::from_ptr(rprog.name) == CStr::from_ptr(target_program) {\n                        found = true;\n                        let alive =\n                            rprog.daemon_pid > 0 && libc::kill(rprog.daemon_pid, 0) == 0;\n                        let prog_str = CStr::from_ptr(rprog.name).to_string_lossy();\n                        let body = if alive {\n                            serde_json::json!({\n                                \"status\": \"ok\",\n                                \"program\": prog_str.as_ref(),\n                            }).to_string()\n                        } else {\n                            resp_status = 503;\n                            serde_json::json!({\n                                \"status\": \"error\",\n                                \"program\": prog_str.as_ref(),\n                                \"error\": \"daemon not running\",\n                            }).to_string()\n                        };\n                        let c = CString::new(body.as_str()).unwrap_or_default();\n                        http_write_response(\n                            client_fd,\n                            resp_status,\n                            ct.as_ptr() as *const c_char,\n                            c.as_ptr(),\n                            body.len(),\n                        );\n                        break;\n                    }\n                }\n                if !found {\n                    resp_status = 404;\n                    let body = b\"{\\\"status\\\":\\\"error\\\",\\\"error\\\":\\\"Unknown program\\\"}\\0\";\n                    http_write_response(\n                        client_fd,\n                        404,\n                        ct.as_ptr() as *const c_char,\n                        body.as_ptr() as *const c_char,\n                        body.len() - 1,\n                    );\n                }\n            } else if (*dreq).method == DaemonMethod::Discover {\n                let mut found = false;\n                for p in 0..(*router).n_programs {\n                    let rprog = &*(*router).programs.add(p);\n                    if CStr::from_ptr(rprog.name) == CStr::from_ptr(target_program) {\n                        if !rprog.manifest.is_null() {\n                            let disco = manifest_to_discovery_json(rprog.manifest);\n                            let disco_len = libc::strlen(disco);\n                            http_write_response(\n                                client_fd,\n                                200,\n                                ct.as_ptr() as *const c_char,\n                                disco,\n                                disco_len,\n                            );\n                            libc::free(disco as *mut c_void);\n                            found = true;\n                        }\n                        break;\n                    }\n                }\n                if !found {\n                    resp_status = 404;\n                    let body = b\"{\\\"status\\\":\\\"error\\\",\\\"error\\\":\\\"Unknown program\\\"}\\0\";\n                    http_write_response(\n                        client_fd,\n                        404,\n                        ct.as_ptr() as *const c_char,\n                        body.as_ptr() as *const c_char,\n                        body.len() - 1,\n                    );\n                }\n            } else {\n                // Forward to program daemon\n                let resp = router_forward(router, target_program, dreq, &mut err);\n                if !err.is_null() {\n                    resp_status = 500;\n                    let err_json =\n                        make_error_json(&CStr::from_ptr(err).to_string_lossy());\n                    let c = CString::new(err_json.as_str()).unwrap_or_default();\n                    http_write_response(\n                        client_fd,\n                        500,\n                        ct.as_ptr() as *const c_char,\n                        c.as_ptr(),\n                        err_json.len(),\n                    );\n                    libc::free(err as *mut c_void);\n                } else {\n                    let mut resp_len: usize = 0;\n                    let resp_json = daemon_serialize_response(resp, &mut resp_len);\n                    let status = if (*resp).success { 200 } else { 500 };\n                    resp_status = status;\n                    http_write_response(\n                        client_fd,\n                        status,\n                        ct.as_ptr() as *const c_char,\n                        resp_json,\n                        resp_len,\n                    );\n                    libc::free(resp_json as *mut c_void);\n                    daemon_free_response(resp);\n                }\n            }\n\n            libc::free(target_program as *mut c_void);\n            daemon_free_request(dreq);\n            let elapsed = req_start.elapsed();\n            eprintln!(\"router: {} {} -> {} ({:.1}ms)\", log_method, log_path, resp_status, elapsed.as_secs_f64() * 1000.0);\n            libc::close(client_fd);\n        }\n    }\n\n    // Kill all program daemons\n    for i in 0..(*router).n_programs {\n        let prog = &*(*router).programs.add(i);\n        if prog.daemon_pid > 0 {\n            libc::kill(prog.daemon_pid, libc::SIGTERM);\n            libc::unlink(prog.daemon_socket.as_ptr());\n        }\n    }\n\n    // Wait for children\n    for i in 0..(*router).n_programs {\n        let prog = &*(*router).programs.add(i);\n        if prog.daemon_pid > 0 {\n            libc::waitpid(prog.daemon_pid, ptr::null_mut(), 0);\n        }\n    }\n\n    // Close listeners\n    for i in 0..nfds {\n        libc::close(fds[i].fd);\n    }\n\n    if !(*config).unix_socket_path.is_null() {\n        libc::unlink((*config).unix_socket_path);\n    }\n}\n\n/// Build a JSON error response string.\nfn make_error_json(error: &str) -> String {\n    let map: serde_json::Map<String, serde_json::Value> = [\n        (\"status\".into(), serde_json::Value::String(\"error\".into())),\n        (\"error\".into(), serde_json::Value::String(error.into())),\n    ]\n    .into_iter()\n    .collect();\n    serde_json::to_string(&map).unwrap_or_else(|_| \"{}\".into())\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/schema.rs",
    "content": "use crate::error::MorlocError;\n\n/// Morloc serial type identifiers, matching the C enum morloc_serial_type.\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n#[repr(u32)]\npub enum SerialType {\n    Nil = 0,\n    Bool = 1,\n    Sint8 = 2,\n    Sint16 = 3,\n    Sint32 = 4,\n    Sint64 = 5,\n    Uint8 = 6,\n    Uint16 = 7,\n    Uint32 = 8,\n    Uint64 = 9,\n    Float32 = 10,\n    Float64 = 11,\n    Tensor = 12,\n    String = 13,\n    Array = 14,\n    Tuple = 15,\n    Map = 16,\n    Optional = 17,\n}\n\n/// Schema character codes for parsing schema strings.\nconst SCHEMA_NIL: u8 = b'z';\nconst SCHEMA_BOOL: u8 = b'b';\nconst SCHEMA_SINT: u8 = b'i';\nconst SCHEMA_UINT: u8 = b'u';\nconst SCHEMA_FLOAT: u8 = b'f';\nconst SCHEMA_STRING: u8 = b's';\nconst SCHEMA_ARRAY: u8 = b'a';\nconst SCHEMA_TENSOR: u8 = b'T';\nconst SCHEMA_TUPLE: u8 = b't';\nconst SCHEMA_MAP: u8 = b'm';\nconst SCHEMA_OPTIONAL: u8 = b'?';\n\n/// Recursive schema definition, mirroring the C Schema struct.\n#[derive(Debug, Clone)]\npub struct Schema {\n    pub serial_type: SerialType,\n    /// Number of parameters (e.g., array has 1, tuple has N).\n    pub size: usize,\n    /// Byte width when stored in a fixed-width array.\n    pub width: usize,\n    /// Field offsets for tuples/records, or ndim storage for tensors.\n    pub offsets: Vec<usize>,\n    /// Optional type hint string.\n    pub hint: Option<String>,\n    /// Child schemas (element type for arrays, field types for tuples, etc.).\n    pub parameters: Vec<Schema>,\n    /// Field names for records (None for non-record types).\n    pub keys: Vec<String>,\n}\n\nimpl Schema {\n    pub fn primitive(serial_type: SerialType) -> Self {\n        use crate::shm;\n        let width = match serial_type {\n            SerialType::Nil => 0,\n            SerialType::Bool | SerialType::Sint8 | SerialType::Uint8 => 1,\n            SerialType::Sint16 | SerialType::Uint16 => 2,\n            SerialType::Sint32 | SerialType::Uint32 | SerialType::Float32 => 4,\n            SerialType::Sint64 | SerialType::Uint64 | SerialType::Float64 => 8,\n            SerialType::String => std::mem::size_of::<shm::Array>(),\n            _ => 0,\n        };\n        Schema {\n            serial_type,\n            size: 0,\n            width,\n            offsets: Vec::new(),\n            hint: None,\n            parameters: Vec::new(),\n            keys: Vec::new(),\n        }\n    }\n\n    /// Returns true if this type has a fixed byte width (no variable-length data).\n    pub fn is_fixed_width(&self) -> bool {\n        match self.serial_type {\n            SerialType::Nil\n            | SerialType::Bool\n            | SerialType::Sint8\n            | SerialType::Sint16\n            | SerialType::Sint32\n            | SerialType::Sint64\n            | SerialType::Uint8\n            | SerialType::Uint16\n            | SerialType::Uint32\n            | SerialType::Uint64\n            | SerialType::Float32\n            | SerialType::Float64 => true,\n            SerialType::Tuple => self.parameters.iter().all(|p| p.is_fixed_width()),\n            SerialType::Optional => false,\n            _ => false,\n        }\n    }\n\n    /// Alignment requirement for this type.\n    pub fn alignment(&self) -> usize {\n        match self.serial_type {\n            SerialType::Nil => 1,\n            SerialType::Bool | SerialType::Sint8 | SerialType::Uint8 => 1,\n            SerialType::Sint16 | SerialType::Uint16 => 2,\n            SerialType::Sint32 | SerialType::Uint32 | SerialType::Float32 => 4,\n            SerialType::Sint64 | SerialType::Uint64 | SerialType::Float64 => 8,\n            SerialType::String | SerialType::Array | SerialType::Map | SerialType::Tensor => {\n                std::mem::size_of::<usize>() // pointer-sized alignment\n            }\n            SerialType::Tuple => {\n                self.parameters\n                    .iter()\n                    .map(|p| p.alignment())\n                    .max()\n                    .unwrap_or(1)\n            }\n            SerialType::Optional => {\n                if let Some(inner) = self.parameters.first() {\n                    std::cmp::max(1, inner.alignment())\n                } else {\n                    1\n                }\n            }\n        }\n    }\n}\n\n/// Parse a schema string into a Schema tree.\n///\n/// Positional format (no parentheses/commas):\n/// - `z` -> Nil, `b` -> Bool, `s` -> String\n/// - `i4` -> Sint32, `u8` -> Uint64, `f8` -> Float64\n/// - `ai4` -> Array of Sint32\n/// - `t2i4s` -> Tuple of (Sint32, String)\n/// - `m24namesi4` -> Map with keys \"name\"->String, \"i4\"  (base-62 field count, then key-len + key + value for each)\n/// - `?i4` -> Optional Sint32\n/// - `T2f8` -> 2D Tensor of Float64\n/// - `<hint>i4` -> Sint32 with hint annotation\npub fn parse_schema(input: &str) -> Result<Schema, MorlocError> {\n    let bytes = input.as_bytes();\n    let (schema, consumed) = parse_schema_r(bytes, 0)?;\n    if consumed != bytes.len() {\n        return Err(MorlocError::Schema(format!(\n            \"trailing characters after schema at position {consumed}\"\n        )));\n    }\n    Ok(schema)\n}\n\n/// Recursive schema parser matching the C `parse_schema_r` format exactly.\nfn parse_schema_r(bytes: &[u8], pos: usize) -> Result<(Schema, usize), MorlocError> {\n    if pos >= bytes.len() {\n        return Err(MorlocError::Schema(\"unexpected end of schema\".into()));\n    }\n\n    let c = bytes[pos];\n    let cur = pos + 1;\n\n    match c {\n        b'<' => {\n            // Hint: <...> with nesting support, then parse the actual type\n            let (hint, after_hint) = parse_hint(bytes, cur)?;\n            let (mut schema, end) = parse_schema_r(bytes, after_hint)?;\n            schema.hint = Some(hint);\n            Ok((schema, end))\n        }\n        SCHEMA_NIL => Ok((Schema::primitive(SerialType::Nil), cur)),\n        SCHEMA_BOOL => Ok((Schema::primitive(SerialType::Bool), cur)),\n        SCHEMA_STRING => {\n            // String schema has one parameter (uint8) for array compatibility,\n            // matching the C string_schema() constructor.\n            Ok((Schema {\n                serial_type: SerialType::String,\n                size: 1,\n                width: std::mem::size_of::<crate::shm::Array>(),\n                offsets: Vec::new(),\n                hint: None,\n                parameters: vec![Schema::primitive(SerialType::Uint8)],\n                keys: Vec::new(),\n            }, cur))\n        }\n        SCHEMA_SINT => parse_sized_int(bytes, cur, true),\n        SCHEMA_UINT => parse_sized_int(bytes, cur, false),\n        SCHEMA_FLOAT => parse_sized_float(bytes, cur),\n        SCHEMA_ARRAY => {\n            // Array: one child schema follows immediately\n            let (child, end) = parse_schema_r(bytes, cur)?;\n            Ok((make_array_schema(child), end))\n        }\n        SCHEMA_OPTIONAL => {\n            // Optional: one child schema follows immediately\n            let (child, end) = parse_schema_r(bytes, cur)?;\n            Ok((make_optional_schema(child), end))\n        }\n        SCHEMA_TUPLE => {\n            // Tuple: base-62 size char, then N child schemas\n            if cur >= bytes.len() {\n                return Err(MorlocError::Schema(\"expected tuple size\".into()));\n            }\n            let n = decode_base62(bytes[cur])?;\n            let mut params = Vec::with_capacity(n);\n            let mut p = cur + 1;\n            for _ in 0..n {\n                let (child, end) = parse_schema_r(bytes, p)?;\n                params.push(child);\n                p = end;\n            }\n            Ok((make_tuple_schema(params), p))\n        }\n        SCHEMA_MAP => {\n            // Map/record: base-62 size char, then N (key_len_char + key_bytes + value_schema)\n            if cur >= bytes.len() {\n                return Err(MorlocError::Schema(\"expected map size\".into()));\n            }\n            let n = decode_base62(bytes[cur])?;\n            let mut params = Vec::with_capacity(n);\n            let mut keys = Vec::with_capacity(n);\n            let mut p = cur + 1;\n            for _ in 0..n {\n                // Read key: base-62 length char + that many bytes\n                if p >= bytes.len() {\n                    return Err(MorlocError::Schema(\"expected map key length\".into()));\n                }\n                let key_len = decode_base62(bytes[p])?;\n                p += 1;\n                if p + key_len > bytes.len() {\n                    return Err(MorlocError::Schema(\"map key extends past end\".into()));\n                }\n                let key = std::str::from_utf8(&bytes[p..p + key_len])\n                    .map_err(|_| MorlocError::Schema(\"invalid UTF-8 in map key\".into()))?\n                    .to_string();\n                p += key_len;\n                keys.push(key);\n                // Read value schema\n                let (child, end) = parse_schema_r(bytes, p)?;\n                params.push(child);\n                p = end;\n            }\n            Ok((make_map_schema(params, keys), p))\n        }\n        SCHEMA_TENSOR => {\n            // Tensor: base-62 ndim char, then element schema\n            if cur >= bytes.len() {\n                return Err(MorlocError::Schema(\"expected tensor ndim\".into()));\n            }\n            let ndim = decode_base62(bytes[cur])?;\n            let (child, end) = parse_schema_r(bytes, cur + 1)?;\n            Ok((make_tensor_schema(ndim, child), end))\n        }\n        _ => Err(MorlocError::Schema(format!(\n            \"unknown schema character '{}' at position {pos}\",\n            c as char\n        ))),\n    }\n}\n\n/// Parse hint with nested angle bracket support: `<std::vector<$1>>` etc.\nfn parse_hint(bytes: &[u8], pos: usize) -> Result<(String, usize), MorlocError> {\n    let mut depth: usize = 1;\n    let start = pos;\n    let mut cur = pos;\n    while cur < bytes.len() {\n        match bytes[cur] {\n            b'<' => depth += 1,\n            b'>' => {\n                depth -= 1;\n                if depth == 0 {\n                    let hint = std::str::from_utf8(&bytes[start..cur])\n                        .unwrap_or(\"\")\n                        .to_string();\n                    return Ok((hint, cur + 1)); // skip closing '>'\n                }\n            }\n            _ => {}\n        }\n        cur += 1;\n    }\n    Err(MorlocError::Schema(\"unclosed '<' in hint\".into()))\n}\n\nfn parse_sized_int(\n    bytes: &[u8],\n    pos: usize,\n    signed: bool,\n) -> Result<(Schema, usize), MorlocError> {\n    if pos >= bytes.len() {\n        return Err(MorlocError::Schema(\"expected size after 'i'/'u'\".into()));\n    }\n    // Size is a SINGLE base-62 character, not a multi-digit number\n    let size = decode_base62(bytes[pos])?;\n    let next = pos + 1;\n    let st = match (signed, size) {\n        (true, 1) => SerialType::Sint8,\n        (true, 2) => SerialType::Sint16,\n        (true, 4) => SerialType::Sint32,\n        (true, 8) => SerialType::Sint64,\n        (false, 1) => SerialType::Uint8,\n        (false, 2) => SerialType::Uint16,\n        (false, 4) => SerialType::Uint32,\n        (false, 8) => SerialType::Uint64,\n        _ => return Err(MorlocError::Schema(format!(\"invalid integer size {size}\"))),\n    };\n    Ok((Schema::primitive(st), next))\n}\n\nfn parse_sized_float(bytes: &[u8], pos: usize) -> Result<(Schema, usize), MorlocError> {\n    if pos >= bytes.len() {\n        return Err(MorlocError::Schema(\"expected size after 'f'\".into()));\n    }\n    // Size is a SINGLE base-62 character, not a multi-digit number\n    let size = decode_base62(bytes[pos])?;\n    let next = pos + 1;\n    let st = match size {\n        4 => SerialType::Float32,\n        8 => SerialType::Float64,\n        _ => return Err(MorlocError::Schema(format!(\"invalid float size {size}\"))),\n    };\n    Ok((Schema::primitive(st), next))\n}\n\n/// Decode a single base-62 character to a number (0-63).\n/// 0-9 -> 0-9, a-z -> 10-35, A-Z -> 36-61, + -> 62, / -> 63\nfn decode_base62(c: u8) -> Result<usize, MorlocError> {\n    match c {\n        b'0'..=b'9' => Ok((c - b'0') as usize),\n        b'a'..=b'z' => Ok((c - b'a') as usize + 10),\n        b'A'..=b'Z' => Ok((c - b'A') as usize + 36),\n        b'+' => Ok(62),\n        b'/' => Ok(63),\n        _ => Err(MorlocError::Schema(format!(\n            \"invalid base-62 size character '{}'\",\n            c as char\n        ))),\n    }\n}\n\nfn encode_base62(n: usize) -> char {\n    match n {\n        0..=9 => (b'0' + n as u8) as char,\n        10..=35 => (b'a' + (n - 10) as u8) as char,\n        36..=61 => (b'A' + (n - 36) as u8) as char,\n        62 => '+',\n        63 => '/',\n        _ => '\\x07', // bell - error\n    }\n}\n\n// ── Schema constructors ────────────────────────────────────────────────────\n\nfn make_array_schema(child: Schema) -> Schema {\n    Schema {\n        serial_type: SerialType::Array,\n        size: 1,\n        width: std::mem::size_of::<crate::shm::Array>(),\n        offsets: Vec::new(),\n        hint: None,\n        parameters: vec![child],\n        keys: Vec::new(),\n    }\n}\n\nfn make_optional_schema(child: Schema) -> Schema {\n    let align = child.alignment().max(1);\n    let inner_offset = crate::shm::align_up(1, align);\n    Schema {\n        serial_type: SerialType::Optional,\n        size: 1,\n        width: inner_offset + child.width,\n        offsets: vec![inner_offset],\n        hint: None,\n        parameters: vec![child],\n        keys: Vec::new(),\n    }\n}\n\nfn make_tuple_schema(params: Vec<Schema>) -> Schema {\n    let (width, offsets) = calculate_tuple_layout(&params);\n    let size = params.len();\n    Schema {\n        serial_type: SerialType::Tuple,\n        size,\n        width,\n        offsets,\n        hint: None,\n        parameters: params,\n        keys: Vec::new(),\n    }\n}\n\nfn make_map_schema(params: Vec<Schema>, keys: Vec<String>) -> Schema {\n    let (width, offsets) = calculate_tuple_layout(&params);\n    let size = params.len();\n    Schema {\n        serial_type: SerialType::Map,\n        size,\n        width,\n        offsets,\n        hint: None,\n        parameters: params,\n        keys,\n    }\n}\n\nfn make_tensor_schema(ndim: usize, child: Schema) -> Schema {\n    Schema {\n        serial_type: SerialType::Tensor,\n        size: 1,\n        width: std::mem::size_of::<crate::shm::Tensor>(),\n        offsets: vec![ndim],\n        hint: None,\n        parameters: vec![child],\n        keys: Vec::new(),\n    }\n}\n\n/// Calculate byte offsets for tuple fields (C struct layout with natural alignment).\nfn calculate_tuple_layout(params: &[Schema]) -> (usize, Vec<usize>) {\n    let mut offsets = Vec::with_capacity(params.len());\n    let mut offset: usize = 0;\n    let mut max_align: usize = 1;\n\n    for param in params {\n        let align = param.alignment();\n        max_align = std::cmp::max(max_align, align);\n        // Align the offset\n        offset = (offset + align - 1) & !(align - 1);\n        offsets.push(offset);\n        offset += param.width;\n    }\n\n    // Total width padded to max alignment\n    let width = (offset + max_align - 1) & !(max_align - 1);\n    (width, offsets)\n}\n\n/// Render a schema back to its string representation.\npub fn schema_to_string(schema: &Schema) -> String {\n    let mut buf = String::new();\n    schema_to_string_inner(schema, &mut buf);\n    buf\n}\n\nfn schema_to_string_inner(schema: &Schema, buf: &mut String) {\n    // Write hint if present\n    if let Some(ref hint) = schema.hint {\n        buf.push('<');\n        buf.push_str(hint);\n        buf.push('>');\n    }\n\n    match schema.serial_type {\n        SerialType::Nil => buf.push('z'),\n        SerialType::Bool => buf.push('b'),\n        SerialType::Sint8 => buf.push_str(\"i1\"),\n        SerialType::Sint16 => buf.push_str(\"i2\"),\n        SerialType::Sint32 => buf.push_str(\"i4\"),\n        SerialType::Sint64 => buf.push_str(\"i8\"),\n        SerialType::Uint8 => buf.push_str(\"u1\"),\n        SerialType::Uint16 => buf.push_str(\"u2\"),\n        SerialType::Uint32 => buf.push_str(\"u4\"),\n        SerialType::Uint64 => buf.push_str(\"u8\"),\n        SerialType::Float32 => buf.push_str(\"f4\"),\n        SerialType::Float64 => buf.push_str(\"f8\"),\n        SerialType::String => buf.push('s'),\n        SerialType::Array => {\n            buf.push('a');\n            schema_to_string_inner(&schema.parameters[0], buf);\n        }\n        SerialType::Tuple => {\n            buf.push('t');\n            buf.push(encode_base62(schema.size));\n            for p in &schema.parameters {\n                schema_to_string_inner(p, buf);\n            }\n        }\n        SerialType::Map => {\n            buf.push('m');\n            buf.push(encode_base62(schema.size));\n            for (i, p) in schema.parameters.iter().enumerate() {\n                if i < schema.keys.len() {\n                    let key = &schema.keys[i];\n                    buf.push(encode_base62(key.len()));\n                    buf.push_str(key);\n                }\n                schema_to_string_inner(p, buf);\n            }\n        }\n        SerialType::Optional => {\n            buf.push('?');\n            schema_to_string_inner(&schema.parameters[0], buf);\n        }\n        SerialType::Tensor => {\n            let ndim = schema.offsets.first().copied().unwrap_or(0);\n            buf.push('T');\n            buf.push(encode_base62(ndim));\n            schema_to_string_inner(&schema.parameters[0], buf);\n        }\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_parse_primitives() {\n        assert_eq!(parse_schema(\"z\").unwrap().serial_type, SerialType::Nil);\n        assert_eq!(parse_schema(\"b\").unwrap().serial_type, SerialType::Bool);\n        assert_eq!(parse_schema(\"i4\").unwrap().serial_type, SerialType::Sint32);\n        assert_eq!(parse_schema(\"u8\").unwrap().serial_type, SerialType::Uint64);\n        assert_eq!(parse_schema(\"f8\").unwrap().serial_type, SerialType::Float64);\n        assert_eq!(parse_schema(\"s\").unwrap().serial_type, SerialType::String);\n    }\n\n    #[test]\n    fn test_parse_array() {\n        let s = parse_schema(\"ai4\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Array);\n        assert_eq!(s.parameters.len(), 1);\n        assert_eq!(s.parameters[0].serial_type, SerialType::Sint32);\n    }\n\n    #[test]\n    fn test_parse_tuple() {\n        let s = parse_schema(\"t3i4sf8\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Tuple);\n        assert_eq!(s.parameters.len(), 3);\n    }\n\n    #[test]\n    fn test_parse_nested() {\n        let s = parse_schema(\"at2i4s\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Array);\n        assert_eq!(s.parameters[0].serial_type, SerialType::Tuple);\n        assert_eq!(s.parameters[0].parameters.len(), 2);\n    }\n\n    #[test]\n    fn test_parse_map() {\n        let s = parse_schema(\"m21as1bi4\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Map);\n        assert_eq!(s.parameters.len(), 2);\n        assert_eq!(s.keys[0], \"a\");\n        assert_eq!(s.keys[1], \"b\");\n    }\n\n    #[test]\n    fn test_parse_optional() {\n        let s = parse_schema(\"?f8\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Optional);\n        assert_eq!(s.parameters[0].serial_type, SerialType::Float64);\n    }\n\n    #[test]\n    fn test_parse_tensor() {\n        let s = parse_schema(\"T2f8\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Tensor);\n        assert_eq!(s.offsets[0], 2); // ndim\n        assert_eq!(s.parameters[0].serial_type, SerialType::Float64);\n    }\n\n    #[test]\n    fn test_parse_with_hints() {\n        let s = parse_schema(\"<float>f8\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Float64);\n        assert_eq!(s.hint.as_deref(), Some(\"float\"));\n\n        // Nested hints: <std::vector<$1>>\n        let s = parse_schema(\"<std::vector<$1>>ai4\").unwrap();\n        assert_eq!(s.serial_type, SerialType::Array);\n        assert_eq!(s.hint.as_deref(), Some(\"std::vector<$1>\"));\n    }\n\n    #[test]\n    fn test_roundtrip() {\n        let cases = [\"z\", \"b\", \"i4\", \"u8\", \"f8\", \"s\", \"ai4\", \"t2i4s\", \"?i4\", \"T2f8\"];\n        for case in cases {\n            let schema = parse_schema(case).unwrap();\n            let rendered = schema_to_string(&schema);\n            assert_eq!(rendered, case, \"roundtrip failed for '{case}'\");\n        }\n    }\n\n    #[test]\n    fn test_roundtrip_map() {\n        let input = \"m24names4infoi4\";\n        let schema = parse_schema(input).unwrap();\n        assert_eq!(schema.serial_type, SerialType::Map);\n        assert_eq!(schema.keys, vec![\"name\", \"info\"]);\n        let rendered = schema_to_string(&schema);\n        assert_eq!(rendered, input);\n    }\n}\n\n#[cfg(test)]\nmod compat_tests {\n    use super::*;\n\n    fn dump(label: &str, s: &Schema, depth: usize) {\n        let indent = \"  \".repeat(depth);\n        print!(\"{}{}: type={} size={} width={}\", indent, label, s.serial_type as u32, s.size, s.width);\n        if !s.offsets.is_empty() {\n            print!(\" offsets={:?}\", s.offsets);\n        }\n        if let Some(ref h) = s.hint { print!(\" hint=\\\"{}\\\"\", h); }\n        if !s.keys.is_empty() { print!(\" keys={:?}\", s.keys); }\n        println!();\n        for (i, p) in s.parameters.iter().enumerate() {\n            dump(&format!(\"param[{}]\", i), p, depth + 1);\n        }\n    }\n\n    #[test]\n    fn test_schema_compat_with_c() {\n        // These must match the C output exactly\n        let cases = vec![\n            (\"s\", \"type=13 size=1 width=16\"),\n            (\"ai4\", \"type=14 size=1 width=16\"),\n            (\"t2i4s\", \"type=15 size=2 width=24\"),\n            (\"?i4\", \"type=17 size=1 width=8\"),\n            (\"?s\", \"type=17 size=1 width=24\"),\n            (\"T2f8\", \"type=12 size=1 width=32\"),\n        ];\n        for (input, expected_root) in &cases {\n            let s = parse_schema(input).unwrap();\n            let got = format!(\"type={} size={} width={}\", s.serial_type as u32, s.size, s.width);\n            assert_eq!(&got, *expected_root, \"Schema '{}' mismatch\", input);\n        }\n\n        // Verify tuple offsets\n        let t = parse_schema(\"t2i4s\").unwrap();\n        assert_eq!(t.offsets, vec![0, 8], \"t2i4s offsets\");\n\n        // Verify optional offsets\n        let o = parse_schema(\"?i4\").unwrap();\n        assert_eq!(o.offsets, vec![4], \"?i4 offsets\");\n        let os = parse_schema(\"?s\").unwrap();\n        assert_eq!(os.offsets, vec![8], \"?s offsets\");\n\n        // Verify string has uint8 parameter\n        let s = parse_schema(\"s\").unwrap();\n        assert_eq!(s.parameters.len(), 1);\n        assert_eq!(s.parameters[0].serial_type, SerialType::Uint8);\n        assert_eq!(s.parameters[0].width, 1);\n\n        // Verify tensor\n        let t = parse_schema(\"T2f8\").unwrap();\n        assert_eq!(t.offsets, vec![2]); // ndim\n        assert_eq!(t.width, 32); // sizeof(Tensor)\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/shm.rs",
    "content": "//! Shared memory management with multi-volume support.\n//!\n//! Replaces shm.c / memory.h. Uses AtomicU32 + futex for cross-process locking\n//! instead of pthread_rwlock_t, providing crash-safety and portability.\n\nuse crate::error::MorlocError;\nuse std::sync::atomic::{AtomicU32, Ordering};\nuse std::sync::Mutex;\n\n/// Cross-platform file pre-allocation.\n/// Linux: posix_fallocate (allocates disk blocks).\n/// macOS: ftruncate (extends file, may be sparse).\n#[cfg(target_os = \"linux\")]\nunsafe fn preallocate_fd(fd: i32, size: i64) -> i32 {\n    libc::posix_fallocate(fd, 0, size)\n}\n\n#[cfg(target_os = \"macos\")]\nunsafe fn preallocate_fd(fd: i32, size: i64) -> i32 {\n    if libc::ftruncate(fd, size) == -1 { -1 } else { 0 }\n}\n\n// ── Constants ──────────────────────────────────────────────────────────────\n\npub const SHM_MAGIC: u32 = 0xFECA_0DF0;\npub const BLK_MAGIC: u32 = 0x0CB1_0DF0;\npub const MAX_VOLUME_NUMBER: usize = 32;\npub const MAX_FILENAME_SIZE: usize = 128;\npub const MAX_PATH_SIZE: usize = 512;\n\nconst LOCK_UNLOCKED: u32 = 0;\nconst LOCK_LOCKED: u32 = 1;\nconst SPIN_LIMIT: u32 = 40;\n#[cfg(target_os = \"linux\")]\nconst LOCK_TIMEOUT_SECS: u64 = 5;\n\n// ── Pointer types ──────────────────────────────────────────────────────────\n\n/// Relative pointer: index into the multi-volume pool (cross-process safe).\npub type RelPtr = isize;\n/// Volume-local pointer: offset within a single volume.\npub type VolPtr = isize;\n/// Absolute pointer: virtual address in this process.\npub type AbsPtr = *mut u8;\n\npub const RELNULL: RelPtr = -1;\npub const VOLNULL: VolPtr = -1;\n\n// ── Block alignment ────────────────────────────────────────────────────────\n\npub const BLOCK_ALIGN: usize = std::mem::align_of::<BlockHeader>();\n\n#[inline]\npub const fn align_up(x: usize, align: usize) -> usize {\n    (x + align - 1) & !(align - 1)\n}\n\n// ── Shared memory header (lives in mmap'd region) ──────────────────────────\n\n#[repr(C)]\npub struct ShmHeader {\n    pub magic: u32,\n    pub volume_name: [u8; MAX_FILENAME_SIZE],\n    pub volume_index: i32,\n    pub volume_size: usize,\n    pub relative_offset: usize,\n    pub lock: AtomicU32,\n    pub cursor: VolPtr,\n}\n\n#[repr(C)]\npub struct BlockHeader {\n    pub magic: u32,\n    pub reference_count: AtomicU32,\n    pub size: usize,\n}\n\nconst _: () = assert!(\n    std::mem::size_of::<BlockHeader>()\n        == std::mem::size_of::<u32>()\n            + std::mem::size_of::<AtomicU32>()\n            + std::mem::size_of::<usize>()\n);\n\n// ── Voidstar data structures (used by serialization) ───────────────────────\n\n/// Variable-length array/string representation in SHM.\n#[derive(Clone, Copy)]\n#[repr(C)]\npub struct Array {\n    pub size: usize,\n    pub data: RelPtr,\n}\n\n/// N-dimensional dense tensor in SHM.\n#[repr(C)]\npub struct Tensor {\n    pub total_elements: usize,\n    pub device_type: u32,\n    pub device_id: u32,\n    pub data: RelPtr,\n    pub shape: RelPtr,\n}\n\n// ── Send wrapper for raw pointers ──────────────────────────────────────────\n\n#[derive(Clone, Copy)]\nstruct SendPtr(*mut ShmHeader);\n// SAFETY: ShmHeader lives in mmap'd shared memory that outlives all threads.\n// Access is serialized via VOLUMES Mutex and per-volume AtomicU32 futex locks.\nunsafe impl Send for SendPtr {}\nimpl SendPtr {\n    const fn null() -> Self { SendPtr(std::ptr::null_mut()) }\n    fn is_null(&self) -> bool { self.0.is_null() }\n    fn ptr(&self) -> *mut ShmHeader { self.0 }\n    fn set(&mut self, p: *mut ShmHeader) { self.0 = p; }\n}\n\nfn get_cstr_buf(buf: &[u8; MAX_FILENAME_SIZE]) -> &str {\n    get_cstr(buf.as_slice())\n}\n\n// ── Global state ───────────────────────────────────────────────────────────\n\nstatic CURRENT_VOLUME: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0);\n\nstatic VOLUMES: Mutex<[SendPtr; MAX_VOLUME_NUMBER]> =\n    Mutex::new([SendPtr::null(); MAX_VOLUME_NUMBER]);\n\nstatic ALLOC_MUTEX: Mutex<()> = Mutex::new(());\n\nstatic COMMON_BASENAME: Mutex<[u8; MAX_FILENAME_SIZE]> = Mutex::new([0u8; MAX_FILENAME_SIZE]);\n\nstatic FALLBACK_DIR: Mutex<[u8; MAX_FILENAME_SIZE]> = Mutex::new([0u8; MAX_FILENAME_SIZE]);\n\nfn set_cstr(buf: &mut [u8], s: &str) {\n    let bytes = s.as_bytes();\n    let len = bytes.len().min(buf.len() - 1);\n    buf[..len].copy_from_slice(&bytes[..len]);\n    buf[len] = 0;\n}\n\nfn get_cstr(buf: &[u8]) -> &str {\n    let end = buf.iter().position(|&b| b == 0).unwrap_or(buf.len());\n    std::str::from_utf8(&buf[..end]).unwrap_or(\"\")\n}\n\n// ── Public API ─────────────────────────────────────────────────────────────\n\n/// Set fallback directory for file-backed SHM when /dev/shm is too small.\npub fn shm_set_fallback_dir(dir: &str) {\n    let mut fb = FALLBACK_DIR.lock().unwrap();\n    set_cstr(&mut *fb, dir);\n}\n\n/// Initialize a new SHM volume.\npub fn shinit(\n    shm_basename: &str,\n    volume_index: usize,\n    shm_size: usize,\n) -> Result<*mut ShmHeader, MorlocError> {\n    let full_size = shm_size + std::mem::size_of::<ShmHeader>();\n    let shm_name = format!(\"{}_{}\", shm_basename, volume_index);\n\n    // Store common basename\n    {\n        let mut cb = COMMON_BASENAME.lock().unwrap();\n        set_cstr(&mut *cb, shm_basename);\n    }\n\n    // Try POSIX shared memory first, fall back to file-backed\n    let (fd, created, volume_label, actual_full_size) =\n        try_open_shm(&shm_name, full_size)?;\n\n    // SAFETY: mmap with MAP_SHARED on a valid fd obtained from shm_open/open above.\n    // The returned pointer is checked against MAP_FAILED before use.\n    let ptr = unsafe {\n        libc::mmap(\n            std::ptr::null_mut(),\n            actual_full_size,\n            libc::PROT_READ | libc::PROT_WRITE,\n            libc::MAP_SHARED,\n            fd,\n            0,\n        )\n    };\n    // SAFETY: fd is a valid file descriptor opened above.\n    unsafe { libc::close(fd) };\n\n    if ptr == libc::MAP_FAILED {\n        return Err(MorlocError::Shm(format!(\n            \"Failed to mmap volume '{}' ({} bytes)\",\n            volume_label, actual_full_size\n        )));\n    }\n\n    let shm = ptr as *mut ShmHeader;\n\n    // Store in volumes array\n    {\n        let mut vols = VOLUMES.lock().unwrap();\n        vols[volume_index].set(shm);\n    }\n\n    let actual_data_size = actual_full_size - std::mem::size_of::<ShmHeader>();\n\n    if created {\n        // SAFETY: shm points to the start of our mmap'd region of actual_full_size bytes.\n        // We just created it, so we have exclusive access for initialization.\n        unsafe {\n            (*shm).magic = SHM_MAGIC;\n            let mut name_buf = [0u8; MAX_FILENAME_SIZE];\n            set_cstr(&mut name_buf, &volume_label);\n            (*shm).volume_name = name_buf;\n            (*shm).volume_index = volume_index as i32;\n\n            // Calculate relative offset from prior volumes\n            let vols = VOLUMES.lock().unwrap();\n            let mut rel_offset = 0usize;\n            for i in 0..volume_index {\n                if !vols[i].is_null() {\n                    rel_offset += (*vols[i].ptr()).volume_size;\n                }\n            }\n            (*shm).relative_offset = rel_offset;\n            (*shm).volume_size = actual_data_size;\n            (*shm).lock = AtomicU32::new(LOCK_UNLOCKED);\n            (*shm).cursor = 0;\n\n            // Initialize first block header\n            let first_block =\n                (shm as *mut u8).add(std::mem::size_of::<ShmHeader>()) as *mut BlockHeader;\n            (*first_block).magic = BLK_MAGIC;\n            (*first_block).reference_count = AtomicU32::new(0);\n            (*first_block).size = actual_data_size - std::mem::size_of::<BlockHeader>();\n        }\n    }\n\n    Ok(shm)\n}\n\n/// Open an existing SHM volume (or return cached pointer).\npub fn shopen(volume_index: usize) -> Result<Option<*mut ShmHeader>, MorlocError> {\n    {\n        let vols = VOLUMES.lock().unwrap();\n        if !vols[volume_index].is_null() {\n            return Ok(Some(vols[volume_index].ptr()));\n        }\n    }\n\n    let basename = {\n        let cb = COMMON_BASENAME.lock().unwrap();\n        get_cstr_buf(&cb).to_string()\n    };\n    if basename.is_empty() {\n        return Ok(None);\n    }\n\n    let shm_name = format!(\"{}_{}\", basename, volume_index);\n\n    // Try POSIX SHM\n    let name_cstr = std::ffi::CString::new(shm_name.as_str()).unwrap();\n    // SAFETY: name_cstr is a valid null-terminated CString.\n    let fd = unsafe { libc::shm_open(name_cstr.as_ptr(), libc::O_RDWR, 0o666) };\n\n    let fd = if fd == -1 {\n        // Try file-backed fallback\n        let fb = FALLBACK_DIR.lock().unwrap();\n        let fallback = get_cstr_buf(&fb);\n        if fallback.is_empty() {\n            return Ok(None);\n        }\n        let file_path = format!(\"{}/{}\", fallback, shm_name);\n        let path_cstr = std::ffi::CString::new(file_path.as_str()).unwrap();\n        let fd2 = unsafe { libc::open(path_cstr.as_ptr(), libc::O_RDWR) };\n        if fd2 == -1 {\n            return Ok(None);\n        }\n        fd2\n    } else {\n        fd\n    };\n\n    // SAFETY: zeroed memory is valid for libc::stat. fstat/close on valid fd.\n    let mut sb: libc::stat = unsafe { std::mem::zeroed() };\n    if unsafe { libc::fstat(fd, &mut sb) } == -1 {\n        unsafe { libc::close(fd) };\n        return Err(MorlocError::Shm(format!(\n            \"Cannot fstat SHM volume '{}'\",\n            shm_name\n        )));\n    }\n    let volume_size = sb.st_size as usize;\n\n    // SAFETY: mmap with MAP_SHARED on a valid fd; result checked against MAP_FAILED.\n    let ptr = unsafe {\n        libc::mmap(\n            std::ptr::null_mut(),\n            volume_size,\n            libc::PROT_READ | libc::PROT_WRITE,\n            libc::MAP_SHARED,\n            fd,\n            0,\n        )\n    };\n    // SAFETY: fd is a valid file descriptor opened above.\n    unsafe { libc::close(fd) };\n\n    if ptr == libc::MAP_FAILED {\n        return Err(MorlocError::Shm(format!(\n            \"Cannot mmap SHM volume '{}'\",\n            shm_name\n        )));\n    }\n\n    let shm = ptr as *mut ShmHeader;\n    {\n        let mut vols = VOLUMES.lock().unwrap();\n        vols[volume_index].set(shm);\n    }\n\n    Ok(Some(shm))\n}\n\n/// Close and unlink all SHM volumes.\npub fn shclose() -> Result<(), MorlocError> {\n    let _lock = ALLOC_MUTEX.lock().unwrap();\n    let mut vols = VOLUMES.lock().unwrap();\n\n    for i in 0..MAX_VOLUME_NUMBER {\n        let shm = if !vols[i].is_null() {\n            vols[i].ptr()\n        } else {\n            continue;\n        };\n\n        // SAFETY: shm is a valid mmap'd pointer stored in VOLUMES.\n        // munmap/unlink on regions we own. Name read from valid ShmHeader.\n        unsafe {\n            let name = get_cstr(&(*shm).volume_name).to_string();\n            let full_size = (*shm).volume_size + std::mem::size_of::<ShmHeader>();\n            libc::munmap(shm as *mut libc::c_void, full_size);\n\n            // Unlink: file-backed volumes start with '/', POSIX SHM does not\n            if name.starts_with('/') {\n                let cstr = std::ffi::CString::new(name.as_str()).unwrap();\n                libc::unlink(cstr.as_ptr());\n            } else {\n                let cstr = std::ffi::CString::new(name.as_str()).unwrap();\n                libc::shm_unlink(cstr.as_ptr());\n            }\n        }\n        vols[i] = SendPtr::null();\n    }\n    Ok(())\n}\n\n/// Allocate `size` bytes from shared memory.\npub fn shmalloc(size: usize) -> Result<AbsPtr, MorlocError> {\n    // Allow 0-size: round up to minimum block alignment.\n    // Needed for nil type (width=0) in morloc_eval.\n    let size = if size == 0 { BLOCK_ALIGN } else { align_up(size, BLOCK_ALIGN) };\n    let _lock = ALLOC_MUTEX.lock().unwrap();\n    shmalloc_unlocked(size)\n}\n\n/// Copy data into a new SHM allocation.\npub fn shmemcpy(src: *const u8, size: usize) -> Result<AbsPtr, MorlocError> {\n    let dest = shmalloc(size)?;\n    // SAFETY: dest is a freshly allocated SHM block of `size` bytes.\n    // Caller guarantees src points to `size` readable bytes.\n    unsafe { std::ptr::copy_nonoverlapping(src, dest, size) };\n    Ok(dest)\n}\n\n/// Allocate and zero-fill.\npub fn shcalloc(nmemb: usize, size: usize) -> Result<AbsPtr, MorlocError> {\n    let total = nmemb * size;\n    let ptr = shmalloc(total)?;\n    // SAFETY: ptr is a freshly allocated SHM block of `total` bytes.\n    unsafe { std::ptr::write_bytes(ptr, 0, total) };\n    Ok(ptr)\n}\n\n/// Free a shared memory block (decrement reference count).\npub fn shfree(ptr: AbsPtr) -> Result<(), MorlocError> {\n    let _lock = ALLOC_MUTEX.lock().unwrap();\n    shfree_unlocked(ptr)\n}\n\n/// Increment reference count on a shared memory block.\npub fn shincref(ptr: AbsPtr) -> Result<(), MorlocError> {\n    if ptr.is_null() {\n        return Err(MorlocError::Shm(\"Cannot incref NULL pointer\".into()));\n    }\n    // SAFETY: ptr was returned by shmalloc, which places a BlockHeader immediately before\n    // the returned data pointer. Magic check below validates the header.\n    let blk = unsafe { &*(ptr.sub(std::mem::size_of::<BlockHeader>()) as *const BlockHeader) };\n    if blk.magic != BLK_MAGIC {\n        return Err(MorlocError::Shm(\"Corrupted memory - invalid magic\".into()));\n    }\n    blk.reference_count.fetch_add(1, Ordering::AcqRel);\n    Ok(())\n}\n\n/// Convert relative pointer to absolute pointer.\npub fn rel2abs(ptr: RelPtr) -> Result<AbsPtr, MorlocError> {\n    if ptr < 0 {\n        return Err(MorlocError::Shm(format!(\"Illegal relptr value {}\", ptr)));\n    }\n    let mut remaining = ptr as usize;\n\n    // First try with volumes already mapped\n    {\n        let vols = VOLUMES.lock().unwrap();\n        for i in 0..MAX_VOLUME_NUMBER {\n            if vols[i].is_null() {\n                break; // No more volumes mapped\n            }\n            let shm = vols[i].ptr();\n            // SAFETY: shm is a valid mmap'd ShmHeader pointer from VOLUMES.\n            let vol_size = unsafe { (*shm).volume_size };\n            if remaining < vol_size {\n                // SAFETY: data region starts after ShmHeader; remaining < vol_size\n                // guarantees the offset is within the mmap'd region.\n                let base = unsafe {\n                    (shm as *const u8).add(std::mem::size_of::<ShmHeader>())\n                };\n                return Ok(unsafe { base.add(remaining) as AbsPtr });\n            }\n            remaining -= vol_size;\n        }\n    }\n\n    // If not found, try opening unmapped volumes\n    remaining = ptr as usize;\n    for i in 0..MAX_VOLUME_NUMBER {\n        let shm = match shopen(i)? {\n            Some(s) => s,\n            None => {\n                return Err(MorlocError::Shm(format!(\n                    \"Failed to find volume for relptr {}\", ptr\n                )));\n            }\n        };\n        // SAFETY: shm is a valid mmap'd ShmHeader pointer from shopen.\n        let vol_size = unsafe { (*shm).volume_size };\n        if remaining < vol_size {\n            // SAFETY: same as above - offset within mmap'd region.\n            let base = unsafe {\n                (shm as *const u8).add(std::mem::size_of::<ShmHeader>())\n            };\n            return Ok(unsafe { base.add(remaining) as AbsPtr });\n        }\n        remaining -= vol_size;\n    }\n\n    Err(MorlocError::Shm(format!(\n        \"Shared memory pool does not contain index {}\", ptr\n    )))\n}\n\n/// Convert absolute pointer to relative pointer.\npub fn abs2rel(ptr: AbsPtr) -> Result<RelPtr, MorlocError> {\n    let vols = VOLUMES.lock().unwrap();\n    for i in 0..MAX_VOLUME_NUMBER {\n        let shm = vols[i].ptr();\n        if shm.is_null() {\n            continue;\n        }\n        // SAFETY: shm is a valid mmap'd ShmHeader from VOLUMES. We compute\n        // data region bounds and check ptr falls within before computing offset.\n        unsafe {\n            let data_start = (shm as *const u8).add(std::mem::size_of::<ShmHeader>());\n            let data_end = data_start.add((*shm).volume_size);\n            let p = ptr as *const u8;\n            if p >= data_start && p < data_end {\n                let offset = p.offset_from(data_start) as usize;\n                return Ok(((*shm).relative_offset + offset) as RelPtr);\n            }\n        }\n    }\n    Err(MorlocError::Shm(format!(\n        \"Failed to find absptr {:?} in shared memory\",\n        ptr\n    )))\n}\n\n/// Find the ShmHeader for a given absolute pointer.\npub fn abs2shm(ptr: AbsPtr) -> Result<*mut ShmHeader, MorlocError> {\n    let vols = VOLUMES.lock().unwrap();\n    for i in 0..MAX_VOLUME_NUMBER {\n        let shm = vols[i].ptr();\n        if shm.is_null() {\n            continue;\n        }\n        // SAFETY: shm is a valid mmap'd ShmHeader from VOLUMES.\n        unsafe {\n            let data_start = (shm as *const u8).add(std::mem::size_of::<ShmHeader>());\n            let data_end = data_start.add((*shm).volume_size);\n            let p = ptr as *const u8;\n            if p >= data_start && p < data_end {\n                return Ok(shm);\n            }\n        }\n    }\n    Err(MorlocError::Shm(\"Failed to find absptr in SHM\".into()))\n}\n\n/// Total size of all SHM volumes.\npub fn total_shm_size() -> usize {\n    let vols = VOLUMES.lock().unwrap();\n    let mut total = 0;\n    for i in 0..MAX_VOLUME_NUMBER {\n        if !vols[i].is_null() {\n            // SAFETY: non-null VOLUMES entries are valid mmap'd ShmHeader pointers.\n            total += unsafe { (*vols[i].ptr()).volume_size };\n        }\n    }\n    total\n}\n\n// ── Internal helpers ───────────────────────────────────────────────────────\n\nfn try_open_shm(\n    shm_name: &str,\n    full_size: usize,\n) -> Result<(i32, bool, String, usize), MorlocError> {\n    let name_cstr = std::ffi::CString::new(shm_name).unwrap();\n\n    // Try POSIX SHM\n    let fd = unsafe {\n        libc::shm_open(\n            name_cstr.as_ptr(),\n            libc::O_RDWR | libc::O_CREAT,\n            0o666,\n        )\n    };\n\n    if fd >= 0 {\n        let mut sb: libc::stat = unsafe { std::mem::zeroed() };\n        if unsafe { libc::fstat(fd, &mut sb) } == -1 {\n            unsafe { libc::close(fd) };\n            return Err(MorlocError::Shm(format!(\"fstat failed for '{}'\", shm_name)));\n        }\n        let created = sb.st_size == 0;\n        if created {\n            let err = unsafe { preallocate_fd(fd, full_size as i64) };\n            if err == 0 {\n                return Ok((fd, true, shm_name.to_string(), full_size));\n            }\n            // /dev/shm too small, clean up and try file-backed\n            unsafe {\n                libc::close(fd);\n                libc::shm_unlink(name_cstr.as_ptr());\n            }\n        } else {\n            return Ok((fd, false, shm_name.to_string(), sb.st_size as usize));\n        }\n    }\n\n    // Try file-backed fallback\n    let fb = FALLBACK_DIR.lock().unwrap();\n    let fallback = get_cstr_buf(&fb);\n    if fallback.is_empty() {\n        return Err(MorlocError::Shm(format!(\n            \"Failed to allocate SHM '{}': /dev/shm too small and no fallback directory\",\n            shm_name\n        )));\n    }\n    let file_path = format!(\"{}/{}\", fallback, shm_name);\n    drop(fb);\n\n    let path_cstr = std::ffi::CString::new(file_path.as_str()).unwrap();\n    let fd = unsafe { libc::open(path_cstr.as_ptr(), libc::O_RDWR | libc::O_CREAT, 0o666) };\n    if fd == -1 {\n        return Err(MorlocError::Shm(format!(\n            \"Failed to create file-backed volume '{}'\",\n            file_path\n        )));\n    }\n\n    let mut sb: libc::stat = unsafe { std::mem::zeroed() };\n    if unsafe { libc::fstat(fd, &mut sb) } == -1 {\n        unsafe { libc::close(fd) };\n        return Err(MorlocError::Shm(format!(\"fstat failed for '{}'\", file_path)));\n    }\n    let created = sb.st_size == 0;\n    let actual_size = if created {\n        let err = unsafe { preallocate_fd(fd, full_size as i64) };\n        if err != 0 {\n            unsafe {\n                libc::close(fd);\n                libc::unlink(path_cstr.as_ptr());\n            }\n            return Err(MorlocError::Shm(format!(\n                \"Failed to allocate file-backed volume '{}' ({} bytes)\",\n                file_path, full_size\n            )));\n        }\n        full_size\n    } else {\n        sb.st_size as usize\n    };\n\n    Ok((fd, created, file_path, actual_size))\n}\n\nfn shmalloc_unlocked(size: usize) -> Result<AbsPtr, MorlocError> {\n    let mut shm: *mut ShmHeader = std::ptr::null_mut();\n    let blk = find_free_block(size, &mut shm)?;\n\n    // Split and claim\n    let final_blk = split_block(shm, blk, size)?;\n    // SAFETY: final_blk is a valid BlockHeader in mmap'd SHM found by find_free_block.\n    // The data region starts immediately after the header.\n    unsafe {\n        (*final_blk).reference_count.store(1, Ordering::Release);\n        Ok((final_blk as *mut u8).add(std::mem::size_of::<BlockHeader>()))\n    }\n}\n\nfn shfree_unlocked(ptr: AbsPtr) -> Result<(), MorlocError> {\n    if ptr.is_null() {\n        return Err(MorlocError::Shm(\"Cannot free NULL pointer\".into()));\n    }\n    // SAFETY: ptr was returned by shmalloc, which places a BlockHeader\n    // immediately before the data. Magic check validates correctness.\n    let blk = unsafe {\n        &*(ptr.sub(std::mem::size_of::<BlockHeader>()) as *const BlockHeader)\n    };\n    if blk.magic != BLK_MAGIC {\n        return Err(MorlocError::Shm(\"Corrupted memory\".into()));\n    }\n    if blk.reference_count.load(Ordering::Acquire) == 0 {\n        return Err(MorlocError::Shm(\"Reference count already 0\".into()));\n    }\n    let prev = blk.reference_count.fetch_sub(1, Ordering::AcqRel);\n    if prev == 1 {\n        // SAFETY: ptr points to blk.size bytes of SHM data we own (refcount just hit 0).\n        unsafe {\n            std::ptr::write_bytes(ptr, 0, blk.size);\n        }\n    }\n    Ok(())\n}\n\nfn find_free_block(\n    size: usize,\n    shm_out: &mut *mut ShmHeader,\n) -> Result<*mut BlockHeader, MorlocError> {\n    let cv = CURRENT_VOLUME.load(Ordering::Relaxed);\n    let vols = VOLUMES.lock().unwrap();\n\n    // Try current volume first\n    let shm = vols[cv].ptr();\n    if !shm.is_null() {\n        if let Some(blk) = find_free_block_in_volume(shm, size)? {\n            *shm_out = shm;\n            return Ok(blk);\n        }\n    }\n\n    // Search all volumes\n    for i in 0..MAX_VOLUME_NUMBER {\n        let shm = vols[i].ptr();\n        if shm.is_null() {\n            // Create a new volume\n            drop(vols);\n            let new_size = std::cmp::max(size * 2, 0xffff);\n            let basename = {\n                let cb = COMMON_BASENAME.lock().unwrap();\n                get_cstr_buf(&cb).to_string()\n            };\n            let new_shm = shinit(&basename, i, new_size)?;\n            CURRENT_VOLUME.store(i, Ordering::Relaxed);\n            *shm_out = new_shm;\n            let blk = unsafe {\n                (new_shm as *mut u8).add(std::mem::size_of::<ShmHeader>()) as *mut BlockHeader\n            };\n            return Ok(blk);\n        }\n\n        if let Some(blk) = find_free_block_in_volume(shm, size)? {\n            CURRENT_VOLUME.store(i, Ordering::Relaxed);\n            *shm_out = shm;\n            return Ok(blk);\n        }\n    }\n\n    Err(MorlocError::Shm(format!(\n        \"Could not find suitable block for {} bytes\",\n        size\n    )))\n}\n\nfn find_free_block_in_volume(\n    shm: *mut ShmHeader,\n    size: usize,\n) -> Result<Option<*mut BlockHeader>, MorlocError> {\n    unsafe {\n        let shm_end = (shm as *const u8)\n            .add(std::mem::size_of::<ShmHeader>())\n            .add((*shm).volume_size);\n\n        shm_lock(&(*shm).lock)?;\n\n        // Try cursor position first\n        let cursor = (*shm).cursor;\n        if cursor != VOLNULL {\n            let blk = vol2abs_raw(cursor, shm);\n            let blk = blk as *mut BlockHeader;\n            if (*blk).magic == BLK_MAGIC\n                && (*blk).reference_count.load(Ordering::Relaxed) == 0\n                && (*blk).size >= size\n            {\n                shm_unlock(&(*shm).lock);\n                return Ok(Some(blk));\n            }\n        }\n\n        // Scan from cursor forward\n        let start_blk = if cursor != VOLNULL {\n            vol2abs_raw(cursor, shm) as *mut BlockHeader\n        } else {\n            vol2abs_raw(0, shm) as *mut BlockHeader\n        };\n\n        if let Some(blk) = scan_volume(start_blk, size, shm_end as *const u8) {\n            shm_unlock(&(*shm).lock);\n            return Ok(Some(blk));\n        }\n\n        // Wrap around: scan from beginning to cursor\n        if cursor > 0 {\n            let first_blk = vol2abs_raw(0, shm) as *mut BlockHeader;\n            let cursor_end = vol2abs_raw(cursor, shm);\n            if let Some(blk) = scan_volume(first_blk, size, cursor_end as *const u8) {\n                shm_unlock(&(*shm).lock);\n                return Ok(Some(blk));\n            }\n        }\n\n        shm_unlock(&(*shm).lock);\n        Ok(None)\n    }\n}\n\n/// Scan a volume region for a free block of at least `size` bytes, merging adjacent free blocks.\n///\n/// # Safety\n/// `blk` must point to a valid BlockHeader within an mmap'd SHM volume.\n/// `end` must point to the byte past the end of the volume's data region.\nunsafe fn scan_volume(\n    mut blk: *mut BlockHeader,\n    size: usize,\n    end: *const u8,\n) -> Option<*mut BlockHeader> {\n    let hdr_size = std::mem::size_of::<BlockHeader>();\n    while (blk as *const u8).add(hdr_size + size) <= end {\n        if blk.is_null() || (*blk).magic != BLK_MAGIC {\n            return None;\n        }\n\n        // Merge adjacent free blocks\n        while (*blk).reference_count.load(Ordering::Relaxed) == 0 {\n            let next = (blk as *mut u8).add(hdr_size + (*blk).size) as *mut BlockHeader;\n            if (next as *const u8) >= end\n                || (*next).magic != BLK_MAGIC\n                || (*next).reference_count.load(Ordering::Relaxed) != 0\n            {\n                break;\n            }\n            (*blk).size += hdr_size + (*next).size;\n        }\n\n        if (*blk).reference_count.load(Ordering::Relaxed) == 0 && (*blk).size >= size {\n            return Some(blk);\n        }\n\n        blk = (blk as *mut u8).add(hdr_size + (*blk).size) as *mut BlockHeader;\n    }\n    None\n}\n\nfn split_block(\n    shm: *mut ShmHeader,\n    blk: *mut BlockHeader,\n    size: usize,\n) -> Result<*mut BlockHeader, MorlocError> {\n    unsafe {\n        if (*blk).size == size {\n            return Ok(blk);\n        }\n\n        shm_lock(&(*shm).lock)?;\n\n        let remaining = (*blk).size - size;\n        (*blk).size = size;\n\n        let hdr_size = std::mem::size_of::<BlockHeader>();\n        let new_free = (blk as *mut u8).add(hdr_size + size) as *mut BlockHeader;\n\n        if remaining > hdr_size {\n            (*new_free).magic = BLK_MAGIC;\n            (*new_free).reference_count = AtomicU32::new(0);\n            (*new_free).size = remaining - hdr_size;\n\n            // Update cursor\n            let data_start = (shm as *const u8).add(std::mem::size_of::<ShmHeader>());\n            (*shm).cursor = (new_free as *const u8).offset_from(data_start) as VolPtr;\n        } else {\n            (*blk).size += remaining;\n            (*shm).cursor = VOLNULL;\n        }\n\n        shm_unlock(&(*shm).lock);\n        Ok(blk)\n    }\n}\n\n/// Convert a volume-local offset to an absolute pointer.\n///\n/// # Safety\n/// `shm` must be a valid mmap'd ShmHeader. `ptr` must be within the volume's data region.\n#[inline]\nunsafe fn vol2abs_raw(ptr: VolPtr, shm: *const ShmHeader) -> *mut u8 {\n    (shm as *const u8)\n        .add(std::mem::size_of::<ShmHeader>())\n        .add(ptr as usize) as *mut u8\n}\n\n// ── Futex-based lock ───────────────────────────────────────────────────────\n\n/// Acquire a futex-based cross-process lock on shared memory.\n///\n/// # Safety\n/// `lock` must point to an AtomicU32 in mmap'd shared memory that\n/// persists for the duration of the lock. The caller must call shm_unlock\n/// on the same lock when done.\npub unsafe fn shm_lock(lock: &AtomicU32) -> Result<(), MorlocError> {\n    if lock\n        .compare_exchange_weak(LOCK_UNLOCKED, LOCK_LOCKED, Ordering::Acquire, Ordering::Relaxed)\n        .is_ok()\n    {\n        return Ok(());\n    }\n\n    for _ in 0..SPIN_LIMIT {\n        std::hint::spin_loop();\n        if lock\n            .compare_exchange_weak(LOCK_UNLOCKED, LOCK_LOCKED, Ordering::Acquire, Ordering::Relaxed)\n            .is_ok()\n        {\n            return Ok(());\n        }\n    }\n\n    shm_lock_slow(lock)\n}\n\n#[cfg(target_os = \"linux\")]\nunsafe fn shm_lock_slow(lock: &AtomicU32) -> Result<(), MorlocError> {\n    let timeout = libc::timespec {\n        tv_sec: LOCK_TIMEOUT_SECS as i64,\n        tv_nsec: 0,\n    };\n\n    loop {\n        let ptr = lock as *const AtomicU32 as *const u32;\n        libc::syscall(\n            libc::SYS_futex, ptr, libc::FUTEX_WAIT, LOCK_LOCKED,\n            &timeout as *const libc::timespec, std::ptr::null::<u32>(), 0u32,\n        );\n\n        if lock\n            .compare_exchange_weak(LOCK_UNLOCKED, LOCK_LOCKED, Ordering::Acquire, Ordering::Relaxed)\n            .is_ok()\n        {\n            return Ok(());\n        }\n\n        if lock.load(Ordering::Relaxed) == LOCK_LOCKED {\n            if lock\n                .compare_exchange(LOCK_LOCKED, LOCK_UNLOCKED, Ordering::AcqRel, Ordering::Relaxed)\n                .is_ok()\n            {\n                if lock\n                    .compare_exchange(LOCK_UNLOCKED, LOCK_LOCKED, Ordering::Acquire, Ordering::Relaxed)\n                    .is_ok()\n                {\n                    return Ok(());\n                }\n            }\n        }\n    }\n}\n\n/// macOS fallback: spin-yield loop (no futex available).\n#[cfg(target_os = \"macos\")]\nunsafe fn shm_lock_slow(lock: &AtomicU32) -> Result<(), MorlocError> {\n    loop {\n        std::thread::yield_now();\n        if lock\n            .compare_exchange_weak(LOCK_UNLOCKED, LOCK_LOCKED, Ordering::Acquire, Ordering::Relaxed)\n            .is_ok()\n        {\n            return Ok(());\n        }\n    }\n}\n\n/// Release a futex-based cross-process lock on shared memory.\n///\n/// # Safety\n/// `lock` must be the same AtomicU32 previously acquired via shm_lock.\npub unsafe fn shm_unlock(lock: &AtomicU32) {\n    lock.store(LOCK_UNLOCKED, Ordering::Release);\n    #[cfg(target_os = \"linux\")]\n    {\n        let ptr = lock as *const AtomicU32 as *const u32;\n        libc::syscall(\n            libc::SYS_futex, ptr, libc::FUTEX_WAKE, 1,\n            std::ptr::null::<libc::timespec>(), std::ptr::null::<u32>(), 0u32,\n        );\n    }\n    // macOS: no futex wake needed; spin-yield waiters will see the store.\n}\n\n// ── Pointer conversion helpers ─────────────────────────────────────────────\n\n#[inline]\npub fn vol2rel(ptr: VolPtr, shm: &ShmHeader) -> RelPtr {\n    shm.relative_offset as RelPtr + ptr\n}\n\n/// # Safety\n/// `shm` must be a valid mmap'd ShmHeader. `ptr` must be within the volume's data region.\n#[inline]\npub unsafe fn vol2abs(ptr: VolPtr, shm: *const ShmHeader) -> AbsPtr {\n    vol2abs_raw(ptr, shm)\n}\n\n// ── Tests ──────────────────────────────────────────────────────────────────\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn test_block_header_no_padding() {\n        assert_eq!(\n            std::mem::size_of::<BlockHeader>(),\n            4 + 4 + std::mem::size_of::<usize>()\n        );\n    }\n\n    #[test]\n    fn test_align_up() {\n        assert_eq!(align_up(0, 8), 0);\n        assert_eq!(align_up(1, 8), 8);\n        assert_eq!(align_up(7, 8), 8);\n        assert_eq!(align_up(8, 8), 8);\n        assert_eq!(align_up(9, 8), 16);\n    }\n\n    #[test]\n    fn test_pointer_constants() {\n        assert_eq!(RELNULL, -1);\n        assert_eq!(VOLNULL, -1);\n    }\n\n    #[test]\n    fn test_lock_unlock() {\n        let lock = AtomicU32::new(LOCK_UNLOCKED);\n        unsafe {\n            shm_lock(&lock).unwrap();\n            assert_eq!(lock.load(Ordering::Relaxed), LOCK_LOCKED);\n            shm_unlock(&lock);\n            assert_eq!(lock.load(Ordering::Relaxed), LOCK_UNLOCKED);\n        }\n    }\n\n    #[test]\n    fn test_array_struct_size() {\n        assert_eq!(\n            std::mem::size_of::<Array>(),\n            std::mem::size_of::<usize>() + std::mem::size_of::<RelPtr>()\n        );\n    }\n\n    #[test]\n    fn test_shinit_and_shmalloc() {\n        // Use file-backed SHM via tmpdir to avoid /dev/shm permission issues in test\n        let tmpdir = std::env::temp_dir();\n        let test_dir = tmpdir.join(format!(\"morloc_test_{}\", std::process::id()));\n        std::fs::create_dir_all(&test_dir).unwrap();\n        shm_set_fallback_dir(test_dir.to_str().unwrap());\n\n        let basename = format!(\"test_shm_{}\", std::process::id());\n        let shm = shinit(&basename, 0, 4096).unwrap();\n        assert!(!shm.is_null());\n        assert_eq!(unsafe { (*shm).magic }, SHM_MAGIC);\n\n        // Allocate some memory\n        let ptr1 = shmalloc(64).unwrap();\n        assert!(!ptr1.is_null());\n\n        // Write and read back\n        unsafe {\n            std::ptr::write_bytes(ptr1, 0xAB, 64);\n            assert_eq!(*ptr1, 0xAB);\n        }\n\n        // Convert to relptr and back\n        let rel = abs2rel(ptr1).unwrap();\n        assert!(rel >= 0);\n        let abs = rel2abs(rel).unwrap();\n        assert_eq!(abs, ptr1);\n\n        // Free\n        shfree(ptr1).unwrap();\n\n        // Cleanup\n        shclose().unwrap();\n        let _ = std::fs::remove_dir_all(&test_dir);\n    }\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/slurm_ffi.rs",
    "content": "//! C ABI wrappers for SLURM job submission.\n//! Replaces slurm.c.\n\nuse std::ffi::{c_char, c_void, CStr, CString};\nuse std::ptr;\n\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\nconst MAX_SLURM_COMMAND_LENGTH: usize = 1024;\nconst DEFAULT_XXHASH_SEED: u64 = 0;\n\n// ── C-compatible types ───────────────────────────────────────────────────────\n\n#[repr(C)]\npub struct Resources {\n    pub memory: i32,  // in Gb\n    pub time: i32,    // walltime in seconds\n    pub cpus: i32,\n    pub gpus: i32,\n}\n\n// ── parse_slurm_time ─────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn parse_slurm_time(\n    time_str: *const c_char,\n    errmsg: *mut *mut c_char,\n) -> usize {\n    clear_errmsg(errmsg);\n    let s = CStr::from_ptr(time_str).to_string_lossy();\n\n    let mut days: i32 = 0;\n    let hours: i32;\n    let minutes: i32;\n    let seconds: i32;\n\n    // Try D-HH:MM:SS format\n    if let Some(dash_pos) = s.find('-') {\n        days = match s[..dash_pos].parse() {\n            Ok(d) => d,\n            Err(_) => {\n                set_errmsg(errmsg, &MorlocError::Other(format!(\"Failed to scan slurm walltime string '{}'\", s)));\n                return 0;\n            }\n        };\n        let rest = &s[dash_pos + 1..];\n        let parts: Vec<&str> = rest.split(':').collect();\n        if parts.len() != 3 {\n            set_errmsg(errmsg, &MorlocError::Other(format!(\"Failed to scan slurm walltime string '{}'\", s)));\n            return 0;\n        }\n        hours = parts[0].parse().unwrap_or(-1);\n        minutes = parts[1].parse().unwrap_or(-1);\n        seconds = parts[2].parse().unwrap_or(-1);\n    } else {\n        // Try HH:MM:SS format\n        let parts: Vec<&str> = s.split(':').collect();\n        if parts.len() != 3 {\n            set_errmsg(errmsg, &MorlocError::Other(format!(\"Failed to scan slurm walltime string '{}'\", s)));\n            return 0;\n        }\n        hours = parts[0].parse().unwrap_or(-1);\n        minutes = parts[1].parse().unwrap_or(-1);\n        seconds = parts[2].parse().unwrap_or(-1);\n    }\n\n    if days < 0 || hours < 0 || minutes < 0 || seconds < 0 {\n        set_errmsg(errmsg, &MorlocError::Other(format!(\"Negative time component in '{}'\", s)));\n        return 0;\n    }\n    if hours > 23 || minutes > 59 || seconds > 59 {\n        set_errmsg(errmsg, &MorlocError::Other(format!(\"Invalid time component in '{}' (HH<=23 MM<=59 SS<=59)\", s)));\n        return 0;\n    }\n    if days > 3650 {\n        set_errmsg(errmsg, &MorlocError::Other(\"Do you really want to run this job for more than 10 years?\".into()));\n        return 0;\n    }\n\n    (seconds + 60 * minutes + 60 * 60 * hours + 60 * 60 * 24 * days) as usize\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn write_slurm_time(seconds: i32) -> *mut c_char {\n    let mut rem = seconds;\n    let days = rem / (60 * 60 * 24);\n    rem -= days * 60 * 60 * 24;\n    let hours = rem / (60 * 60);\n    rem -= hours * 60 * 60;\n    let minutes = rem / 60;\n    rem -= minutes * 60;\n\n    let s = format!(\"{}-{:02}:{:02}:{:02}\", days, hours, minutes, rem);\n    match CString::new(s) {\n        Ok(cs) => cs.into_raw(),\n        Err(_) => ptr::null_mut(),\n    }\n}\n\n// ── parse_morloc_call_arguments ──────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn parse_morloc_call_arguments(\n    packet: *mut u8,\n    args: *mut *mut u8,\n    nargs: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    *nargs = 0;\n\n    let header = &*(packet as *const crate::packet::PacketHeader);\n    let packet_size = 32 + header.offset as usize + header.length as usize;\n\n    if header.command_type() != crate::packet::PACKET_TYPE_CALL {\n        set_errmsg(errmsg, &MorlocError::Packet(\"Unexpected packet type (BUG)\".into()));\n        return false;\n    }\n\n    // First pass: count args\n    let mut pos = 32 + header.offset as usize;\n    while pos < packet_size {\n        let arg_header = &*(packet.add(pos) as *const crate::packet::PacketHeader);\n        pos += 32 + arg_header.offset as usize + arg_header.length as usize;\n        *nargs += 1;\n    }\n\n    // Second pass: set pointers\n    pos = 32 + header.offset as usize;\n    for i in 0..*nargs {\n        *args.add(i) = packet.add(pos);\n        let arg_header = &*(packet.add(pos) as *const crate::packet::PacketHeader);\n        pos += 32 + arg_header.offset as usize + arg_header.length as usize;\n    }\n\n    true\n}\n\n// ── slurm_job_is_complete ────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn slurm_job_is_complete(job_id: u32) -> bool {\n    let cmd = format!(\"sacct -j {} --format=State --noheader\\0\", job_id);\n    let sacct = libc::popen(cmd.as_ptr() as *const c_char, b\"r\\0\".as_ptr() as *const c_char);\n    if sacct.is_null() { return false; }\n\n    let mut state = [0u8; 64];\n    let mut done = false;\n    while !libc::fgets(state.as_mut_ptr() as *mut c_char, 64, sacct).is_null() {\n        let s = std::str::from_utf8(&state).unwrap_or(\"\");\n        if s.contains(\"COMPLETED\") || s.contains(\"FAILED\") || s.contains(\"CANCELLED\") {\n            done = true;\n            break;\n        }\n    }\n    libc::pclose(sacct);\n    done\n}\n\n// ── shell_escape ─────────────────────────────────────────────────────────────\n\nfn shell_escape(input: &str) -> String {\n    let mut out = String::with_capacity(input.len() + 10);\n    out.push('\\'');\n    for ch in input.chars() {\n        if ch == '\\'' {\n            out.push_str(\"'\\\\''\");\n        } else {\n            out.push(ch);\n        }\n    }\n    out.push('\\'');\n    out\n}\n\n// ── submit_morloc_slurm_job ──────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn submit_morloc_slurm_job(\n    nexus_path: *const c_char,\n    socket_basename: *const c_char,\n    call_packet_filename: *const c_char,\n    result_cache_filename: *const c_char,\n    output_filename: *const c_char,\n    error_filename: *const c_char,\n    resources: *const Resources,\n    errmsg: *mut *mut c_char,\n) -> u32 {\n    clear_errmsg(errmsg);\n\n    macro_rules! check_null {\n        ($ptr:expr, $name:expr) => {\n            if $ptr.is_null() {\n                set_errmsg(errmsg, &MorlocError::Other(format!(\"{} undefined\", $name)));\n                return 0;\n            }\n        };\n    }\n    check_null!(nexus_path, \"nexus path\");\n    check_null!(socket_basename, \"socket basename\");\n    check_null!(call_packet_filename, \"call packet filename\");\n    check_null!(result_cache_filename, \"result cache filename\");\n    check_null!(output_filename, \"slurm output filename\");\n    check_null!(error_filename, \"slurm error filename\");\n\n    let res = &*resources;\n    let nexus = CStr::from_ptr(nexus_path).to_string_lossy();\n    let call = CStr::from_ptr(call_packet_filename).to_string_lossy();\n    let socket = CStr::from_ptr(socket_basename).to_string_lossy();\n    let result_cache = CStr::from_ptr(result_cache_filename).to_string_lossy();\n    let output = CStr::from_ptr(output_filename).to_string_lossy();\n    let error = CStr::from_ptr(error_filename).to_string_lossy();\n\n    let time_str_raw = write_slurm_time(res.time);\n    let time_str = CStr::from_ptr(time_str_raw).to_string_lossy().into_owned();\n    libc::free(time_str_raw as *mut c_void);\n\n    let mem_arg = format!(\"--mem={}G\", res.memory);\n    let time_arg = format!(\"--time={}\", time_str);\n    let cpus_arg = format!(\"--cpus-per-task={}\", res.cpus);\n    let gpus_arg = format!(\"--gres=gpu:{}\", res.gpus);\n\n    let esc_nexus = shell_escape(&nexus);\n    let esc_call = shell_escape(&call);\n    let esc_socket = shell_escape(&socket);\n    let esc_result = shell_escape(&result_cache);\n\n    let wrap_cmd = format!(\n        \"{} --call-packet {} --socket-base {} --output-file {} --output-form packet\",\n        esc_nexus, esc_call, esc_socket, esc_result\n    );\n\n    if wrap_cmd.len() >= MAX_SLURM_COMMAND_LENGTH {\n        set_errmsg(errmsg, &MorlocError::Other(\"Wrap command too long\".into()));\n        return 0;\n    }\n\n    let wrap_arg = format!(\"--wrap={}\", wrap_cmd);\n\n    // Fork/exec sbatch\n    let mut pipefd = [0i32; 2];\n    if libc::pipe(pipefd.as_mut_ptr()) == -1 {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to create pipe for sbatch\".into()));\n        return 0;\n    }\n\n    let pid = libc::fork();\n    if pid == -1 {\n        libc::close(pipefd[0]);\n        libc::close(pipefd[1]);\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to fork for sbatch\".into()));\n        return 0;\n    }\n\n    if pid == 0 {\n        // Child\n        libc::close(pipefd[0]);\n        libc::dup2(pipefd[1], libc::STDOUT_FILENO);\n        libc::close(pipefd[1]);\n\n        let sbatch = CString::new(\"sbatch\").unwrap();\n        let parsable = CString::new(\"--parsable\").unwrap();\n        let o_flag = CString::new(\"-o\").unwrap();\n        let e_flag = CString::new(\"-e\").unwrap();\n        let c_output = CString::new(output.as_ref()).unwrap();\n        let c_error = CString::new(error.as_ref()).unwrap();\n        let c_mem = CString::new(mem_arg).unwrap();\n        let c_time = CString::new(time_arg).unwrap();\n        let c_cpus = CString::new(cpus_arg).unwrap();\n        let c_gpus = CString::new(gpus_arg).unwrap();\n        let c_wrap = CString::new(wrap_arg).unwrap();\n\n        libc::execlp(\n            sbatch.as_ptr(),\n            sbatch.as_ptr(),\n            parsable.as_ptr(),\n            o_flag.as_ptr(), c_output.as_ptr(),\n            e_flag.as_ptr(), c_error.as_ptr(),\n            c_mem.as_ptr(), c_time.as_ptr(), c_cpus.as_ptr(), c_gpus.as_ptr(),\n            c_wrap.as_ptr(),\n            ptr::null::<c_char>(),\n        );\n        libc::_exit(127);\n    }\n\n    // Parent\n    libc::close(pipefd[1]);\n\n    let mut buf = [0u8; 64];\n    let nread = libc::read(pipefd[0], buf.as_mut_ptr() as *mut c_void, 63);\n    libc::close(pipefd[0]);\n\n    let mut status: i32 = 0;\n    libc::waitpid(pid, &mut status, 0);\n\n    if !libc::WIFEXITED(status) || libc::WEXITSTATUS(status) != 0 {\n        set_errmsg(errmsg, &MorlocError::Other(\"sbatch exited with error\".into()));\n        return 0;\n    }\n    if nread <= 0 {\n        set_errmsg(errmsg, &MorlocError::Other(\"Failed to read sbatch output\".into()));\n        return 0;\n    }\n\n    let output_str = std::str::from_utf8(&buf[..nread as usize]).unwrap_or(\"\");\n    match output_str.trim().parse::<u32>() {\n        Ok(job_id) => job_id,\n        Err(_) => {\n            set_errmsg(errmsg, &MorlocError::Other(\"Failed to parse job ID from sbatch output\".into()));\n            0\n        }\n    }\n}\n\n// ── remote_call ──────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn remote_call(\n    midx: i32,\n    socket_basename: *const c_char,\n    cache_path: *const c_char,\n    resources: *const Resources,\n    arg_packets: *const *const u8,\n    nargs: usize,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n\n    // Use extern C declarations for functions from other modules\n    extern \"C\" {\n        fn read_schema_from_packet_meta(packet: *const u8, errmsg: *mut *mut c_char) -> *mut c_char;\n        fn parse_schema(schema_str: *const c_char, errmsg: *mut *mut c_char) -> *mut crate::cschema::CSchema;\n        fn free_schema(schema: *mut crate::cschema::CSchema);\n        fn get_morloc_data_packet_value(data: *const u8, schema: *const crate::cschema::CSchema, errmsg: *mut *mut c_char) -> *mut u8;\n        fn hash_voidstar(data: *const u8, schema: *const crate::cschema::CSchema, seed: u64, hash: *mut u64, errmsg: *mut *mut c_char) -> bool;\n        fn mix(a: u64, b: u64) -> u64;\n        fn mkdir_p(path: *const c_char, errmsg: *mut *mut c_char) -> i32;\n        fn check_cache_packet(key: u64, cache_path: *const c_char, errmsg: *mut *mut c_char) -> *mut c_char;\n        fn get_cache_packet(key: u64, cache_path: *const c_char, errmsg: *mut *mut c_char) -> *mut u8;\n        fn put_cache_packet(data: *const u8, schema: *const crate::cschema::CSchema, key: u64, cache_path: *const c_char, errmsg: *mut *mut c_char) -> *mut c_char;\n        fn make_cache_filename(hash: u64, cache_path: *const c_char, errmsg: *mut *mut c_char) -> *mut c_char;\n        fn make_cache_filename_ext(hash: u64, cache_path: *const c_char, ext: *const c_char, errmsg: *mut *mut c_char) -> *mut c_char;\n        fn make_morloc_remote_call_packet(midx: u32, arg_packets: *const *const u8, nargs: usize, errmsg: *mut *mut c_char) -> *mut u8;\n        fn morloc_packet_size(packet: *const u8, errmsg: *mut *mut c_char) -> usize;\n        fn read_binary_file(filename: *const c_char, file_size: *mut usize, errmsg: *mut *mut c_char) -> *mut u8;\n        fn write_atomic(filename: *const c_char, data: *const u8, size: usize, errmsg: *mut *mut c_char) -> i32;\n        fn get_morloc_data_packet_error_message(data: *const u8, errmsg: *mut *mut c_char) -> *mut c_char;\n    }\n\n    let seed = midx as u64;\n    let mut err: *mut c_char = ptr::null_mut();\n\n    // Cleanup tracking\n    let mut return_packet: *mut u8 = ptr::null_mut();\n    let mut arg_hashes: Vec<u64> = vec![0; nargs];\n    let mut arg_voidstars: Vec<*mut u8> = vec![ptr::null_mut(); nargs];\n    let mut arg_schemas: Vec<*mut crate::cschema::CSchema> = vec![ptr::null_mut(); nargs];\n    let mut cached_arg_filenames: Vec<*mut c_char> = vec![ptr::null_mut(); nargs];\n    let mut cached_arg_packets: Vec<*mut u8> = vec![ptr::null_mut(); nargs];\n\n    let mut function_hash = mix(seed, DEFAULT_XXHASH_SEED);\n\n    // Hash each argument\n    for i in 0..nargs {\n        let schema_str = read_schema_from_packet_meta(*arg_packets.add(i), &mut err);\n        if schema_str.is_null() || !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n\n        arg_schemas[i] = parse_schema(schema_str, &mut err);\n        if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n\n        arg_voidstars[i] = get_morloc_data_packet_value(*arg_packets.add(i), arg_schemas[i], &mut err);\n        if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n\n        let mut h: u64 = 0;\n        hash_voidstar(arg_voidstars[i], arg_schemas[i], DEFAULT_XXHASH_SEED, &mut h, &mut err);\n        if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n        arg_hashes[i] = h;\n\n        function_hash = mix(function_hash, h);\n    }\n\n    mkdir_p(cache_path, &mut err);\n    if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n\n    // Check if result is cached\n    let mut result_cache_filename = check_cache_packet(function_hash, cache_path, &mut err);\n    if !err.is_null() { libc::free(err as *mut c_void); err = ptr::null_mut(); }\n\n    if !result_cache_filename.is_null() {\n        return_packet = get_cache_packet(function_hash, cache_path, &mut err);\n        if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n    } else {\n        result_cache_filename = make_cache_filename(function_hash, cache_path, &mut err);\n        if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n\n        // Cache arguments\n        for i in 0..nargs {\n            cached_arg_filenames[i] = check_cache_packet(arg_hashes[i], cache_path, &mut err);\n            if cached_arg_filenames[i].is_null() {\n                if !err.is_null() { libc::free(err as *mut c_void); err = ptr::null_mut(); }\n                cached_arg_filenames[i] = put_cache_packet(arg_voidstars[i], arg_schemas[i], arg_hashes[i], cache_path, &mut err);\n                if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n            }\n        }\n\n        // Read cached arg packets\n        for i in 0..nargs {\n            let mut file_size: usize = 0;\n            cached_arg_packets[i] = read_binary_file(cached_arg_filenames[i], &mut file_size, &mut err);\n            if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n        }\n\n        // Build call packet\n        let cached_ptrs: Vec<*const u8> = cached_arg_packets.iter().map(|p| *p as *const u8).collect();\n        let call_packet = make_morloc_remote_call_packet(midx as u32, cached_ptrs.as_ptr(), nargs, &mut err);\n        if !err.is_null() {\n            libc::free(call_packet as *mut c_void);\n            goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet);\n        }\n\n        let call_packet_size = morloc_packet_size(call_packet, &mut err);\n\n        // Hash call packet with xxhash\n        let call_packet_hash = crate::hash::xxh64_with_seed(std::slice::from_raw_parts(call_packet, call_packet_size), DEFAULT_XXHASH_SEED);\n\n        let call_ext = CString::new(\"-call.dat\").unwrap();\n        let call_packet_filename = make_cache_filename_ext(call_packet_hash, cache_path, call_ext.as_ptr(), &mut err);\n\n        // Write call packet to disk\n        write_atomic(call_packet_filename, call_packet, call_packet_size, &mut err);\n        libc::free(call_packet as *mut c_void);\n\n        let out_ext = CString::new(\".out\").unwrap();\n        let err_ext = CString::new(\".err\").unwrap();\n        let output_filename = make_cache_filename_ext(function_hash, cache_path, out_ext.as_ptr(), &mut err);\n        let error_filename = make_cache_filename_ext(function_hash, cache_path, err_ext.as_ptr(), &mut err);\n\n        // Submit SLURM job\n        let nexus_c = CString::new(\"./nexus\").unwrap();\n        let pid = submit_morloc_slurm_job(\n            nexus_c.as_ptr(), socket_basename, call_packet_filename,\n            result_cache_filename, output_filename, error_filename,\n            resources, &mut err,\n        );\n\n        libc::free(call_packet_filename as *mut c_void);\n        libc::free(output_filename as *mut c_void);\n        libc::free(error_filename as *mut c_void);\n\n        if !err.is_null() { goto_cleanup!(errmsg, err, arg_schemas, cached_arg_filenames, cached_arg_packets, return_packet); }\n\n        // Wait for job completion\n        while !slurm_job_is_complete(pid) {\n            libc::sleep(1);\n        }\n\n        let mut return_packet_size: usize = 0;\n        return_packet = read_binary_file(result_cache_filename, &mut return_packet_size, &mut err);\n\n        let failure = get_morloc_data_packet_error_message(return_packet, &mut err);\n        if !failure.is_null() {\n            libc::fprintf(\n                libc::fdopen(libc::STDERR_FILENO, b\"w\\0\".as_ptr() as *const c_char),\n                b\"Failed, deleting result %s\\n\\0\".as_ptr() as *const c_char,\n                result_cache_filename,\n            );\n            libc::unlink(result_cache_filename);\n            libc::free(failure as *mut c_void);\n        }\n    }\n\n    // Cleanup\n    for i in 0..nargs {\n        if !arg_schemas[i].is_null() { free_schema(arg_schemas[i]); }\n        if !cached_arg_filenames[i].is_null() { libc::free(cached_arg_filenames[i] as *mut c_void); }\n        if !cached_arg_packets[i].is_null() { libc::free(cached_arg_packets[i] as *mut c_void); }\n    }\n    if !result_cache_filename.is_null() { libc::free(result_cache_filename as *mut c_void); }\n\n    return_packet\n}\n\n// Cleanup macro for goto-like pattern\nmacro_rules! goto_cleanup {\n    ($errmsg:expr, $err:expr, $schemas:expr, $filenames:expr, $packets:expr, $return_packet:expr) => {{\n        *$errmsg = $err;\n        for i in 0..$schemas.len() {\n            if !$schemas[i].is_null() {\n                extern \"C\" { fn free_schema(s: *mut crate::cschema::CSchema); }\n                free_schema($schemas[i]);\n            }\n            if !$filenames[i].is_null() { libc::free($filenames[i] as *mut c_void); }\n            if !$packets[i].is_null() { libc::free($packets[i] as *mut c_void); }\n        }\n        return $return_packet;\n    }};\n}\nuse goto_cleanup;\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/utility.rs",
    "content": "//! File I/O and string utility functions.\n//! Replaces utility.c.\n\nuse std::ffi::{c_char, c_void, CStr};\nuse std::io::Write;\nuse std::ptr;\n\nuse crate::error::{clear_errmsg, set_errmsg, MorlocError};\n\n// ── Cross-platform helpers ─────────────────────────────────────────────────\n\n/// Return the current errno value (cross-platform).\n#[cfg(target_os = \"linux\")]\n#[inline]\npub unsafe fn errno_val() -> i32 {\n    *libc::__errno_location()\n}\n\n#[cfg(target_os = \"macos\")]\n#[inline]\npub unsafe fn errno_val() -> i32 {\n    *libc::__error()\n}\n\n/// Suppress SIGPIPE on send(). Linux: per-call flag. macOS: use set_nosigpipe() on the socket.\n#[cfg(target_os = \"linux\")]\npub const SEND_NOSIGNAL: i32 = libc::MSG_NOSIGNAL;\n#[cfg(target_os = \"macos\")]\npub const SEND_NOSIGNAL: i32 = 0;\n\n/// Set SO_NOSIGPIPE on a socket (macOS). No-op on Linux (uses MSG_NOSIGNAL per-call).\n#[allow(unused_variables)]\npub unsafe fn set_nosigpipe(fd: i32) {\n    #[cfg(target_os = \"macos\")]\n    {\n        let val: libc::c_int = 1;\n        libc::setsockopt(\n            fd,\n            libc::SOL_SOCKET,\n            libc::SO_NOSIGPIPE,\n            &val as *const _ as *const libc::c_void,\n            std::mem::size_of::<libc::c_int>() as libc::socklen_t,\n        );\n    }\n}\n\n// ── File operations ────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn file_exists(filename: *const c_char) -> bool {\n    if filename.is_null() {\n        return false;\n    }\n    let path = CStr::from_ptr(filename).to_string_lossy();\n    std::path::Path::new(path.as_ref()).exists()\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn mkdir_p(path: *const c_char, errmsg: *mut *mut c_char) -> i32 {\n    clear_errmsg(errmsg);\n    if path.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL path\".into()));\n        return -1;\n    }\n    let p = CStr::from_ptr(path).to_string_lossy();\n    match std::fs::create_dir_all(p.as_ref()) {\n        Ok(_) => 0,\n        Err(e) => {\n            set_errmsg(\n                errmsg,\n                &MorlocError::Io(e),\n            );\n            -1\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn delete_directory(path: *const c_char) {\n    if path.is_null() {\n        return;\n    }\n    let p = CStr::from_ptr(path).to_string_lossy();\n    let _ = std::fs::remove_dir_all(p.as_ref());\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn has_suffix(x: *const c_char, suffix: *const c_char) -> bool {\n    if x.is_null() || suffix.is_null() {\n        return false;\n    }\n    let xs = CStr::from_ptr(x).to_string_lossy();\n    let ss = CStr::from_ptr(suffix).to_string_lossy();\n    xs.ends_with(ss.as_ref())\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn write_atomic(\n    filename: *const c_char,\n    data: *const u8,\n    size: usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    if filename.is_null() || (data.is_null() && size != 0) {\n        set_errmsg(errmsg, &MorlocError::Other(\"invalid arguments\".into()));\n        return -1;\n    }\n    let path_str = CStr::from_ptr(filename).to_string_lossy();\n    let path = std::path::Path::new(path_str.as_ref());\n\n    // Get parent directory\n    let dir = path.parent().unwrap_or(std::path::Path::new(\".\"));\n\n    // Create temp file in same directory\n    let tmp_path = dir.join(format!(\"morloc-tmp_{}\", std::process::id()));\n\n    let result = (|| -> Result<(), std::io::Error> {\n        // Write to temp file\n        let mut f = std::fs::File::create(&tmp_path)?;\n        if size > 0 {\n            let bytes = std::slice::from_raw_parts(data, size);\n            f.write_all(bytes)?;\n        }\n        f.sync_all()?;\n        drop(f);\n\n        // Atomic rename\n        std::fs::rename(&tmp_path, path)?;\n\n        // Sync parent directory\n        if let Ok(dir_f) = std::fs::File::open(dir) {\n            let _ = dir_f.sync_all();\n        }\n        Ok(())\n    })();\n\n    match result {\n        Ok(_) => 0,\n        Err(e) => {\n            let _ = std::fs::remove_file(&tmp_path);\n            set_errmsg(errmsg, &MorlocError::Io(e));\n            -1\n        }\n    }\n}\n\n// ── Binary I/O ─────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_binary_file(\n    filename: *const c_char,\n    file_size: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    if filename.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL filename\".into()));\n        return ptr::null_mut();\n    }\n    let path = CStr::from_ptr(filename).to_string_lossy();\n    match std::fs::read(path.as_ref()) {\n        Ok(data) => {\n            *file_size = data.len();\n            let buf = libc::malloc(data.len()) as *mut u8;\n            if buf.is_null() {\n                set_errmsg(errmsg, &MorlocError::Other(\"malloc failed\".into()));\n                return ptr::null_mut();\n            }\n            std::ptr::copy_nonoverlapping(data.as_ptr(), buf, data.len());\n            buf\n        }\n        Err(e) => {\n            set_errmsg(errmsg, &MorlocError::Io(e));\n            ptr::null_mut()\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn read_binary_fd(\n    file: *mut libc::FILE,\n    file_size: *mut usize,\n    errmsg: *mut *mut c_char,\n) -> *mut u8 {\n    clear_errmsg(errmsg);\n    if file.is_null() {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL file\".into()));\n        return ptr::null_mut();\n    }\n\n    // Try seek-based size detection\n    if libc::fseek(file, 0, libc::SEEK_END) == 0 {\n        let size = libc::ftell(file) as usize;\n        if size > 0 {\n            libc::rewind(file);\n            let buf = libc::malloc(size) as *mut u8;\n            if buf.is_null() {\n                set_errmsg(errmsg, &MorlocError::Other(\"malloc failed\".into()));\n                return ptr::null_mut();\n            }\n            let read = libc::fread(buf as *mut c_void, 1, size, file);\n            if read == size {\n                *file_size = size;\n                return buf;\n            }\n            libc::free(buf as *mut c_void);\n        }\n    }\n\n    // Streaming read for non-seekable files\n    let chunk_size: usize = 0xffff;\n    let mut buf: *mut u8 = ptr::null_mut();\n    let mut allocated: usize = 0;\n\n    loop {\n        let new_buf = libc::realloc(buf as *mut c_void, allocated + chunk_size) as *mut u8;\n        if new_buf.is_null() {\n            libc::free(buf as *mut c_void);\n            set_errmsg(errmsg, &MorlocError::Other(\"realloc failed\".into()));\n            return ptr::null_mut();\n        }\n        buf = new_buf;\n        let read = libc::fread(buf.add(allocated) as *mut c_void, 1, chunk_size, file);\n        allocated += read;\n\n        if read < chunk_size {\n            if libc::feof(file) != 0 {\n                *file_size = allocated;\n                return buf;\n            }\n            if libc::ferror(file) != 0 {\n                libc::free(buf as *mut c_void);\n                set_errmsg(errmsg, &MorlocError::Other(\"read error\".into()));\n                return ptr::null_mut();\n            }\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn write_binary_fd(\n    fd: i32,\n    buf: *const c_char,\n    count: usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    clear_errmsg(errmsg);\n    let mut total: usize = 0;\n    while total < count {\n        let written = libc::write(fd, buf.add(total) as *const c_void, count - total);\n        if written < 0 {\n            set_errmsg(\n                errmsg,\n                &MorlocError::Other(format!(\"write failed: {}\", std::io::Error::last_os_error())),\n            );\n            return -1;\n        }\n        total += written as usize;\n    }\n    0\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn print_binary(\n    buf: *const c_char,\n    count: usize,\n    errmsg: *mut *mut c_char,\n) -> i32 {\n    write_binary_fd(libc::STDOUT_FILENO, buf, count, errmsg)\n}\n\n// ── Display ────────────────────────────────────────────────────────────────\n\n#[no_mangle]\npub unsafe extern \"C\" fn hex(ptr: *const c_void, size: usize) {\n    if ptr.is_null() || size == 0 {\n        return;\n    }\n    let bytes = std::slice::from_raw_parts(ptr as *const u8, size);\n    for (i, b) in bytes.iter().enumerate() {\n        if i > 0 && i % 8 == 0 {\n            eprint!(\" \");\n        }\n        eprint!(\"{:02X}\", b);\n        if i < size - 1 {\n            eprint!(\" \");\n        }\n    }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn print_hex_dump(\n    data: *const u8,\n    size: usize,\n    errmsg: *mut *mut c_char,\n) -> bool {\n    clear_errmsg(errmsg);\n    if data.is_null() && size > 0 {\n        set_errmsg(errmsg, &MorlocError::Other(\"NULL data\".into()));\n        return false;\n    }\n    let bytes = if size > 0 {\n        std::slice::from_raw_parts(data, size)\n    } else {\n        &[]\n    };\n    for (i, b) in bytes.iter().enumerate() {\n        if i > 0 && i % 4 == 0 {\n            if i % 24 == 0 {\n                println!();\n            } else {\n                print!(\" \");\n            }\n        }\n        print!(\"{:02X}\", b);\n    }\n    if !bytes.is_empty() {\n        println!();\n    }\n    true\n}\n\n// ── xxHash wrapper and mix ─────────────────────────────────────────────────\n\n/// Mix two 64-bit hash values. Matches the C implementation in cache.c.\n#[no_mangle]\npub extern \"C\" fn mix(a: u64, b: u64) -> u64 {\n    const PRIME64_1: u64 = 0x9E3779B185EBCA87;\n    const PRIME64_2: u64 = 0xC2B2AE3D27D4EB4F;\n    let mut a = a ^ b.wrapping_mul(PRIME64_1);\n    a = (a << 31) | (a >> 33);\n    a.wrapping_mul(PRIME64_2)\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn morloc_xxh64(\n    input: *const c_void,\n    length: usize,\n    seed: u64,\n) -> u64 {\n    if input.is_null() || length == 0 {\n        return crate::hash::xxh64_with_seed(&[], seed);\n    }\n    let data = std::slice::from_raw_parts(input as *const u8, length);\n    crate::hash::xxh64_with_seed(data, seed)\n}\n\n// ── String utilities ───────────────────────────────────────────────────────\n\n/// dirname - returns pointer into the input string (modifies it in-place)\n/// Matches the C behavior: returns \".\" for empty/NULL, strips trailing slashes\n#[no_mangle]\npub unsafe extern \"C\" fn dirname(path: *mut c_char) -> *mut c_char {\n    // Return a pointer to the static string \".\" for empty/null paths and paths with no slash.\n    static DOT: [u8; 2] = [b'.', 0];\n    let dot_ptr = DOT.as_ptr() as *mut c_char;\n\n    if path.is_null() || *path == 0 {\n        return dot_ptr;\n    }\n\n    let len = libc::strlen(path);\n    let mut end = path.add(len - 1);\n\n    // Remove trailing slashes\n    while end > path && *end == b'/' as c_char {\n        *end = 0;\n        end = end.sub(1);\n    }\n\n    // Find last slash\n    let last_slash = libc::strrchr(path, b'/' as i32);\n    if last_slash.is_null() {\n        return dot_ptr;\n    }\n    if last_slash == path {\n        *path.add(1) = 0; // root case \"/\"\n    } else {\n        *last_slash = 0;\n    }\n    path\n}\n"
  },
  {
    "path": "data/rust/morloc-runtime/src/voidstar.rs",
    "content": "//! Shared voidstar operations: relptr adjustment, binary serialization,\n//! schema-aware free, and flatten-to-buffer.\n//!\n//! These functions operate on the morloc voidstar binary format in SHM.\n//! They are used by packet.rs, cli.rs, and json.rs.\n\nuse crate::error::MorlocError;\nuse crate::schema::{Schema, SerialType};\nuse crate::shm::{self, AbsPtr, Array, RelPtr, Tensor};\n\n// ── adjust_voidstar_relptrs ────────────────────────────────────────────────\n\n/// Adjust all relptrs in a voidstar blob by adding base_rel.\n/// Used after copying a flattened blob into SHM.\npub fn adjust_relptrs(\n    data: AbsPtr,\n    schema: &Schema,\n    base_rel: RelPtr,\n) -> Result<(), MorlocError> {\n    // SAFETY: data points to a voidstar blob in SHM. We adjust relptrs in-place;\n    // all pointer arithmetic stays within the blob's bounds as defined by schema.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let arr = &mut *(data as *mut Array);\n                arr.data += base_rel;\n                if !schema.parameters.is_empty() && !schema.parameters[0].is_fixed_width() {\n                    let arr_data = shm::rel2abs(arr.data)?;\n                    let w = schema.parameters[0].width;\n                    for i in 0..arr.size {\n                        adjust_relptrs(arr_data.add(i * w), &schema.parameters[0], base_rel)?;\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                for i in 0..schema.parameters.len() {\n                    adjust_relptrs(data.add(schema.offsets[i]), &schema.parameters[i], base_rel)?;\n                }\n            }\n            SerialType::Optional => {\n                if *data != 0 && !schema.parameters.is_empty() {\n                    let off = schema.offsets.first().copied()\n                        .unwrap_or_else(|| shm::align_up(1, schema.parameters[0].alignment().max(1)));\n                    adjust_relptrs(data.add(off), &schema.parameters[0], base_rel)?;\n                }\n            }\n            SerialType::Tensor => {\n                let t = &mut *(data as *mut Tensor);\n                if t.total_elements > 0 {\n                    t.shape += base_rel;\n                    t.data += base_rel;\n                }\n            }\n            _ => {}\n        }\n    }\n    Ok(())\n}\n\n// ── read_voidstar_binary ───────────────────────────────────────────────────\n\n/// Read a flat voidstar binary blob into SHM, adjusting relptrs.\npub fn read_binary(blob: &[u8], schema: &Schema) -> Result<AbsPtr, MorlocError> {\n    let base = shm::shmalloc(blob.len())?;\n    // SAFETY: base is freshly allocated with blob.len() bytes.\n    unsafe { std::ptr::copy_nonoverlapping(blob.as_ptr(), base, blob.len()) };\n    let base_rel = shm::abs2rel(base)?;\n    adjust_relptrs(base, schema, base_rel)?;\n    Ok(base)\n}\n\n// ── shfree_by_schema ───────────────────────────────────────────────────────\n\n/// Zero metadata for nested structures so the parent block can be cleanly freed.\n/// Does NOT call shfree on sub-pointers (they're cursor-packed in the same block).\npub fn free_by_schema(ptr: AbsPtr, schema: &Schema) -> Result<(), MorlocError> {\n    // SAFETY: ptr points to voidstar data in SHM with layout described by schema.\n    // We zero metadata at schema.width offsets within the structure.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let arr = &*(ptr as *const Array);\n                if arr.data > 0 && !schema.parameters.is_empty() && !schema.parameters[0].is_fixed_width() {\n                    let arr_data = shm::rel2abs(arr.data)?;\n                    let w = schema.parameters[0].width;\n                    for i in 0..arr.size {\n                        free_by_schema(arr_data.add(i * w), &schema.parameters[0])?;\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                for i in 0..schema.parameters.len() {\n                    free_by_schema(ptr.add(schema.offsets[i]), &schema.parameters[i])?;\n                }\n            }\n            SerialType::Tensor => {} // inline, freed by parent\n            _ => {}\n        }\n        std::ptr::write_bytes(ptr, 0, schema.width);\n    }\n    Ok(())\n}\n\n// ── flatten_voidstar_to_buffer ─────────────────────────────────────────────\n\n/// Flatten a voidstar structure in SHM into a self-contained byte buffer.\n/// Relptrs in the output are offsets from position 0 of the buffer.\npub fn flatten_to_buffer(data: AbsPtr, schema: &Schema) -> Result<Vec<u8>, MorlocError> {\n    let total = crate::ffi::calc_voidstar_size_inner(data, schema)?;\n    let mut buf = vec![0u8; total];\n\n    // SAFETY: data points to at least schema.width bytes in SHM; buf has total >= schema.width bytes.\n    unsafe { std::ptr::copy_nonoverlapping(data, buf.as_mut_ptr(), schema.width) };\n\n    // Phase 2: fix up relptrs and copy variable-length data\n    let mut cursor = schema.width;\n    flatten_fixup(&mut buf, 0, data, schema, &mut cursor)?;\n\n    Ok(buf)\n}\n\nfn flatten_fixup(\n    buf: &mut [u8],\n    buf_offset: usize,\n    data: AbsPtr,\n    schema: &Schema,\n    cursor: &mut usize,\n) -> Result<(), MorlocError> {\n    // SAFETY: buf is sized by calc_voidstar_size_inner to hold the entire flattened structure.\n    // data points to corresponding SHM data. cursor tracks write position within buf.\n    unsafe {\n        match schema.serial_type {\n            SerialType::String | SerialType::Array => {\n                let orig_arr = &*(data as *const Array);\n                let buf_arr = &mut *(buf.as_mut_ptr().add(buf_offset) as *mut Array);\n                if orig_arr.size == 0 {\n                    buf_arr.data = 0;\n                    return Ok(());\n                }\n                let orig_data = shm::rel2abs(orig_arr.data)?;\n                let elem_schema = &schema.parameters[0];\n                let align = elem_schema.alignment();\n                *cursor = shm::align_up(*cursor, align);\n                buf_arr.data = *cursor as RelPtr;\n                let elem_w = elem_schema.width;\n                let total_bytes = elem_w * orig_arr.size;\n                buf[*cursor..*cursor + total_bytes].copy_from_slice(\n                    std::slice::from_raw_parts(orig_data, total_bytes)\n                );\n                let elem_start = *cursor;\n                *cursor += total_bytes;\n                if !elem_schema.is_fixed_width() {\n                    for i in 0..orig_arr.size {\n                        flatten_fixup(\n                            buf, elem_start + i * elem_w,\n                            orig_data.add(i * elem_w), elem_schema, cursor,\n                        )?;\n                    }\n                }\n            }\n            SerialType::Tuple | SerialType::Map => {\n                for i in 0..schema.parameters.len() {\n                    flatten_fixup(\n                        buf, buf_offset + schema.offsets[i],\n                        data.add(schema.offsets[i]), &schema.parameters[i], cursor,\n                    )?;\n                }\n            }\n            SerialType::Optional => {\n                let tag = *buf.as_ptr().add(buf_offset);\n                if tag != 0 && !schema.parameters.is_empty() {\n                    let off = schema.offsets.first().copied()\n                        .unwrap_or_else(|| shm::align_up(1, schema.parameters[0].alignment().max(1)));\n                    flatten_fixup(\n                        buf, buf_offset + off,\n                        data.add(off), &schema.parameters[0], cursor,\n                    )?;\n                }\n            }\n            SerialType::Tensor => {\n                let orig = &*(data as *const Tensor);\n                let buf_t = &mut *(buf.as_mut_ptr().add(buf_offset) as *mut Tensor);\n                if orig.total_elements == 0 {\n                    buf_t.shape = 0;\n                    buf_t.data = 0;\n                    return Ok(());\n                }\n                let ndim = schema.offsets.first().copied().unwrap_or(0);\n                let orig_shape = shm::rel2abs(orig.shape)?;\n                *cursor = shm::align_up(*cursor, std::mem::align_of::<i64>());\n                buf_t.shape = *cursor as RelPtr;\n                let shape_bytes = ndim * std::mem::size_of::<i64>();\n                buf[*cursor..*cursor + shape_bytes].copy_from_slice(\n                    std::slice::from_raw_parts(orig_shape, shape_bytes)\n                );\n                *cursor += shape_bytes;\n\n                let orig_data = shm::rel2abs(orig.data)?;\n                let elem_w = schema.parameters[0].width;\n                let elem_align = schema.parameters[0].alignment();\n                *cursor = shm::align_up(*cursor, elem_align);\n                buf_t.data = *cursor as RelPtr;\n                let data_bytes = orig.total_elements * elem_w;\n                buf[*cursor..*cursor + data_bytes].copy_from_slice(\n                    std::slice::from_raw_parts(orig_data, data_bytes)\n                );\n                *cursor += data_bytes;\n            }\n            _ => {} // primitives already copied by parent\n        }\n    }\n    Ok(())\n}\n\n// ── write_voidstar_binary (to fd) ──────────────────────────────────────────\n\n/// Flatten voidstar and write to a file descriptor. Returns bytes written.\npub fn write_binary_to_fd(fd: i32, data: AbsPtr, schema: &Schema) -> Result<usize, MorlocError> {\n    let buf = flatten_to_buffer(data, schema)?;\n    // SAFETY: buf is a valid byte slice; fd is a valid file descriptor from the caller.\n    let written = unsafe {\n        libc::write(fd, buf.as_ptr() as *const std::ffi::c_void, buf.len())\n    };\n    if written < 0 {\n        return Err(MorlocError::Io(std::io::Error::last_os_error()));\n    }\n    Ok(written as usize)\n}\n"
  },
  {
    "path": "exe/morloc-codegen-generic/Main.hs",
    "content": "{- |\nModule      : Main\nDescription : Generic pool code generator for morloc\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\n\nStandalone executable that assembles pool files for dynamically-typed\ninterpreted languages. Receives a serialized IProgram via a binary file\nand a language descriptor via lang.yaml.\n\nUsage: morloc-codegen-generic <lang.yaml> <iprogram.bin>\n\nReads:\n  - argv[1]: path to lang.yaml (language descriptor)\n  - argv[2]: path to binary-encoded IProgram\n\nWrites to stdout:\n  - JSON CodegenManifest with pool_code and build_commands\n-}\nmodule Main (main) where\n\nimport qualified Data.Aeson as Aeson\nimport qualified Data.Binary as Binary\nimport qualified Data.ByteString.Lazy as BL\nimport qualified Data.Text as T\nimport qualified Data.Text.IO as TIO\nimport GHC.IO.Encoding (setLocaleEncoding, utf8)\nimport System.Environment (getArgs)\nimport System.Exit (exitFailure)\nimport System.FilePath (takeDirectory, (</>))\nimport System.IO (hPutStrLn, stderr)\n\nimport Morloc.CodeGenerator.Grammars.Translator.Generic (CodegenManifest (..), printProgram)\nimport Morloc.CodeGenerator.Grammars.Translator.Imperative (IProgram)\nimport Morloc.CodeGenerator.LanguageDescriptor (LangDescriptor (..), loadLangDescriptor)\nimport Morloc.Data.Doc (render)\n\nmain :: IO ()\nmain = do\n  setLocaleEncoding utf8\n  args <- getArgs\n  case args of\n    [langYamlPath, iprogramPath] -> run langYamlPath iprogramPath\n    _ -> do\n      hPutStrLn stderr \"Usage: morloc-codegen-generic <lang.yaml> <iprogram.bin>\"\n      exitFailure\n\nrun :: FilePath -> FilePath -> IO ()\nrun langYamlPath iprogramPath = do\n  -- load language descriptor\n  descResult <- loadLangDescriptor langYamlPath\n  desc <- case descResult of\n    Left err -> do\n      hPutStrLn stderr $ \"Failed to load \" ++ langYamlPath ++ \": \" ++ err\n      exitFailure\n    Right d -> return d\n\n  -- load pool template from disk if not inline\n  desc' <-\n    if T.null (ldPoolTemplate desc)\n      then do\n        let langDir = takeDirectory langYamlPath\n            ext = ldExtension desc\n            poolPath = langDir </> \"pool.\" ++ ext\n        poolText <- TIO.readFile poolPath\n        return desc {ldPoolTemplate = poolText}\n      else return desc\n\n  -- deserialize IProgram\n  binaryData <- BL.readFile iprogramPath\n  let program = Binary.decode binaryData :: IProgram\n\n  -- assemble pool file\n  let poolCode = render (printProgram desc' program)\n\n  -- output manifest as JSON\n  let manifest =\n        CodegenManifest\n          { cgmPoolCode = poolCode\n          , cgmBuildCommands = []\n          }\n  BL.putStr (Aeson.encode manifest)\n"
  },
  {
    "path": "executable/CppPrinter.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE QuasiQuotes #-}\n{-# LANGUAGE TemplateHaskell #-}\n\n{- |\nModule      : CppPrinter\nDescription : Pretty-print the imperative IR as C++ source code\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nConverts 'IStmt' and 'IExpr' IR nodes into C++ source text. Handles\ntype rendering, struct definitions, forward declarations, and C++\nidioms (templates, shared_ptr, std::variant).\n-}\nmodule CppPrinter\n  ( printExpr\n  , printStmt\n  , printStmts\n\n    -- * Pool-level rendering\n  , printDispatch\n  , printProgram\n\n    -- * Struct/serializer rendering\n  , printStructTypedef\n  , printSerializer\n  , printDeserializer\n  , printTemplateHeader\n  , printRecordTemplate\n  ) where\n\nimport Morloc.CodeGenerator.Grammars.Common (DispatchEntry (..), manNamer)\nimport Morloc.CodeGenerator.Grammars.Translator.Imperative\nimport Morloc.CodeGenerator.Namespace (MDoc)\nimport Morloc.Data.Doc\nimport Morloc.DataFiles as DF\nimport Morloc.Quasi\n\nprintExpr :: IExpr -> MDoc\nprintExpr (IVar v) = pretty v\nprintExpr (IBoolLit True) = \"true\"\nprintExpr (IBoolLit False) = \"false\"\nprintExpr (INullLit (Just t)) = \"std::optional<\" <> renderIType t <> \">()\"\nprintExpr (INullLit Nothing) = \"std::nullopt\"\nprintExpr (IIntLit Nothing i) = viaShow i\nprintExpr (IIntLit (Just t) i)\n  | t == \"int\" = viaShow i\n  | otherwise = \"static_cast<\" <> pretty t <> \">(\" <> viaShow i <> \")\"\nprintExpr (IRealLit Nothing r) = viaShow r\nprintExpr (IRealLit (Just t) r)\n  | t == \"double\" = viaShow r\n  | otherwise = \"static_cast<\" <> pretty t <> \">(\" <> viaShow r <> \")\"\nprintExpr (IStrLit s) = [idoc|std::string(#{textEsc' s})|]\nprintExpr (IListLit es) = encloseSep \"{\" \"}\" \",\" (map printExpr es)\nprintExpr (ITupleLit es) = \"std::make_tuple\" <> tupled (map printExpr es)\nprintExpr (IRecordLit _ _ entries) =\n  encloseSep \"{\" \"}\" \",\" (map (printExpr . snd) entries)\nprintExpr (IAccess e (IIdx i)) = \"std::get<\" <> pretty i <> \">(\" <> printExpr e <> \")\"\nprintExpr (IAccess e (IKey _)) = printExpr e -- should not be reached for C++\nprintExpr (IAccess e (IField f)) = printExpr e <> \".\" <> pretty f\nprintExpr (ISerCall schema e) = [idoc|_put_value(#{printExpr e}, \"#{pretty schema}\")|]\nprintExpr (IDesCall schema (Just rawtype) e) = [idoc|_get_value<#{renderIType rawtype}>(#{printExpr e}, \"#{pretty schema}\")|]\nprintExpr (IDesCall schema Nothing e) = [idoc|_get_value(#{printExpr e}, \"#{pretty schema}\")|]\nprintExpr (IPack packer e) = pretty packer <> parens (printExpr e)\nprintExpr (ICall f Nothing argGroups) =\n  pretty f <> hsep (map (tupled . map printExpr) argGroups)\nprintExpr (ICall f (Just ts) argGroups) =\n  pretty f\n    <> encloseSep \"<\" \">\" \",\" (map renderIType ts)\n    <> hsep (map (tupled . map printExpr) argGroups)\nprintExpr (IForeignCall _ _ _) = error \"use IRawExpr for C++ foreign calls\"\nprintExpr (IRemoteCall _ _ _ _) = error \"use IRawExpr for C++ remote calls\"\nprintExpr (ILambda args body) =\n  \"[&](\"\n    <> hsep (punctuate \",\" [\"auto\" <+> pretty a | a <- args])\n    <> \"){return \"\n    <> printExpr body\n    <> \";}\"\nprintExpr (IRawExpr d) = pretty d\nprintExpr (IDoBlock e) = \"[&](){return \" <> printExpr e <> \";}\"\nprintExpr (IEval e) = printExpr e <> \"()\"\nprintExpr (IIntrinsicHash schema e) =\n  [idoc|_mlc_hash(#{printExpr e}, \"#{pretty schema}\")|]\nprintExpr (IIntrinsicSave fmt schema e path)\n  | fmt == \"json\" = [idoc|_mlc_save_json(#{printExpr e}, \"#{pretty schema}\", #{printExpr path})|]\n  | fmt == \"voidstar\" = [idoc|_mlc_save_voidstar(#{printExpr e}, \"#{pretty schema}\", #{printExpr path})|]\n  | otherwise = [idoc|_mlc_save(#{printExpr e}, \"#{pretty schema}\", #{printExpr path})|]\nprintExpr (IIntrinsicLoad schema (Just t) path) =\n  [idoc|_mlc_load<#{renderIType t}>(\"#{pretty schema}\", #{printExpr path})|]\nprintExpr (IIntrinsicLoad schema Nothing path) =\n  [idoc|_mlc_load(\"#{pretty schema}\", #{printExpr path})|]\nprintExpr (IIntrinsicShow schema e) =\n  [idoc|_mlc_show(#{printExpr e}, \"#{pretty schema}\")|]\nprintExpr (IIntrinsicRead schema (Just t) e) =\n  [idoc|_mlc_read<#{renderIType t}>(\"#{pretty schema}\", #{printExpr e})|]\nprintExpr (IIntrinsicRead schema Nothing e) =\n  [idoc|_mlc_read(\"#{pretty schema}\", #{printExpr e})|]\n\nprintStmt :: IStmt -> MDoc\nprintStmt (IAssign v Nothing e) = \"auto\" <+> pretty v <+> \"=\" <+> printExpr e <> \";\"\nprintStmt (IAssign v (Just t) e) = renderIType t <+> pretty v <+> \"=\" <+> printExpr e <> \";\"\n-- C++ uses an indexed for loop with push_back\nprintStmt (IMapList resultVar resultType iterVar collection bodyStmts yieldExpr) =\n  vsep\n    [ resultDecl\n    , block\n        4\n        [idoc|for(size_t #{pretty iterVar}_idx = 0; #{pretty iterVar}_idx < #{printExpr collection}.size(); #{pretty iterVar}_idx++)|]\n        ( vsep\n            ( [idoc|auto #{pretty iterVar} = #{printExpr collection}[#{pretty iterVar}_idx];|]\n                : map printStmt bodyStmts\n                ++ [[idoc|#{pretty resultVar}.push_back(#{printExpr yieldExpr});|]]\n            )\n        )\n    ]\n  where\n    resultDecl = case resultType of\n      Just t -> [idoc|#{renderIType t} #{pretty resultVar};|]\n      Nothing -> printStmt (IAssign resultVar Nothing (IListLit []))\nprintStmt (IIf resultVar resultType condExpr thenStmts thenExpr elseStmts elseExpr) =\n  vsep\n    [ resultDecl\n    , block 4 [idoc|if(#{printExpr condExpr})|]\n        (vsep (map printStmt thenStmts ++ [[idoc|#{pretty resultVar} = #{printExpr thenExpr};|]]))\n    , block 4 \"else\"\n        (vsep (map printStmt elseStmts ++ [[idoc|#{pretty resultVar} = #{printExpr elseExpr};|]]))\n    ]\n  where\n    resultDecl = case resultType of\n      Just t -> [idoc|#{renderIType t} #{pretty resultVar};|]\n      Nothing -> [idoc|auto #{pretty resultVar};|]\nprintStmt (IReturn e) = \"return(\" <> printExpr e <> \");\"\nprintStmt (IExprStmt e) = printExpr e <> \";\"\nprintStmt (IFunDef _ _ _ _) = error \"IFunDef not yet implemented for C++ printer\"\n\nprintStmts :: [IStmt] -> [MDoc]\nprintStmts = map printStmt\n\n-- | Render C++ dispatch functions from structured dispatch entries.\nprintDispatch :: [DispatchEntry] -> [DispatchEntry] -> MDoc\nprintDispatch locals remotes =\n  [idoc|uint8_t* local_dispatch(uint32_t mid, const uint8_t** args){\n    switch(mid){\n        #{align (vsep localCases)}\n        default:\n            std::ostringstream oss;\n            oss << \"Invalid local manifold id: \" << mid;\n            throw std::runtime_error(oss.str());\n    }\n}\n\nuint8_t* remote_dispatch(uint32_t mid, const uint8_t** args){\n    switch(mid){\n        #{align (vsep remoteCases)}\n        default:\n            std::ostringstream oss;\n            oss << \"Invalid remote manifold id: \" << mid;\n            throw std::runtime_error(oss.str());\n    }\n}|]\n  where\n    localCases = map (makeCase \"\") locals\n    remoteCases = map (makeCase \"_remote\") remotes\n\n    makeCase :: MDoc -> DispatchEntry -> MDoc\n    makeCase suffix (DispatchEntry i n) =\n      \"case\" <+> pretty i\n        <> \":\"\n          <+> \"return\"\n          <+> manNamer i\n        <> suffix\n        <> tupled [\"args[\" <> pretty j <> \"]\" | j <- take n ([0 ..] :: [Int])]\n        <> \";\"\n\n-- | Assemble a complete C++ pool file from an IProgram and C++-specific extras.\nprintProgram :: [MDoc] -> [MDoc] -> IProgram -> MDoc\nprintProgram serialization signatures prog =\n  format\n    (DF.embededFileText (DF.poolTemplate \"cpp\"))\n    \"// <<<BREAK>>>\"\n    [ vsep (map pretty (ipSources prog))\n    , vsep serialization\n    , vsep signatures\n    , vsep (map pretty (ipManifolds prog))\n    , printDispatch (ipLocalDispatch prog) (ipRemoteDispatch prog)\n    ]\n\nprintTemplateHeader :: [MDoc] -> MDoc\nprintTemplateHeader [] = \"\"\nprintTemplateHeader ts = \"template\" <+> encloseSep \"<\" \">\" \",\" [\"class\" <+> t | t <- ts]\n\nprintRecordTemplate :: [MDoc] -> MDoc\nprintRecordTemplate [] = \"\"\nprintRecordTemplate ts = encloseSep \"<\" \">\" \",\" ts\n\n-- | Render a C++ struct definition.\nprintStructTypedef ::\n  [MDoc] -> -- template parameters (e.g., [\"T\"])\n  MDoc -> -- the name of the structure (e.g., \"Person\")\n  [(MDoc, MDoc)] -> -- key and type for all fields\n  MDoc\nprintStructTypedef params rname fields = vsep [template, struct]\n  where\n    template = printTemplateHeader params\n    struct =\n      block\n        4\n        (\"struct\" <+> rname)\n        (vsep [t <+> k <> \";\" | (k, t) <- fields])\n        <> \";\"\n\n-- | Render a C++ serializer (toAnything) for a struct.\nprintSerializer ::\n  [MDoc] -> -- template parameters\n  MDoc -> -- type of thing being serialized\n  [(MDoc, MDoc)] -> -- key and type for all fields\n  MDoc\nprintSerializer params rtype fields =\n  [idoc|\n#{printTemplateHeader params}\nvoid* toAnything(void* dest, void** cursor, const Schema* schema, const #{rtype}& obj)\n{\n    return toAnything(dest, cursor, schema, std::make_tuple#{arguments});\n}\n|]\n  where\n    arguments = tupled [\"obj.\" <> key | (key, _) <- fields]\n\n-- | Render a C++ deserializer (fromAnything + get_shm_size) for a struct.\nprintDeserializer ::\n  Bool -> -- build object with constructor\n  [MDoc] -> -- template parameters\n  MDoc -> -- type of thing being deserialized\n  [(MDoc, MDoc)] -> -- key and type for all fields\n  MDoc\nprintDeserializer _ params rtype fields =\n  [idoc|\n#{printTemplateHeader params}\n#{block 4 header body}\n\n#{printTemplateHeader params}\n#{block 4 headerGetSize bodyGetSize}\n|]\n  where\n    header =\n      [idoc|#{rtype} fromAnything(const Schema* schema, const void * anything, #{rtype}* dummy = nullptr, const void* base_ptr = nullptr)|]\n    body =\n      vsep $\n        [[idoc|#{rtype} obj;|]]\n          <> zipWith assignFields [0 ..] fields\n          <> [\"return obj;\"]\n\n    assignFields :: Int -> (MDoc, MDoc) -> MDoc\n    assignFields idx (keyName, keyType) =\n      vsep\n        [ [idoc|#{keyType}* elemental_dumby_#{keyName} = nullptr;|]\n        , [idoc|obj.#{keyName} = fromAnything(schema->parameters[#{pretty idx}], (char*)anything + schema->offsets[#{pretty idx}], elemental_dumby_#{keyName}, base_ptr);|]\n        ]\n\n    headerGetSize = [idoc|size_t get_shm_size(const Schema* schema, const #{rtype}& data)|]\n    bodyGetSize =\n      vsep $\n        [\"size_t size = 0;\"]\n          <> [getSize idx key | (idx, (key, _)) <- zip [0 ..] fields]\n          <> [\"return size;\"]\n\n    getSize :: Int -> MDoc -> MDoc\n    getSize idx key = [idoc|size += get_shm_size(schema->parameters[#{pretty idx}], data.#{key});|]\n"
  },
  {
    "path": "executable/CppTranslator.hs",
    "content": "{-# LANGUAGE FlexibleContexts #-}\n{-# LANGUAGE FlexibleInstances #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE QuasiQuotes #-}\n{-# LANGUAGE TemplateHaskell #-}\n{-# LANGUAGE UndecidableInstances #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : CppTranslator\nDescription : Translate 'SerialManifold' trees into C++ pool source code\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nStateful C++ translator using the two-phase IR architecture: lower the\n'SerialManifold' tree into 'IStmt'/'IExpr' via 'LowerConfig', then print\nvia 'CppPrinter'. Handles C++-specific concerns like compilation flags,\ninclude paths, struct generation, and template instantiation.\n-}\nmodule CppTranslator\n  ( translate\n  , cppLang\n  ) where\n\nimport Control.Monad.Identity (Identity, runIdentity)\nimport qualified Control.Monad.State as CMS\nimport qualified CppPrinter as CP\nimport qualified Data.Char as DC\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport Morloc.CodeGenerator.Grammars.Common\nimport Morloc.CodeGenerator.Grammars.Macro (expandMacro)\nimport Morloc.CodeGenerator.Grammars.Translator.Imperative\n  ( IType (..)\n  , LowerConfig (..)\n  , buildProgramM\n  , defaultFoldRules\n  , expandDeserialize\n  , expandSerialize\n  , toIType\n  )\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.CodeGenerator.Serial\n  ( serialAstToType\n  , shallowType\n  )\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Language as ML\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.Version as MV\nimport Morloc.Quasi\nimport qualified Morloc.System as MS\nimport qualified Morloc.TypeEval as TE\n\n-- HACK: repeating these here is hacky\n-- This same data is repeated in cpp/lang.yaml\ncppLang :: ML.Lang\ncppLang = ML.Lang \"cpp\" \"cpp\"\n\nserialType :: MDoc\nserialType = \"uint8_t*\"\n\ndata CallSemantics = Copy | Reference | ConstPtr\n\nclass HasCppType a where\n  cppTypeOf :: a -> CppTranslator MDoc\n\n  cppArgOf :: CallSemantics -> Arg a -> CppTranslator MDoc\n\nsetCallSemantics :: CallSemantics -> MDoc -> MDoc\nsetCallSemantics Copy typestr = typestr\nsetCallSemantics Reference typestr = \"const\" <+> typestr <> \"&\"\nsetCallSemantics ConstPtr typestr = \"const\" <+> typestr\n\nchooseCallSemantics :: TypeM -> CallSemantics\nchooseCallSemantics Passthrough = ConstPtr -- const uint8_t* packet\nchooseCallSemantics (Serial _) = ConstPtr -- const uint8_t* packet\nchooseCallSemantics (Native _) = Reference -- for now, primitives should be pass by copy\nchooseCallSemantics (Function _ _) = Copy -- currently not used\n\ninstance HasCppType TypeM where\n  cppTypeOf (Serial _) = return serialType\n  cppTypeOf (Native c) = cppTypeOf c\n  cppTypeOf Passthrough = return serialType\n  cppTypeOf (Function ts t) = do\n    t' <- cppTypeOf t\n    ts' <- mapM cppTypeOf ts\n    return $ \"std::function<\" <> t' <> tupled ts' <> \">\"\n\n  cppArgOf s (Arg i t) = do\n    typeStr <- cppTypeOf t\n    let typeStrQualified = setCallSemantics s typeStr\n    return $ case t of\n      (Serial _) -> typeStrQualified <+> svarNamer i\n      (Native _) -> typeStrQualified <+> nvarNamer i\n      Passthrough -> typeStrQualified <+> svarNamer i\n      (Function _ _) -> typeStrQualified <+> nvarNamer i\n\ninstance HasCppType NativeManifold where\n  cppTypeOf = cppTypeOf . typeMof\n  cppArgOf s r = cppArgOf s $ fmap typeMof r\n\ninstance {-# OVERLAPPABLE #-} (HasTypeF e) => HasCppType e where\n  cppTypeOf = f . typeFof\n    where\n      f (UnkF (FV _ x)) = return $ pretty x\n      f (VarF (FV _ x)) = return $ pretty x\n      f (FunF ts t) = do\n        t' <- f t\n        ts' <- mapM f ts\n        return $ \"std::function<\" <> t' <> tupled ts' <> \">\"\n      f (NatLitF _) = return mempty\n      f (AppF t ts) = do\n        t' <- f t\n        let runtimeTs = [x | x <- ts, not (isNatLitF x)]\n        ts' <- mapM f runtimeTs\n        return . pretty $ expandMacro (render t') (map render ts')\n      f t@(NamF _ (FV gc (CV \"struct\")) _ rs) = do\n        recmap <- CMS.gets translatorRecmap\n        -- handle autogenerated structs\n        case lookup (FV gc (CV \"struct\"), map fst rs) recmap of\n          (Just rec) -> do\n            params <- typeParams (zip (map snd (recFields rec)) (map snd rs))\n            return $ recName rec <> params\n          Nothing -> error $ \"Record missing from recmap: \" <> show t <> \" from map: \" <> show recmap\n      f (NamF _ (FV _ (CV \"arrow\")) _ _) = return \"mlc::ArrowTable\"\n      f (NamF _ (FV _ s) ps _) = do\n        ps' <- mapM f ps\n        return $ pretty s <> CP.printRecordTemplate ps'\n      f (EffectF _ t) = do\n        t' <- f t\n        return $ \"std::function<\" <> t' <> \"()\" <> \">\"\n      f (OptionalF t) = do\n        t' <- f t\n        return $ \"std::optional<\" <> t' <> \">\"\n      isNatLitF (NatLitF _) = True\n      isNatLitF _ = False\n\n  cppArgOf s (Arg i t) = do\n    t' <- cppTypeOf (typeFof t)\n    return $ setCallSemantics s t' <+> nvarNamer i\n\ndata CppTranslatorState = CppTranslatorState\n  { translatorCounter :: Int\n  , translatorRecmap :: RecMap\n  , translatorSignatureSet :: Set.Set Int\n  , translatorLocalManifoldSet :: Set.Set Int\n  , translatorRemoteManifoldSet :: Set.Set Int\n  , translatorCurrentManifold :: Int\n  , translatorEffectLabels :: Map.Map Int (Set.Set Text)\n  }\n\ninstance Defaultable CppTranslatorState where\n  defaultValue =\n    CppTranslatorState\n      { translatorCounter = 0\n      , translatorRecmap = []\n      , translatorSignatureSet = Set.empty\n      , translatorLocalManifoldSet = Set.empty\n      , translatorRemoteManifoldSet = Set.empty\n      , translatorCurrentManifold = -1 -- -1 indicates we are not inside a manifold\n      , translatorEffectLabels = Map.empty\n      }\n\ntype CppTranslator a = CMS.StateT CppTranslatorState Identity a\ntype CppTranslatorM = CMS.StateT CppTranslatorState Identity\n\ngetCounter :: CppTranslator Int\ngetCounter = do\n  s <- CMS.get\n  let i = translatorCounter s\n  CMS.put $ s {translatorCounter = translatorCounter s + 1}\n  return i\n\nresetCounter :: CppTranslator ()\nresetCounter = do\n  s <- CMS.get\n  CMS.put $ s {translatorCounter = 0}\n\ntranslate :: [Source] -> [SerialManifold] -> MorlocMonad Script\ntranslate srcs es = do\n  -- scopeMap :: GMap Int MVar (Map.Map Lang Scope)\n  scopeMap <- MM.gets stateConcreteTypedefs\n\n  -- universalScopeMap :: GMap Int MVar Scope\n  universalScopeMap <- MM.gets stateUniversalConcreteTypedefs\n\n  effectMap <- MM.gets stateManifoldEffects\n\n  -- Canonicalize C++ source paths once up front so that the #include\n  -- directives emitted by makeCppCode and the -I flags emitted by\n  -- makeTheMaker see exactly the same absolute paths. Before this,\n  -- `#include \"./src/foo.hpp\"` could not be resolved against\n  -- `-I/abs/src` because the `src/` prefix was duplicated.\n  (srcs', _, _) <- handleFlagsAndPaths srcs\n\n  let recmap = unifyRecords . concatMap collectRecords $ es\n      translatorState = defaultValue {translatorRecmap = recmap, translatorEffectLabels = effectMap}\n      code = CMS.evalState (makeCppCode srcs' es universalScopeMap scopeMap) translatorState\n\n  maker <- makeTheMaker srcs'\n\n  poolSubdir <- MM.getModuleName\n\n  return $\n    Script\n      { scriptBase = \"pool\"\n      , scriptLang = cppLang\n      , scriptCode = \".\" :/ Dir \"pools\" [Dir poolSubdir [File \"pool.cpp\" (Code (T.replace \"__MORLOC_VERSION__\" (MT.pack MV.versionStr) (render code)))]]\n      , scriptMake = maker\n      }\n\nmakeCppCode ::\n  [Source] ->\n  [SerialManifold] ->\n  Map.Map Lang Scope ->\n  GMap Int MVar (Map.Map Lang Scope) ->\n  CppTranslator MDoc\nmakeCppCode srcs es univeralScopeMap scopeMap = do\n  -- ([MDoc], [MDoc])\n  (srcDecl, srcSerial) <- generateSourcedSerializers univeralScopeMap scopeMap es\n\n  -- write include statements for sources\n  let includeDocs = map translateSource (unique . mapMaybe srcPath $ srcs)\n\n  signatures <- concat <$> mapM makeSignature es\n\n  (autoDecl, autoSerial) <- generateAnonymousStructs\n  let serializationCode = autoDecl ++ srcDecl ++ autoSerial ++ srcSerial\n\n  -- build the program (translates each manifold tree)\n  program <- buildProgramM includeDocs es translateSegment\n\n  -- create and return complete pool script\n  return $ CP.printProgram serializationCode signatures program\n\nmetaTypedefs ::\n  GMap Int MVar (Map.Map Lang Scope) ->\n  Int -> -- manifold index\n  Scope\nmetaTypedefs tmap i =\n  case GMap.lookup i tmap of\n    (GMapJust langmap) -> case Map.lookup cppLang langmap of\n      (Just scope) -> Map.filter (not . null) scope\n      Nothing -> Map.empty\n    _ -> Map.empty\n\n-- | Collect TVar names of all named (non-anonymous) record types used\n-- in a SerialManifold tree.\ncollectNamedRecordTVars :: SerialManifold -> Set.Set TVar\ncollectNamedRecordTVars e0 =\n  runIdentity $ foldWithSerialManifoldM fm e0\n  where\n    fm = defaultValue\n      { opFoldWithNativeExprM = nativeExpr\n      , opFoldWithSerialExprM = serialExpr\n      }\n\n    nativeExpr _ (DeserializeN_ t s xs) =\n      return $ Set.unions [xs, seekNamedRecs t, seekNamedRecs (serialAstToType s)]\n    nativeExpr efull e =\n      return $ foldlNE Set.union (seekNamedRecs (typeFof efull)) e\n\n    serialExpr _ (SerializeS_ s xs) =\n      return $ Set.union (seekNamedRecs (serialAstToType s)) xs\n    serialExpr _ e = return $ foldlSE Set.union Set.empty e\n\n    seekNamedRecs :: TypeF -> Set.Set TVar\n    seekNamedRecs (NamF _ (FV v (CV c)) _ rs)\n      | c /= \"struct\" = Set.insert v (Set.unions (map (seekNamedRecs . snd) rs))\n    seekNamedRecs (NamF _ _ _ rs) = Set.unions (map (seekNamedRecs . snd) rs)\n    seekNamedRecs (FunF ts t) = Set.unions (map seekNamedRecs (t : ts))\n    seekNamedRecs (AppF t ts) = Set.unions (map seekNamedRecs (t : ts))\n    seekNamedRecs (EffectF _ t) = seekNamedRecs t\n    seekNamedRecs (OptionalF t) = seekNamedRecs t\n    seekNamedRecs _ = Set.empty\n\nmakeTheMaker :: [Source] -> MorlocMonad [SysCommand]\nmakeTheMaker srcs = do\n  poolSubdir <- MM.getModuleName\n  let outfile = pretty $ \"pools\" </> poolSubdir </> ML.makeExecutablePoolName cppLang\n  let src = pretty $ \"pools\" </> poolSubdir </> ML.makeSourcePoolName cppLang\n\n  (_, flags, includes) <- handleFlagsAndPaths srcs\n\n  bconf <- MM.gets stateBuildConfig\n  let sanitizeFlags = case buildConfigSanitize bconf of\n        Just True -> [\"-fsanitize=alignment\", \"-fno-sanitize-recover=alignment\"]\n        _ -> []\n\n  let incs = \"-I.\" : [pretty (\"-I\" <> i) | i <- includes]\n  let flags' = map pretty (flags ++ sanitizeFlags)\n\n  let cmd =\n        SysRun . Code . render $\n          [idoc|g++ -O2 -o #{outfile} #{src} #{hsep flags'} #{hsep incs}|]\n\n  return [cmd]\n\nmakeSignature :: SerialManifold -> CppTranslator [MDoc]\nmakeSignature = foldWithSerialManifoldM fm\n  where\n    fm =\n      defaultValue\n        { opFoldWithSerialManifoldM = serialManifold\n        , opFoldWithNativeManifoldM = nativeManifold\n        }\n\n    serialManifold (SerialManifold m _ form _ _) _ = manifoldSignature m serialType form\n\n    nativeManifold e@(NativeManifold m _ form _) _ = do\n      typestr <- cppTypeOf e\n      manifoldSignature m typestr form\n\n    manifoldSignature ::\n      (HasTypeM t) => Int -> MDoc -> ManifoldForm (Or TypeS TypeF) t -> CppTranslator [MDoc]\n    manifoldSignature i typestr form = do\n      s <- CMS.get\n      if Set.member i (translatorSignatureSet s)\n        then return []\n        else do\n          let formArgs = typeMofForm form\n\n          args <- mapM (\\r@(Arg _ t) -> cppArgOf (chooseCallSemantics t) r) formArgs\n          CMS.put (s {translatorSignatureSet = Set.insert i (translatorSignatureSet s)})\n          return [typestr <+> manNamer i <> tupled args <> \";\"]\n\ntupleKey :: Int -> MDoc -> MDoc\ntupleKey i v = [idoc|std::get<#{pretty i}>(#{v})|]\n\nrecordAccess :: MDoc -> MDoc -> MDoc\nrecordAccess record field = record <> \".\" <> field\n\ncppLowerConfig :: LowerConfig CppTranslatorM\ncppLowerConfig =\n  LowerConfig\n    { lcSrcName = \\src -> pretty (srcName src)\n    , lcTypeOf = \\t -> Just . toIType <$> cppTypeOf t\n    , lcSerialAstType = serializeTypeOf\n    , lcDeserialAstType = \\s -> Just . toIType <$> cppTypeOf (shallowType s)\n    , lcRawDeserialAstType = rawTypeOf\n    , lcTypeMOf = \\_ -> return Nothing\n    , lcPackerName = \\src -> pretty (srcName src)\n    , lcUnpackerName = \\src -> pretty (srcName src)\n    , lcRecordAccessor = \\_ _ -> recordAccess\n    , lcDeserialRecordAccessor = \\i _ v -> tupleKey i v\n    , lcTupleAccessor = tupleKey\n    , lcNewIndex = getCounter\n    , lcPrintExpr = CP.printExpr\n    , lcPrintStmt = CP.printStmt\n    , lcEvalPattern = \\t p xs -> do\n        state <- CMS.get\n        return $ evaluatePattern state t p xs\n    , lcListConstructor = \\_ _ es -> encloseSep \"{\" \"}\" \",\" es\n    , lcTupleConstructor = \\_ -> ((<>) \"std::make_tuple\" . tupled)\n    , lcRecordConstructor = \\recType _ _ _ rs -> do\n        t <- cppTypeOf recType\n        idx <- getCounter\n        let v' = \"a\" <> pretty idx\n            decl = t <+> v' <+> \"=\" <+> encloseSep \"{\" \"}\" \",\" (map snd rs) <> \";\"\n        return $ defaultValue {poolExpr = v', poolPriorLines = [decl]}\n    , lcForeignCall = \\socketFile mid args ->\n        let argList = [dquotes socketFile, pretty mid] <> args <> [\"NULL\"]\n         in [idoc|foreign_call#{tupled argList}|]\n    , lcRemoteCall = \\socketFile mid res args -> do\n        let resMem = pretty $ fromMaybe (-1) (remoteResourcesMemory res)\n            resTime = pretty $ maybe (-1) unTimeInSeconds (remoteResourcesTime res)\n            resCPU = pretty $ fromMaybe (-1) (remoteResourcesThreads res)\n            resGPU = pretty $ fromMaybe 0 (remoteResourcesGpus res)\n            cacheDir = \".morloc-cache\"\n            argList = encloseSep \"{\" \"}\" \",\" args\n            setup =\n              [idoc|resources_t resources = {#{resMem}, #{resTime}, #{resCPU}, #{resGPU}};\nconst uint8_t* args[] = #{argList};\nchar* errmsg = NULL;|]\n            call =\n              [idoc|remote_call(\n    #{pretty mid},\n    #{dquotes socketFile},\n    #{dquotes cacheDir},\n    &resources,\n    args,\n    #{pretty (length args)},\n    &errmsg\n);\nPROPAGATE_ERROR(errmsg)|]\n        return $ defaultValue {poolExpr = call, poolPriorLines = [setup]}\n    , lcMakeLet = \\namer letIndex mt e1 e2 -> do\n        typestr <- case mt of\n          (Just t) -> cppTypeOf t\n          Nothing -> return serialType\n        return $ makeLet namer letIndex typestr e1 e2\n    , lcReturn = \\e -> \"return(\" <> e <> \");\"\n    , lcMakeIf = \\origExpr condDocs thenDocs elseDocs -> do\n        idx <- getCounter\n        let v = helperNamer idx\n        typeStr <- cppTypeOf origExpr\n        let condE = poolExpr condDocs\n            thenE = poolExpr thenDocs\n            elseE = poolExpr elseDocs\n            thenBlock = poolPriorLines thenDocs <> [v <+> \"=\" <+> thenE <> \";\"]\n            elseBlock = poolPriorLines elseDocs <> [v <+> \"=\" <+> elseE <> \";\"]\n            decl = typeStr <+> v <> \";\"\n            ifStmt = vsep\n              [ decl\n              , \"if\" <+> parens condE <+> \"{\"\n              , indent 4 (vsep thenBlock)\n              , \"} else {\"\n              , indent 4 (vsep elseBlock)\n              , \"}\"\n              ]\n        return $ PoolDocs\n          { poolCompleteManifolds = poolCompleteManifolds condDocs <> poolCompleteManifolds thenDocs <> poolCompleteManifolds elseDocs\n          , poolExpr = v\n          , poolPriorLines = poolPriorLines condDocs <> [ifStmt]\n          , poolPriorExprs = poolPriorExprs condDocs <> poolPriorExprs thenDocs <> poolPriorExprs elseDocs\n          }\n    , lcMakeDoBlock = \\t stmts expr ->\n        let isUnit = case t of\n              EffectF _ (VarF (FV tv _)) -> tv == TV \"Unit\"\n              VarF (FV tv _) -> tv == TV \"Unit\"\n              _ -> False\n        in (,) [] $ case (isUnit, stmts) of\n          (True, []) -> \"[&](){\" <> expr <> \"; return mlc::Unit{};}\"\n          (True, _) -> \"[&](){\" <> nest 4 (line <> vsep (stmts <> [expr <> \";\", \"return mlc::Unit{};\"])) <> line <> \"}\"\n          (False, []) -> \"[&](){return \" <> expr <> \";}\"\n          (False, _) -> \"[&](){\" <> nest 4 (line <> vsep (stmts <> [\"return \" <> expr <> \";\"])) <> line <> \"}\"\n    , lcSerialize = \\v s -> serialize v s\n    , lcDeserialize = \\t v s -> do\n        typestr <- cppTypeOf t\n        deserialize v typestr s\n    , lcMakeFunction = \\mname args manifoldType priorLines body headForm -> do\n        callIndex <- CMS.gets translatorCurrentManifold\n        state <- CMS.get\n        let effectLabels = Map.findWithDefault Set.empty callIndex (translatorEffectLabels state)\n        let alreadyDone = case headForm of\n              (Just HeadManifoldFormRemoteWorker) -> Set.member callIndex (translatorRemoteManifoldSet state)\n              _ -> Set.member callIndex (translatorLocalManifoldSet state)\n        if alreadyDone\n          then return Nothing\n          else do\n            case headForm of\n              (Just HeadManifoldFormRemoteWorker) ->\n                CMS.modify\n                  (\\s -> s {translatorRemoteManifoldSet = Set.insert callIndex (translatorRemoteManifoldSet s)})\n              _ ->\n                CMS.modify\n                  (\\s -> s {translatorLocalManifoldSet = Set.insert callIndex (translatorLocalManifoldSet s)})\n            returnTypeStr <- returnType manifoldType\n            typedArgs <- mapM (\\r@(Arg _ t) -> cppArgOf (chooseCallSemantics t) r) args\n            let fullName = mname <> mnameExt headForm\n                decl = returnTypeStr <+> fullName <> tupled typedArgs\n                enrichError = case headForm of\n                  Just HeadManifoldFormRemoteWorker -> True\n                  _ -> Set.member \"Error\" effectLabels\n                tryBody = block 4 \"try\" (vsep $ priorLines <> [body])\n                catchBody\n                  | enrichError =\n                    let throwStatement = vsep\n                          [ [idoc|std::string error_message = \"Error raised in C++ pool by #{mname}:\\n\" + std::string(e.what());|]\n                          , [idoc|throw std::runtime_error(error_message);|]\n                          ]\n                     in block 4 \"catch (const std::exception& e)\" throwStatement\n                  | otherwise = block 4 \"catch (...)\" \"throw;\"\n            return . Just . block 4 decl . vsep $ [tryBody <+> catchBody]\n    , lcMakeLambda = \\mname contextArgs boundArgs ->\n        let vs' = take (length boundArgs) (map (\\j -> \"std::placeholders::_\" <> viaShow j) ([1 ..] :: [Int]))\n         in [idoc|std::bind(#{cat (punctuate \",\" (mname : (contextArgs ++ vs')))})|]\n    }\n  where\n    -- For serialization, records become tuples (that's what _put_value/toAnything expects)\n    serializeTypeOf :: SerialAST -> CppTranslator (Maybe IType)\n    serializeTypeOf (SerialObject _ _ _ rs) = Just . toIType <$> recordToCppTuple (map snd rs)\n    serializeTypeOf s = Just . toIType <$> cppTypeOf (serialAstToType s)\n\n    rawTypeOf :: SerialAST -> CppTranslator (Maybe IType)\n    rawTypeOf (SerialObject _ _ _ rs) = Just . toIType <$> recordToCppTuple (map snd rs)\n    rawTypeOf s = Just . toIType <$> cppTypeOf (serialAstToType s)\n\n    makeLet :: (Int -> MDoc) -> Int -> MDoc -> PoolDocs -> PoolDocs -> PoolDocs\n    makeLet namer letIndex typestr (PoolDocs ms1 e1 rs1 pes1) (PoolDocs ms2 e2 rs2 pes2) =\n      let letAssignment = [idoc|#{typestr} #{namer letIndex} = #{e1};|]\n          rs = rs1 <> [letAssignment] <> rs2\n       in PoolDocs\n            { poolCompleteManifolds = ms1 <> ms2\n            , poolExpr = e2\n            , poolPriorLines = rs\n            , poolPriorExprs = pes1 <> pes2\n            }\n\n    mnameExt :: Maybe HeadManifoldForm -> MDoc\n    mnameExt (Just HeadManifoldFormRemoteWorker) = \"_remote\"\n    mnameExt _ = \"\"\n\n    returnType :: TypeM -> CppTranslator MDoc\n    returnType (Function _ t) = cppTypeOf t\n    returnType t = cppTypeOf t\n\n-- Use `#include \"foo.h\"` rather than `#include <foo.h>`\ntranslateSource ::\n  -- | Path to a header (e.g., `$MORLOC_HOME/src/foo.h`)\n  Path ->\n  MDoc\ntranslateSource path = \"#include\" <+> (dquotes . pretty) path\n\nserialize :: MDoc -> SerialAST -> CppTranslator PoolDocs\nserialize v s = do\n  (expr, stmts) <- expandSerialize cppLowerConfig v s\n  return $\n    PoolDocs\n      { poolCompleteManifolds = []\n      , poolExpr = CP.printExpr expr\n      , poolPriorLines = map CP.printStmt stmts\n      , poolPriorExprs = []\n      }\n\n-- reverse of serialize, parameters are the same\ndeserialize :: MDoc -> MDoc -> SerialAST -> CppTranslator (MDoc, [MDoc])\ndeserialize varname0 typestr0 s0 = do\n  (expr, stmts) <- expandDeserialize cppLowerConfig varname0 s0\n  let rendered = CP.printExpr expr\n  if null stmts\n    then return (rendered, [])\n    else do\n      schemaVar <- helperNamer <$> getCounter\n      let final = [idoc|#{typestr0} #{schemaVar} = #{rendered};|]\n      return (schemaVar, map CP.printStmt stmts ++ [final])\n\nrecordToCppTuple :: [SerialAST] -> CppTranslator MDoc\nrecordToCppTuple ts = do\n  tsDocs <- mapM (cppTypeOf . serialAstToType) ts\n  return $ \"std::tuple\" <> encloseSep \"<\" \">\" \",\" tsDocs\n\ntranslateSegment :: SerialManifold -> CppTranslator MDoc\ntranslateSegment m0 = do\n  resetCounter\n  e <- surroundFoldSerialManifoldM manifoldIndexer (defaultFoldRules cppLowerConfig) m0\n  return $ renderPoolDocs e\n  where\n    manifoldIndexer =\n      makeManifoldIndexer\n        (CMS.gets translatorCurrentManifold)\n        (\\i -> CMS.modify (\\s -> s {translatorCurrentManifold = i}))\n\n-- handle string interpolation\nevaluatePattern :: CppTranslatorState -> TypeF -> Pattern -> [MDoc] -> MDoc\nevaluatePattern _ _ (PatternText s ss) xs = \"interweave_strings\" <> tupled [fragments, insertions]\n  where\n    fragments = encloseSep \"{\" \"}\" \", \" (map (dquotes . pretty . escapeQuotes \"\\\"\" \"\\\\\\\"\" . escapeStringLit) (s : ss))\n    insertions = encloseSep \"{\" \"}\" \", \" xs\n\n-- handle getters\nevaluatePattern _ _ (PatternStruct (ungroup -> [ss])) [m] =\n  writeSelector m ss\nevaluatePattern _ _ (PatternStruct (ungroup -> sss)) [m] =\n  encloseSep \"{\" \"}\" \",\" (map (writeSelector m) sss)\nevaluatePattern state0 t0 (PatternStruct s0) (m0 : xs0) =\n  patternSetter makeTuple makeRecord accessTuple accessRecord m0 t0 s0 xs0\n  where\n    makeTuple (AppF _ ts) xs =\n      let tupleTypes = CMS.evalState (mapM cppTypeOf ts) state0\n       in \"std::tuple\" <> encloseSep \"<\" \">\" \",\" tupleTypes <> tupled xs\n    makeTuple _ _ = error \"Unreachable\"\n\n    makeRecord _ xs = encloseSep \"{\" \"}\" \", \" xs\n\n    accessTuple _ m i = \"std::get<\" <> pretty i <> \">(\" <> m <> \")\"\n    accessRecord _ d k = d <> \".\" <> pretty k\nevaluatePattern _ _ (PatternStruct _) [] = error \"Unreachable illegal pattern\"\n\nwriteSelector :: MDoc -> [Either Int Text] -> MDoc\nwriteSelector d [] = d\nwriteSelector d (Right k : rs) = writeSelector (d <> \".\" <> pretty k) rs\nwriteSelector d (Left i : rs) = writeSelector (\"std::get<\" <> pretty i <> \">\" <> parens d) rs\n\ntypeParams :: [(Maybe TypeF, TypeF)] -> CppTranslator MDoc\ntypeParams ts = CP.printRecordTemplate <$> mapM cppTypeOf [t | (Nothing, t) <- ts]\n\ngenerateAnonymousStructs :: CppTranslator ([MDoc], [MDoc])\ngenerateAnonymousStructs = do\n  recmap <- CMS.gets translatorRecmap\n\n  xs <- mapM makeSerializers (reverse . map snd $ recmap)\n\n  return (concatMap fst xs, concatMap snd xs)\n  where\n    makeSerializers :: RecEntry -> CppTranslator ([MDoc], [MDoc])\n    makeSerializers rec = do\n      let templateTerms = map ((\"T\" <>) . pretty) ([1 ..] :: [Int])\n          rs' = zip templateTerms (recFields rec)\n\n      let params = [t | (t, (_, Nothing)) <- rs']\n          rname = recName rec\n          rtype = rname <> CP.printRecordTemplate [v | (v, (_, Nothing)) <- rs']\n\n      let fieldNames = [k | (_, (k, _)) <- rs']\n\n      fieldTypes <- mapM (\\(t, v) -> maybeM t cppTypeOf v) [(t', v') | (t', (_, v')) <- rs']\n\n      let fields = [(pretty k, v) | (k, v) <- zip fieldNames fieldTypes]\n\n      let structDecl = CP.printStructTypedef params rname fields\n          serializer = CP.printSerializer params rtype fields\n          deserializer = CP.printDeserializer False params rtype fields\n\n      return ([structDecl], [serializer, deserializer])\n\n    -- monadic form of `maybe` function\n    maybeM :: (Monad m) => a -> (b -> m a) -> Maybe b -> m a\n    maybeM _ f (Just x) = f x\n    maybeM x _ Nothing = return x\n\ngenerateSourcedSerializers ::\n  Map.Map Lang Scope ->\n  GMap Int MVar (Map.Map Lang Scope) ->\n  [SerialManifold] -> -- all segments that can be called in this pool\n  CppTranslator\n    ( [MDoc]\n    , [MDoc]\n    )\ngenerateSourcedSerializers univeralScopeMap scopeMap es0 = do\n  perManifold <- Map.unions <$> mapM (foldSerialManifoldM fm) es0\n\n  scope <- case Map.lookup cppLang univeralScopeMap of\n    (Just scope) -> return scope\n    Nothing -> return Map.empty\n\n  -- Supplement per-manifold typedefs with universal scope entries for named\n  -- record types that appear in this pool but are missing from the per-manifold\n  -- scope (happens in secondary C++ pools called via foreign_call).\n  let usedTypes = Set.unions (map collectNamedRecordTVars es0)\n      missingTypes = Set.difference usedTypes (Map.keysSet perManifold)\n      supplemental = Map.filterWithKey (\\k _ -> Set.member k missingTypes) scope\n      typedef = Map.unionWith mergeScopes perManifold supplemental\n\n  foldl groupQuad ([], []) . concat . Map.elems <$> Map.mapWithKeyM (makeSerials scope) typedef\n  where\n    -- given the universal map of scopes, pull out every one that is used in this subtree\n    fm =\n      defaultValue\n        { opSerialManifoldM = \\(SerialManifold_ i _ _ _ e) -> return $ Map.unionWith mergeScopes (metaTypedefs scopeMap i) e\n        , opNativeManifoldM = \\(NativeManifold_ i _ _ e) -> return $ Map.unionWith mergeScopes (metaTypedefs scopeMap i) e\n        }\n\n    -- there are likely to be repeats in the scopes, we only want the unique ones\n    mergeScopes xs ys = unique (xs <> ys)\n\n    groupQuad :: ([a], [a]) -> (a, a) -> ([a], [a])\n    groupQuad (xs, ys) (x, y) = (x : xs, y : ys)\n\n    makeSerials ::\n      Scope -> TVar -> [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)] -> CppTranslator [(MDoc, MDoc)]\n    makeSerials s v xs = catMaybes <$> mapM (makeSerial s v) xs\n\n    makeSerial ::\n      Scope -> TVar -> ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) -> CppTranslator (Maybe (MDoc, MDoc))\n    makeSerial _ _ (_, NamU _ (TV \"struct\") _ _, _, _) = return Nothing\n    makeSerial _ _ (_, NamU _ (TV \"arrow\") _ _, _, _) = return Nothing\n    makeSerial scope _ (ps, NamU r (TV v) _ rs, _, _) = do\n      params <- mapM (either (\\(p, _) -> return $ \"T\" <> pretty p) (\\_ -> return \"XXX_FIXME\")) ps\n      let templateTerms = [\"T\" <> pretty p | Left (p, _) <- ps]\n          rtype = pretty v <> CP.printRecordTemplate templateTerms\n          rs' = map (second (evaluateTypeU scope)) rs\n          fields = [(pretty k, showDefType ps (typeOf t)) | (k, t) <- rs']\n          serializer = CP.printSerializer params rtype fields\n          deserializer = CP.printDeserializer (r == NamObject) params rtype fields\n      return $ Just (serializer, deserializer)\n    makeSerial _ _ _ = return Nothing\n\n    evaluateTypeU :: Scope -> TypeU -> TypeU\n    evaluateTypeU scope t = case TE.evaluateType scope t of\n      (Left e) -> error $ show e\n      (Right t') -> t'\n\n    showDefType :: [Either (TVar, Kind) TypeU] -> Type -> MDoc\n    showDefType ps (UnkT v)\n      | any (\\p -> either (\\(tv, _) -> tv == v) (const False) p) ps = \"T\" <> pretty v\n      | otherwise = pretty v\n    showDefType ps (VarT v)\n      | any (\\p -> either (\\(tv, _) -> tv == v) (const False) p) ps = \"T\" <> pretty v\n      | otherwise = pretty v\n    showDefType _ (FunT _ _) = error \"Cannot serialize functions\"\n    showDefType _ (NamT _ v _ _) = pretty v\n    showDefType _ (NatLitT _) = mempty\n    showDefType ps (AppT (VarT (TV v)) ts) = pretty $ expandMacro v (map (render . showDefType ps) runtimeTs)\n      where runtimeTs = [t | t <- ts, not (isNatLitT t)]\n    showDefType _ (AppT _ _) = error \"AppT is only OK with VarT, for now\"\n    showDefType _ (EffectT _ _) = error \"Cannot show EffectT\"\n    showDefType _ (NatAddT _ _) = mempty\n    showDefType _ (NatMulT _ _) = mempty\n    showDefType _ (NatSubT _ _) = mempty\n    showDefType _ (NatDivT _ _) = mempty\n    showDefType ps (OptionalT t) = \"std::optional<\" <> showDefType ps t <> \">\"\n    isNatLitT (NatLitT _) = True\n    isNatLitT _ = False\n\n-- C++ specific source handling (flags, headers, libraries)\n\nhandleFlagsAndPaths :: [Source] -> MorlocMonad ([Source], [Text], [Path])\nhandleFlagsAndPaths srcs = do\n  state <- MM.get\n  let gccversion = gccVersionFlag . foldl max 0 . map packageCppVersion $ statePackageMeta state\n  let explicitLibs = map (\"-l\" <>) . unique . concatMap packageDependencies $ statePackageMeta state\n  (srcs', libflags, paths) <-\n    fmap unzip3\n      . mapM flagAndPath\n      . unique\n      $ [s | s <- srcs, srcLang s == cppLang]\n\n  home <- MM.asks configHome\n  let mlcInclude = [\"-I\" <> home <> \"/include\"]\n      mlcPch = [\"-include\", \"morloc_pch.hpp\"]\n      mlcLib = [\"-L\" <> home <> \"/lib\", \"-Wl,-rpath,\" <> home <> \"/lib\", \"-lmorloc\", \"-lcppmorloc\", \"-lpthread\"]\n\n  return\n    ( filter (isJust . srcPath) srcs'\n    , [gccversion] <> explicitLibs ++ (map MT.pack . concat) (mlcPch : mlcInclude : mlcLib : libflags)\n    , unique (catMaybes paths)\n    )\n\ngccVersionFlag :: Int -> Text\ngccVersionFlag i\n  | i <= 17 = \"-std=c++17\"\n  | otherwise = \"-std=c++\" <> MT.show' i\n\nflagAndPath :: Source -> MorlocMonad (Source, [String], Maybe Path)\nflagAndPath src@(Source _ srcL (Just p) _ _ _ _ _ _) | srcL == cppLang =\n  case (MS.takeDirectory p, MS.dropExtensions (MS.takeFileName p), MS.takeExtensions p) of\n    (\".\", base, \"\") -> do\n      header <- lookupHeader base\n      libFlags <- lookupLib base\n      return (src {srcPath = Just header}, libFlags, Just (MS.takeDirectory header))\n    (dir, base, _) -> do\n      libFlags <- lookupLib base\n      absDir <- liftIO $ MS.canonicalizePath dir\n      absPath <- liftIO $ MS.canonicalizePath p\n      return (src {srcPath = Just absPath}, libFlags, Just absDir)\n  where\n    lookupHeader :: String -> MorlocMonad Path\n    lookupHeader base = do\n      home <- MM.asks configHome\n      let allPaths = getHeaderPaths home base [\".h\", \".hpp\", \".hxx\"]\n      existingPaths <- liftIO . fmap catMaybes . mapM getFile $ allPaths\n      case existingPaths of\n        (x : _) -> liftIO $ MS.canonicalizePath x\n        [] -> MM.throwSystemError $ \"Header file \" <> pretty base <> \".* not found\"\n\n    lookupLib :: String -> MorlocMonad [String]\n    lookupLib base = do\n      home <- MM.asks configHome\n      let libnamebase = filter DC.isAlphaNum (map DC.toLower base)\n      let libname = \"lib\" <> libnamebase <> \".so\"\n      let allPaths = getLibraryPaths home base libname\n      existingPaths <- liftIO . fmap catMaybes . mapM getFile $ allPaths\n      case existingPaths of\n        (libpath : _) -> do\n          libdir <- liftIO . MS.canonicalizePath . MS.takeDirectory $ libpath\n          return\n            [ \"-Wl,-rpath=\" <> libdir\n            , \"-L\" <> libdir\n            , \"-l\" <> libnamebase\n            ]\n        [] -> return []\nflagAndPath src@(Source _ srcL Nothing _ _ _ _ _ _) | srcL == cppLang = return (src, [], Nothing)\nflagAndPath _ = MM.throwSystemError $ \"flagAndPath should only be called for C++ functions\"\n\ngetFile :: Path -> IO (Maybe Path)\ngetFile x = do\n  exists <- MS.doesFileExist x\n  return $\n    if exists\n      then Just x\n      else Nothing\n\ngetHeaderPaths :: Path -> String -> [String] -> [Path]\ngetHeaderPaths lib base exts = [path <> ext | path <- paths, ext <- exts]\n  where\n    paths =\n      map\n        MS.joinPath\n        [ [base]\n        , [\"include\", base]\n        , [base, base]\n        , [lib, \"include\", base]\n        , [lib, \"src\", base, base]\n        , [\"/usr/include\", base]\n        , [\"/usr/local/include\", base]\n        ]\n\ngetLibraryPaths :: Path -> String -> String -> [Path]\ngetLibraryPaths lib base sofile =\n  map\n    MS.joinPath\n    [ [sofile]\n    , [\"lib\", sofile]\n    , [base, sofile]\n    , [lib, \"lib\", sofile]\n    , [lib, \"src\", base, sofile]\n    , [lib, \"src\", base, \"lib\", sofile]\n    ]\n"
  },
  {
    "path": "executable/Main.hs",
    "content": "{- |\nModule      : Main\nDescription : Executable main module\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Main where\n\nimport GHC.IO.Encoding (setLocaleEncoding, utf8)\nimport Options.Applicative\nimport Subcommands (runMorloc)\nimport UI\n\nmain :: IO ()\nmain = do\n  setLocaleEncoding utf8\n  runMorloc =<< execParser opts\n"
  },
  {
    "path": "executable/Subcommands.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Subcommands\nDescription : Dispatch CLI subcommands and inject the translator callback\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nImplements each CLI subcommand (make, typecheck, install, init, dump) and\ndefines the 'TranslateFn' callback that routes C++ to 'CppTranslator' and\nother languages to the generic translator. This is the dependency injection\npoint that keeps translator code out of the library.\n-}\nmodule Subcommands (runMorloc) where\n\nimport Control.Exception (SomeException, bracket, finally, try)\nimport Data.Time.Clock (getCurrentTime)\nimport Data.Time.Format (formatTime, defaultTimeLocale)\nimport qualified CppTranslator\nimport qualified Data.Aeson as JSON\nimport qualified Data.ByteString.Lazy as BL\nimport qualified Data.Map as Map\nimport qualified Data.Set as Set\nimport qualified Data.Text as T\nimport qualified Data.Text.IO as TIO\nimport Morloc (generatePools)\nimport qualified Morloc as M\nimport Morloc.CodeGenerator.Emit (TranslateFn)\nimport qualified Morloc.CodeGenerator.Grammars.Translator.Generic as Generic\nimport Morloc.CodeGenerator.Grammars.Translator.PseudoCode (pseudocodeSerialManifold)\nimport Morloc.CodeGenerator.Namespace (SerialManifold (..))\nimport qualified Morloc.CodeGenerator.SystemConfig as MSC\nimport qualified Morloc.Completion as Completion\nimport qualified Morloc.Config as Config\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Frontend.API as F\nimport Morloc.Module (OverwriteProtocol (..), findMainLocFile)\nimport qualified Morloc.Module as Mod\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport Morloc.Namespace.Type\nimport qualified Morloc.ProgramBuilder.Install as Install\nimport Morloc.Typecheck.Internal (prettyTypeU)\nimport System.Directory\n  ( createDirectoryIfMissing\n  , doesDirectoryExist\n  , doesFileExist\n  , getCurrentDirectory\n  , listDirectory\n  , removeDirectoryRecursive\n  , removeFile\n  , setCurrentDirectory\n  )\nimport System.Exit (exitFailure, exitSuccess)\nimport System.FilePath (dropExtension, takeDirectory, takeFileName)\nimport System.IO (hPutStrLn, stderr)\nimport System.IO.Temp (createTempDirectory)\nimport qualified System.Process as SP\nimport UI\n\ndecodePackageMeta :: BL.ByteString -> Maybe PackageMeta\ndecodePackageMeta = JSON.decode\n\n-- | Route each language to its translator.\ntranslator :: TranslateFn\ntranslator lang srcs es\n  | lang == CppTranslator.cppLang = CppTranslator.translate srcs es\n  | otherwise = Generic.translate lang srcs es\n\nrunMorloc :: CliCommand -> IO ()\nrunMorloc args = do\n  config <- getConfig args\n  buildConfig <- Config.loadBuildConfig config\n  let verbose = getVerbosity args\n  runPassed <- case args of\n    (CmdMake g) -> cmdMake g verbose config buildConfig\n    (CmdInstall g) -> cmdInstall g verbose config buildConfig\n    (CmdTypecheck g) -> cmdTypecheck g verbose config buildConfig\n    (CmdDump g) -> cmdDump g verbose config buildConfig\n    (CmdInit g) -> cmdInit g config\n    (CmdList g) -> cmdList g config\n    (CmdUninstall g) -> cmdUninstall g config\n    (CmdNew g) -> cmdNew g\n    (CmdEval g) -> cmdEval g verbose config buildConfig\n  case runPassed of\n    True -> exitSuccess\n    False -> exitFailure\n\n-- | read the global morloc config file or return a default one\ngetConfig :: CliCommand -> IO Config.Config\ngetConfig (CmdMake g) = getConfig' (makeConfig g) (makeVanilla g)\ngetConfig (CmdInstall g) = getConfig' (installConfig g) (installVanilla g)\ngetConfig (CmdTypecheck g) = getConfig' (typecheckConfig g) (typecheckVanilla g)\ngetConfig (CmdDump g) = getConfig' (dumpConfig g) (dumpVanilla g)\ngetConfig (CmdInit g) = getConfig' (initConfig g) (initVanilla g)\ngetConfig (CmdList g) = getConfig' (listConfig g) (listVanilla g)\ngetConfig (CmdUninstall g) = getConfig' (uninstallConfig g) (uninstallVanilla g)\ngetConfig (CmdEval g) = getConfig' (evalConfig g) (evalVanilla g)\ngetConfig (CmdNew _) = getConfig' \"\" False\n\ngetConfig' :: String -> Bool -> IO Config.Config\ngetConfig' _ True = Config.loadMorlocConfig Nothing\ngetConfig' \"\" _ = Config.loadMorlocConfig Nothing\ngetConfig' filename _ = Config.loadMorlocConfig (Just filename)\n\ngetVerbosity :: CliCommand -> Int\ngetVerbosity (CmdMake g) = makeVerbose g\ngetVerbosity (CmdInstall g) = installVerbose g\ngetVerbosity (CmdTypecheck g) = typecheckVerbose g\ngetVerbosity (CmdDump g) = dumpVerbose g\ngetVerbosity (CmdInit g) = if initQuiet g then 0 else 1\ngetVerbosity (CmdList g) = listVerbose g\ngetVerbosity (CmdEval g) = evalVerbose g\ngetVerbosity (CmdUninstall _) = 0\ngetVerbosity (CmdNew _) = 0\n\nreadScript :: Bool -> String -> IO (Maybe Path, Code)\nreadScript True code = return (Nothing, Code (MT.pack code))\nreadScript _ filename = do\n  code <- MT.readFile filename\n  return (Just filename, Code code)\n\n-- | Typecheck callback for module installation\ntypecheckModuleFn :: FilePath -> MorlocMonad [(T.Text, T.Text)]\ntypecheckModuleFn mainFile = do\n  code <- liftIO $ MT.readFile mainFile\n  -- Save current state, run typecheck in a clean sub-state\n  savedState <- MM.get\n  result <-\n    MM.catchError\n      ( do\n          xs <- M.typecheckFrontend (Just mainFile) (Code code)\n          st <- MM.get\n          return\n            [ (render (pretty v), render (pretty t))\n            | AnnoS (Idx i t) _ _ <- xs\n            , Just v <- [Map.lookup i (stateName st)]\n            ]\n      )\n      (\\_ -> return [])\n  -- Restore state so module typechecking doesn't pollute the parent state\n  MM.put savedState\n  return result\n\n-- | Install a module\ncmdInstall :: InstallCommand -> Int -> Config.Config -> BuildConfig -> IO Bool\ncmdInstall args verbosity conf buildConfig = do\n  userSources <- Map.fromList <$> mapM (\\modstr -> do\n    name <- Mod.extractModuleName modstr\n    return (name, modstr)) moduleTexts\n  let cmdInstall' =\n        mapM\n          ( \\modstr ->\n              Mod.installModule\n                (installForce args)\n                (installUseSSH args)\n                libpath\n                (Config.configPlaneCore conf)\n                mayTypecheck\n                userSources\n                Set.empty\n                Mod.ExplicitInstall\n                modstr\n          )\n          moduleTexts\n  passed <- MM.runMorlocMonad Nothing verbosity conf buildConfig cmdInstall' >>= MM.writeMorlocReturn\n  if passed && installBuild args\n    then buildInstalledModules args verbosity conf buildConfig moduleTexts libpath\n    else return passed\n  where\n    libpath = Config.configLibrary conf </> Config.configPlane conf\n    moduleTexts = map MT.pack (installModuleStrings args)\n\n    mayTypecheck =\n      if installNoTypecheck args\n        then Nothing\n        else Just typecheckModuleFn\n\n-- | Build and install executables for installed modules\nbuildInstalledModules ::\n  InstallCommand -> Int -> Config.Config -> BuildConfig -> [T.Text] -> FilePath -> IO Bool\nbuildInstalledModules args verbosity conf buildConfig moduleTexts libpath = do\n  results <- mapM buildOne moduleTexts\n  return (and results)\n  where\n    force = installForce args == ForceOverwrite\n\n    buildOne modstr = do\n      name <- T.unpack <$> Mod.extractModuleName modstr\n      let moduleDir = libpath </> name\n      mainFile <- findMainLocFile moduleDir name\n      case mainFile of\n        Nothing -> do\n          putStrLn $ \"Warning: no main.loc found for '\" <> name <> \"', skipping build\"\n          return True\n        Just locFile -> do\n          origDir <- getCurrentDirectory\n          setCurrentDirectory moduleDir\n          buildResult <-\n            buildModuleExecutable locFile name verbosity conf buildConfig force\n              `finally` setCurrentDirectory origDir\n          return buildResult\n\n    buildModuleExecutable locFile _name verbosity' config buildConfig' forceOverwrite = do\n      code <- MT.readFile locFile\n      makeAndInstall (Just locFile) Nothing (Code code) [] verbosity' config buildConfig' forceOverwrite\n\n-- | Compile a morloc program and optionally install it.\n-- Shared by `morloc make --install` and `morloc install --build`.\nmakeAndInstall ::\n  Maybe Path -> Maybe String -> Code -> [T.Text] -> Int ->\n  Config.Config -> BuildConfig -> Bool -> IO Bool\nmakeAndInstall path outfile code extraIncludes verbosity config buildConfig force = do\n  let action = do\n        MM.modify (\\s -> s {stateInstall = True, stateInstallForce = force})\n        M.writeProgram translator path code\n  result <- MM.runMorlocMonad outfile verbosity config buildConfig action\n  passed <- MM.writeMorlocReturn result\n  if passed\n    then do\n      let (_, finalState) = result\n          -- Merge include fields from all loaded packages.\n          -- Nothing = include everything (default mode).\n          -- Just [...] = strict allowlist mode.\n          pkgIncludes = map packageInclude (statePackageMeta finalState)\n          mergedIncludes\n            | not (null extraIncludes) =\n                -- CLI --include flags force strict mode\n                Just (concatMap (fromMaybe []) pkgIncludes ++ extraIncludes)\n            | all (== Nothing) pkgIncludes = Nothing\n            | otherwise = Just (concatMap (fromMaybe []) pkgIncludes)\n          allSources = concat (GMap.elems (stateSources finalState))\n          directSourcePaths = [ p | Source{srcPath = Just p} <- allSources ]\n      case stateInstallDir finalState of\n        Nothing -> do\n          putStrLn \"Error: install directory was not set during compilation\"\n          return False\n        Just installDir -> do\n          let installName = takeFileName installDir\n              packageRoot = case fmap takeDirectory path of\n                Just \"\"  -> \".\"\n                Just d   -> d\n                Nothing  -> \".\"\n          -- Atomic install: clean up installDir on any failure so the user\n          -- is not left with partial state requiring --force on retry.\n          installResult <- try (do\n            -- Only validate coverage in strict mode (explicit include patterns)\n            case mergedIncludes of\n              Just pats -> do\n                Install.validateIncludeCoverage packageRoot pats directSourcePaths\n              Nothing -> return ()\n            Install.installProgram (Config.configHome config) installDir installName mergedIncludes force\n            ) :: IO (Either SomeException ())\n          case installResult of\n            Right () -> return True\n            Left e -> do\n              dirExists <- doesDirectoryExist installDir\n              if dirExists\n                then do\n                  removeDirectoryRecursive installDir\n                  hPutStrLn stderr $ \"Cleaned up partial install: \" <> installDir\n                else return ()\n              hPutStrLn stderr $ show e\n              return False\n    else return False\n\n-- | build a Morloc program, generating the nexus and pool files\ncmdMake :: MakeCommand -> Int -> Config.Config -> BuildConfig -> IO Bool\ncmdMake args verbosity config buildConfig = do\n  (path, code) <- readScript (makeExpression args) (makeScript args)\n  outfile <- case makeOutfile args of\n    \"\" -> return Nothing\n    x -> return . Just $ x\n  if makeInstall args\n    then\n      makeAndInstall path outfile code\n        (map T.pack (makeInclude args)) verbosity config buildConfig (makeForce args)\n    else do\n      let action = do\n            MM.modify (\\s -> s {stateInstall = False})\n            M.writeProgram translator path code\n      result <- MM.runMorlocMonad outfile verbosity config buildConfig action\n      passed <- MM.writeMorlocReturn result\n      return passed\n\n-- | Evaluate a morloc expression\ncmdEval :: EvalCommand -> Int -> Config.Config -> BuildConfig -> IO Bool\ncmdEval args verbosity config buildConfig = do\n  let rawExpr = evalExpression args\n      code = MT.pack (preprocessEvalInput rawExpr)\n      tmpBase = Config.configTmpDir config\n      saveName = evalSave args\n      extraArgs = evalArgs args\n      isSave = not (null saveName)\n      exeName = if isSave then saveName else \"eval\"\n  createDirectoryIfMissing True tmpBase\n  bracket\n    (do\n      origDir <- getCurrentDirectory\n      tmpDir <- createTempDirectory tmpBase \"morloc-eval-\"\n      setCurrentDirectory tmpDir\n      return (origDir, tmpDir))\n    (\\(origDir, tmpDir) -> do\n      setCurrentDirectory origDir\n      cleanupTmpDir tmpDir)\n    (\\(_origDir, tmpDir) -> do\n      let action = do\n            MM.modify (\\s -> s {stateEvalMode = True})\n            if isSave then MM.modify (\\s -> s {stateInstall = True}) else return ()\n            M.writeProgram translator Nothing (Code code)\n      result <- MM.runMorlocMonad (Just exeName) verbosity config buildConfig action\n      passed <- MM.writeMorlocReturn result\n      if not passed\n        then return False\n        else\n          if isSave\n            then do\n              let (_, finalState) = result\n                  pkgIncludes = map packageInclude (statePackageMeta finalState)\n                  mergedIncludes\n                    | all (== Nothing) pkgIncludes = Nothing\n                    | otherwise = Just (concatMap (fromMaybe []) pkgIncludes)\n              case stateInstallDir finalState of\n                Nothing -> do\n                  putStrLn \"Error: install directory was not set during compilation\"\n                  return False\n                Just installDir -> do\n                  evalInstallResult <- try (do\n                    Install.installProgram (Config.configHome config) installDir saveName mergedIncludes True\n                    writeEvalMeta (Config.configHome config) saveName rawExpr\n                    ) :: IO (Either SomeException ())\n                  case evalInstallResult of\n                    Right () -> return True\n                    Left e -> do\n                      dirExists <- doesDirectoryExist installDir\n                      if dirExists\n                        then do\n                          removeDirectoryRecursive installDir\n                          hPutStrLn stderr $ \"Cleaned up partial install: \" <> installDir\n                        else return ()\n                      hPutStrLn stderr $ show e\n                      return False\n            else do\n              let exe = tmpDir </> exeName\n              subcommand <- getFirstSubcommand exe\n              let cmdArgs = subcommand : extraArgs\n              runResult <- try (SP.callProcess exe cmdArgs) :: IO (Either SomeException ())\n              case runResult of\n                Right () -> return True\n                Left e -> do\n                  putStrLn $ \"Error running expression: \" ++ show e\n                  return False)\n  where\n    cleanupTmpDir dir = do\n      exists <- doesDirectoryExist dir\n      if exists then removeDirectoryRecursive dir else return ()\n\n-- | Extract the first subcommand name from the manifest embedded in a wrapper script.\n-- Falls back to \"__expr__\" if the manifest cannot be parsed.\ngetFirstSubcommand :: FilePath -> IO String\ngetFirstSubcommand wrapperPath = do\n  result <- try (readFile wrapperPath) :: IO (Either SomeException String)\n  case result of\n    Left _ -> return \"__expr__\"\n    Right contents -> do\n      let marker = \"### MANIFEST ###\"\n          afterMarker = drop 1 $ dropWhile (/= marker) (lines contents)\n          manifestStr = unlines afterMarker\n      case JSON.eitherDecode (BL.fromStrict (MT.encodeUtf8 (MT.pack manifestStr))) of\n        Right pm -> case pmCommands pm of\n          (cmd : _) -> return (T.unpack (pcName cmd))\n          [] -> return \"__expr__\"\n        Left _ -> return \"__expr__\"\n\n-- | Write metadata about the saved eval expression\nwriteEvalMeta :: FilePath -> String -> String -> IO ()\nwriteEvalMeta cfgHome name expr = do\n  now <- getCurrentTime\n  let fdbDir = cfgHome </> \"fdb\"\n      metaPath = fdbDir </> name ++ \".eval-meta\"\n      timestamp = formatTime defaultTimeLocale \"%Y-%m-%dT%H:%M:%SZ\" now\n      json = \"{\\\"expression\\\":\" ++ jsonEscape expr ++ \",\\\"timestamp\\\":\\\"\" ++ timestamp ++ \"\\\"}\"\n  createDirectoryIfMissing True fdbDir\n  writeFile metaPath json\n  where\n    jsonEscape s = \"\\\"\" ++ concatMap escChar s ++ \"\\\"\"\n    escChar '\"' = \"\\\\\\\"\"\n    escChar '\\\\' = \"\\\\\\\\\"\n    escChar '\\n' = \"\\\\n\"\n    escChar '\\t' = \"\\\\t\"\n    escChar c = [c]\n\n-- | Preprocess eval input: replace top-level semicolons with newlines.\n-- Semicolons inside explicit brace blocks (depth > 0) are preserved.\n-- Leading whitespace after each replacement is stripped so the layout\n-- rule treats each statement as a new top-level declaration.\npreprocessEvalInput :: String -> String\npreprocessEvalInput = go (0 :: Int)\n  where\n    go _ [] = []\n    go depth ('{' : rest) = '{' : go (depth + 1) rest\n    go depth ('}' : rest) = '}' : go (max 0 (depth - 1)) rest\n    go 0 (';' : rest) = '\\n' : go 0 (dropWhile (== ' ') rest)\n    go depth ('\"' : rest) = '\"' : goString depth rest\n    go depth (c : rest) = c : go depth rest\n\n    goString depth [] = go depth []\n    goString depth ('\"' : rest) = '\"' : go depth rest\n    goString depth ('\\\\' : c : rest) = '\\\\' : c : goString depth rest\n    goString depth (c : rest) = c : goString depth rest\n\ncmdTypecheck :: TypecheckCommand -> Int -> Config.Config -> BuildConfig -> IO Bool\ncmdTypecheck args _ config buildConfig = do\n  (path, code) <- readScript (typecheckExpression args) (typecheckScript args)\n  let verbosity = typecheckVerbose args\n  if typecheckType args\n    then case F.readType (unCode code) of\n      (Left err') -> do\n        putStrLn err'\n        return False\n      (Right x) -> do\n        print x\n        return True\n    else\n      if typecheckRealize args\n        then do\n          (passed, result) <-\n            MM.runMorlocMonad\n              Nothing\n              verbosity\n              config\n              buildConfig\n              ( M.typecheck path code\n                  >>= (generatePools . snd)\n              )\n              |>> writeTypecheckOutput verbosity\n          putDoc (result <> \"\\n\")\n          return passed\n        else do\n          (passed, result) <-\n            MM.runMorlocMonad\n              Nothing\n              verbosity\n              config\n              buildConfig\n              (M.typecheckFrontend path code)\n              |>> writeFrontendTypecheckOutput verbosity\n          putDoc (result <> \"\\n\")\n          return passed\n\nwriteFrontendTypecheckOutput ::\n  Int ->\n  ((Either MorlocError [AnnoS (Indexed TypeU) Many Int], [MT.Text]), MorlocState) ->\n  (Bool, MDoc)\nwriteFrontendTypecheckOutput _ ((Left e, _), st) = (False, MM.makeMorlocError st e)\nwriteFrontendTypecheckOutput 0 ((Right xs, _), st) = (True, vsep (map (writeFrontendTypes st) xs))\nwriteFrontendTypecheckOutput 1 x = writeFrontendTypecheckOutput 0 x -- no difference in verbosity\nwriteFrontendTypecheckOutput _ _ = (False, \"I don't know how to be that verbose\")\n\nwriteFrontendTypes :: MorlocState -> AnnoS (Indexed TypeU) Many Int -> MDoc\nwriteFrontendTypes st (AnnoS (Idx i t) _ _) =\n  case Map.lookup i (stateName st) of\n    (Just v) -> pretty v <+> \"::\" <+> prettyTypeU t\n    Nothing -> \"? ::\" <+> prettyTypeU t\n\nwriteTypecheckOutput ::\n  Int -> ((Either MorlocError [(Lang, [SerialManifold])], [MT.Text]), MorlocState) -> (Bool, MDoc)\nwriteTypecheckOutput _ ((Left e, _), st) = (False, MM.makeMorlocError st e)\nwriteTypecheckOutput _ ((Right pools, _), _) = (True, vsep $ map (uncurry writePool) pools)\n\nwritePool :: Lang -> [SerialManifold] -> MDoc\nwritePool lang manifolds = pretty lang <+> \"pool:\" <> \"\\n\" <> vsep (map pseudocodeSerialManifold manifolds) <> \"\\n\"\n\ncmdDump :: DumpCommand -> Int -> Config.Config -> BuildConfig -> IO Bool\ncmdDump args _ config buildConfig = do\n  (path, code) <- readScript (dumpExpression args) (dumpScript args)\n  let verbosity = dumpVerbose args\n  ((x, _), st) <- MM.runMorlocMonad Nothing verbosity config buildConfig (F.parse path code)\n  case x of\n    (Left e) -> do\n      putDoc $ MM.makeMorlocError st e\n      return False\n    (Right e) -> do\n      putDoc $ prettyDAG e\n      return True\n\ncmdInit :: InitCommand -> Config.Config -> IO Bool\ncmdInit ic config = MSC.configureAll (not (initQuiet ic)) (initForce ic) (initSlurmSupport ic) (initSanitize ic) config\n\ncmdNew :: NewCommand -> IO Bool\ncmdNew args = do\n  let pkgFile = \"package.yaml\"\n  exists <- doesFileExist pkgFile\n  if exists\n    then do\n      hPutStrLn stderr \"Error: package.yaml already exists. Remove it first or use a different directory.\"\n      return False\n    else do\n      name <-\n        if null (newName args)\n          then takeFileName <$> getCurrentDirectory\n          else return (newName args)\n      writeFile pkgFile $\n        unlines\n          [ \"name: \" ++ name\n          , \"version: 0.1.0\"\n          , \"homepage: null\"\n          , \"synopsis: null\"\n          , \"description: null\"\n          , \"category: null\"\n          , \"license: MIT\"\n          , \"author: null\"\n          , \"maintainer: null\"\n          , \"github: null\"\n          , \"bug-reports: null\"\n          , \"dependencies: []\"\n          , \"# Uncomment to restrict which files are copied during install.\"\n          , \"# By default, all files are included (filtered by .morlocignore).\"\n          , \"# include:\"\n          , \"#   - \\\"*.py\\\"\"\n          , \"#   - \\\"src/\\\"\"\n          ]\n      hPutStrLn stderr $ \"Created package.yaml for '\" ++ name ++ \"'\"\n      return True\n\nprettyDAG :: DAG MVar e ExprI -> MDoc\nprettyDAG m0 = vsep (map prettyEntry (Map.toList m0))\n  where\n    prettyEntry :: (MVar, (ExprI, [(MVar, e)])) -> MDoc\n    prettyEntry (k, (n, _)) = block 4 (pretty k) (vsep [pretty n])\n\n-- ======================================================================\n-- List command\n-- ======================================================================\n\n-- Lightweight JSON types for reading manifests\n\ndata ModuleManifest = ModuleManifest\n  { mmName :: T.Text\n  , mmVersion :: T.Text\n  , mmSynopsis :: T.Text\n  , mmExports :: [(T.Text, T.Text)]\n  , mmMorlocDeps :: [T.Text]\n  , mmReason :: T.Text\n  }\n\ndata ProgramManifest = ProgramManifest\n  { pmName :: T.Text\n  , pmCommands :: [ProgramCommand]\n  }\n\ndata ProgramCommand = ProgramCommand\n  { pcName :: T.Text\n  , pcReturnType :: T.Text\n  , _pcArgSchemas :: [T.Text]\n  }\n\ninstance JSON.FromJSON ModuleManifest where\n  parseJSON = JSON.withObject \"ModuleManifest\" $ \\o ->\n    ModuleManifest\n      <$> o JSON..:? \"name\" JSON..!= \"\"\n      <*> o JSON..:? \"version\" JSON..!= \"\"\n      <*> o JSON..:? \"synopsis\" JSON..!= \"\"\n      <*> (o JSON..:? \"exports\" JSON..!= [] >>= mapM parseExport)\n      <*> o JSON..:? \"morloc_dependencies\" JSON..!= []\n      <*> o JSON..:? \"install_reason\" JSON..!= \"\"\n    where\n      parseExport = JSON.withObject \"Export\" $ \\o ->\n        (,) <$> o JSON..: \"name\" <*> o JSON..: \"type\"\n\ninstance JSON.FromJSON ProgramManifest where\n  parseJSON = JSON.withObject \"ProgramManifest\" $ \\o ->\n    ProgramManifest\n      <$> o JSON..:? \"name\" JSON..!= \"\"\n      <*> o JSON..:? \"commands\" JSON..!= []\n\ninstance JSON.FromJSON ProgramCommand where\n  parseJSON = JSON.withObject \"ProgramCommand\" $ \\o ->\n    ProgramCommand\n      <$> o JSON..: \"name\"\n      <*> o JSON..:? \"return_type\" JSON..!= \"\"\n      <*> o JSON..:? \"arg_schemas\" JSON..!= []\n\n-- | Check if pattern is a subsequence of the target string (case-insensitive)\nsubsequenceMatch :: String -> String -> Bool\nsubsequenceMatch [] _ = True\nsubsequenceMatch _ [] = False\nsubsequenceMatch (p : ps) (t : ts)\n  | toLower p == toLower t = subsequenceMatch ps ts\n  | otherwise = subsequenceMatch (p : ps) ts\n\ncmdList :: ListCommand -> Config.Config -> IO Bool\ncmdList args config = do\n  let fdbDir = Config.configHome config </> \"fdb\"\n      libDir = Config.configLibrary config </> Config.configPlane config\n      verbose = listVerbose args\n      kind = listKind args\n      pat = listPattern args\n\n  -- Load module manifests\n  allModules <-\n    if kind /= Just ListPrograms\n      then do\n        mods <- loadModuleManifests fdbDir\n        discovered <- discoverModules libDir fdbDir\n        return (mods ++ discovered)\n      else return []\n\n  -- Load program manifests\n  allPrograms <-\n    if kind /= Just ListModules\n      then loadProgramManifests fdbDir\n      else return []\n\n  -- Filter by pattern\n  let modules = case pat of\n        Nothing -> allModules\n        Just p -> filter (\\m -> subsequenceMatch p (T.unpack (mmName m))) allModules\n      programs = case pat of\n        Nothing -> allPrograms\n        Just p -> filter (\\m -> subsequenceMatch p (T.unpack (pmName m))) allPrograms\n\n  -- For verbose mode, fill in exports from .loc files when manifest has none\n  modules' <-\n    if verbose > 0\n      then mapM (fillModuleExports libDir) modules\n      else return modules\n\n  -- Print results\n  if null modules' && null programs\n    then putStrLn \"No installed modules or programs found.\"\n    else do\n      if not (null modules')\n        then do\n          putStrLn \"Modules:\"\n          mapM_ (printModule verbose) modules'\n        else return ()\n      if not (null programs)\n        then do\n          if not (null modules') then putStrLn \"\" else return ()\n          putStrLn \"Programs:\"\n          mapM_ (printProgram verbose) programs\n        else return ()\n\n  return True\n\n-- | If a module has no exports in its manifest, scan its .loc file for type signatures\nfillModuleExports :: FilePath -> ModuleManifest -> IO ModuleManifest\nfillModuleExports libDir m\n  | not (null (mmExports m)) = return m\n  | otherwise = do\n      let modDir = libDir </> T.unpack (mmName m)\n          modName = T.unpack (mmName m)\n      mainFile <- findMainLocFile modDir modName\n      case mainFile of\n        Nothing -> return m\n        Just f -> do\n          sigs <- extractTypeSignatures f\n          return m {mmExports = sigs}\n\n-- | Extract top-level type signatures from a .loc file\nextractTypeSignatures :: FilePath -> IO [(T.Text, T.Text)]\nextractTypeSignatures path = do\n  result <- try (TIO.readFile path) :: IO (Either SomeException T.Text)\n  case result of\n    Left _ -> return []\n    Right content ->\n      return\n        . map parseSig\n        . filter isTypeSig\n        . T.lines\n        $ content\n  where\n    isTypeSig ln =\n      let stripped = T.stripStart ln\n       in not (T.null stripped)\n            && T.head stripped /= '-' -- not a comment\n            && T.head stripped /= '{' -- not a block comment\n            && T.isInfixOf \" :: \" stripped\n            && not (T.isPrefixOf \"type \" stripped)\n            && not (T.isPrefixOf \"source \" stripped)\n            && not (T.isPrefixOf \"import \" stripped)\n            && not (T.isPrefixOf \"module \" stripped)\n            && not (T.isPrefixOf \"class \" stripped)\n            && not (T.isPrefixOf \"instance \" stripped)\n\n    parseSig ln =\n      let (sigName, rest) = T.breakOn \" :: \" (T.stripStart ln)\n          typ = T.strip (T.drop 4 rest) -- drop \" :: \"\n       in (T.strip sigName, typ)\n\nloadModuleManifests :: FilePath -> IO [ModuleManifest]\nloadModuleManifests fdbDir = do\n  result <- try (listDirectory fdbDir) :: IO (Either SomeException [FilePath])\n  case result of\n    Left _ -> return []\n    Right entries -> do\n      let moduleFiles = filter (\".module\" `isSuffixOf`) entries\n      catMaybes\n        <$> mapM\n          ( \\f -> do\n              r <- try (BL.readFile (fdbDir </> f)) :: IO (Either SomeException BL.ByteString)\n              case r of\n                Left _ -> return Nothing\n                Right bs -> case JSON.eitherDecode bs of\n                  Right m -> return (Just m)\n                  Left _ -> return Nothing\n          )\n          moduleFiles\n\nloadProgramManifests :: FilePath -> IO [ProgramManifest]\nloadProgramManifests fdbDir = do\n  result <- try (listDirectory fdbDir) :: IO (Either SomeException [FilePath])\n  case result of\n    Left _ -> return []\n    Right entries -> do\n      let manifestFiles = filter (\".manifest\" `isSuffixOf`) entries\n      catMaybes\n        <$> mapM\n          ( \\f -> do\n              r <- try (BL.readFile (fdbDir </> f)) :: IO (Either SomeException BL.ByteString)\n              case r of\n                Left _ -> return Nothing\n                Right bs -> case JSON.eitherDecode bs of\n                  Right m ->\n                    let m' =\n                          if T.null (pmName m)\n                            then m {pmName = T.pack (dropExtension (takeFileName f))}\n                            else m\n                     in return (Just m')\n                  Left _ -> return Nothing\n          )\n          manifestFiles\n\n-- | Discover modules in the library that lack manifests\ndiscoverModules :: FilePath -> FilePath -> IO [ModuleManifest]\ndiscoverModules libDir fdbDir = do\n  libExists <- doesDirectoryExist libDir\n  if not libExists\n    then return []\n    else do\n      entries <- listDirectory libDir\n      catMaybes\n        <$> mapM\n          ( \\name -> do\n              let manifestPath = fdbDir </> name ++ \".module\"\n                  moduleDir = libDir </> name\n              hasManifest <- doesFileExist manifestPath\n              isDir <- doesDirectoryExist moduleDir\n              if hasManifest || not isDir\n                then return Nothing\n                else do\n                  -- Try to read package.yaml for basic info\n                  let pkgYaml = moduleDir </> \"package.yaml\"\n                  pkgExists <- doesFileExist pkgYaml\n                  if pkgExists\n                    then do\n                      r <- try (BL.readFile pkgYaml) :: IO (Either SomeException BL.ByteString)\n                      case r of\n                        Left _ -> return (Just (minimalManifest name))\n                        Right bs -> case decodePackageMeta bs of\n                          Just meta ->\n                            return . Just $\n                              ModuleManifest\n                                { mmName = if T.null (packageName meta) then T.pack name else packageName meta\n                                , mmVersion = packageVersion meta\n                                , mmSynopsis = packageSynopsis meta\n                                , mmExports = []\n                                , mmMorlocDeps = []\n                                , mmReason = \"\"\n                                }\n                          Nothing -> return (Just (minimalManifest name))\n                    else return (Just (minimalManifest name))\n          )\n          entries\n  where\n    minimalManifest name =\n      ModuleManifest\n        { mmName = T.pack name\n        , mmVersion = \"\"\n        , mmSynopsis = \"\"\n        , mmExports = []\n        , mmMorlocDeps = []\n        , mmReason = \"\"\n        }\n\nprintModule :: Int -> ModuleManifest -> IO ()\nprintModule verbose m = do\n  let name = mmName m\n      ver = if T.null (mmVersion m) then \"\" else \" \" <> T.unpack (mmVersion m)\n      syn = if T.null (mmSynopsis m) then \"\" else \"  \" <> T.unpack (mmSynopsis m)\n  putStrLn $ \"  \" <> T.unpack name <> ver <> syn\n  if verbose > 0\n    then mapM_ (\\(n, t) -> putStrLn $ \"    \" <> T.unpack n <> \" :: \" <> T.unpack t) (mmExports m)\n    else return ()\n\nprintProgram :: Int -> ProgramManifest -> IO ()\nprintProgram verbose p = do\n  let name = pmName p\n      cmds = pmCommands p\n      cmdCount = length cmds\n      summary = show cmdCount <> \" command\" <> (if cmdCount /= 1 then \"s\" else \"\")\n  putStrLn $ \"  \" <> T.unpack name <> \"  \" <> summary\n  if verbose > 0\n    then\n      mapM_ (\\c -> putStrLn $ \"    \" <> T.unpack (pcName c) <> \" :: \" <> T.unpack (pcReturnType c)) cmds\n    else return ()\n\n-- ======================================================================\n-- Uninstall command\n-- ======================================================================\n\ncmdUninstall :: UninstallCommand -> Config.Config -> IO Bool\ncmdUninstall args config = do\n  let fdbDir = Config.configHome config </> \"fdb\"\n      libDir = Config.configLibrary config </> Config.configPlane config\n      binDir = Config.configHome config </> \"bin\"\n      exeDir = Config.configHome config </> \"exe\"\n      dryRun = uninstallDryRun args\n      kind = uninstallKind args\n\n  names <- if uninstallAll args\n    then do\n      fdbExists <- doesDirectoryExist fdbDir\n      if not fdbExists\n        then return []\n        else do\n          entries <- listDirectory fdbDir\n          let moduleNames = [dropExtension f | f <- entries, \".module\" `isSuffixOf` f]\n          return moduleNames\n    else return (uninstallNames args)\n\n  if null names\n    then do\n      if uninstallAll args\n        then putStrLn \"No modules installed\"\n        else putStrLn \"No module names specified. Use --all to uninstall all modules.\"\n      return True\n    else do\n      let skipDepCheck = uninstallAll args\n      allPassed <- mapM (\\name -> uninstallOne fdbDir libDir binDir exeDir dryRun skipDepCheck kind name) names\n      let anyRemoved = or allPassed\n\n      -- Regenerate completions if anything was actually removed\n      if anyRemoved && not dryRun\n        then Completion.regenerateCompletions False (Config.configHome config)\n        else return ()\n\n      return True\n\nuninstallOne ::\n  FilePath -> FilePath -> FilePath -> FilePath -> Bool -> Bool -> Maybe ListKind -> String -> IO Bool\nuninstallOne fdbDir libDir binDir exeDir dryRun skipDepCheck kind name = do\n  let moduleManifest = fdbDir </> name ++ \".module\"\n      programManifest = fdbDir </> name ++ \".manifest\"\n      moduleDir = libDir </> name\n\n  hasModule <- doesFileExist moduleManifest\n  hasModuleDir <- doesDirectoryExist moduleDir\n  hasProgram <- doesFileExist programManifest\n\n  let removeModule = (hasModule || hasModuleDir) && kind /= Just ListPrograms\n      removeProgram = hasProgram && kind /= Just ListModules\n\n  if not removeModule && not removeProgram\n    then do\n      putStrLn $ \"Nothing found for '\" <> name <> \"'\"\n      return False\n    else do\n      -- Reverse dependency check for modules (skip when uninstalling all)\n      if removeModule\n        then do\n          if not skipDepCheck then checkReverseDeps fdbDir name else return ()\n          if dryRun\n            then do\n              putStrLn $ \"Would uninstall module '\" <> name <> \"'\"\n              if hasModuleDir\n                then putStrLn $ \"  Remove: \" <> moduleDir\n                else return ()\n              if hasModule\n                then putStrLn $ \"  Remove: \" <> moduleManifest\n                else return ()\n            else do\n              if hasModuleDir then removeDirectoryRecursive moduleDir else return ()\n              if hasModule then removeFile moduleManifest else return ()\n              hPutStrLn stderr $ \"Uninstalled module '\" <> name <> \"'\"\n        else return ()\n\n      if removeProgram\n        then do\n          let binPath = binDir </> name\n          binExists <- doesFileExist binPath\n          if dryRun\n            then do\n              putStrLn $ \"Would uninstall program '\" <> name <> \"'\"\n              if binExists then putStrLn $ \"  Remove: \" <> binPath else return ()\n              -- Check for exe dir\n              exeDirPath <- findExeDir exeDir name\n              case exeDirPath of\n                Just d -> putStrLn $ \"  Remove: \" <> d\n                Nothing -> return ()\n              putStrLn $ \"  Remove: \" <> programManifest\n            else do\n              if binExists then removeFile binPath else return ()\n              exeDirPath <- findExeDir exeDir name\n              case exeDirPath of\n                Just d -> removeDirectoryRecursive d\n                Nothing -> return ()\n              removeFile programManifest\n              hPutStrLn stderr $ \"Uninstalled program '\" <> name <> \"'\"\n        else return ()\n\n      return (removeModule || removeProgram)\n\n-- | Find the exe directory for a program\nfindExeDir :: FilePath -> String -> IO (Maybe FilePath)\nfindExeDir exeDir name = do\n  exists <- doesDirectoryExist exeDir\n  if not exists\n    then return Nothing\n    else do\n      entries <- listDirectory exeDir\n      -- Look for name or name-<hash>\n      let matches = filter (\\e -> e == name || (name ++ \"-\") `isPrefixOf'` e) entries\n      case matches of\n        (m : _) -> return (Just (exeDir </> m))\n        [] -> return Nothing\n  where\n    isPrefixOf' prefix str = take (length prefix) str == prefix\n\n-- | Check if any other modules depend on the one being uninstalled\ncheckReverseDeps :: FilePath -> String -> IO ()\ncheckReverseDeps fdbDir name = do\n  result <- try (listDirectory fdbDir) :: IO (Either SomeException [FilePath])\n  case result of\n    Left _ -> return ()\n    Right entries -> do\n      let moduleFiles = filter (\".module\" `isSuffixOf`) entries\n          nameT = T.pack name\n      forM_ moduleFiles $ \\f -> do\n        r <- try (BL.readFile (fdbDir </> f)) :: IO (Either SomeException BL.ByteString)\n        case r of\n          Left _ -> return ()\n          Right bs -> case JSON.eitherDecode bs :: Either String ModuleManifest of\n            Right m\n              | nameT `elem` mmMorlocDeps m && mmName m /= nameT ->\n                  putStrLn $ \"Warning: module '\" <> T.unpack (mmName m) <> \"' depends on '\" <> name <> \"'\"\n            _ -> return ()\n"
  },
  {
    "path": "executable/UI.hs",
    "content": "{- |\nModule      : UI\nDescription : CLI argument parsing with optparse-applicative\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nDefines the command-line interface for the @morloc@ executable using\noptparse-applicative: subcommands (make, typecheck, install, init, dump,\ncompletion), their options, and help text.\n-}\nmodule UI\n  ( opts\n  , CliCommand (..)\n  , MakeCommand (..)\n  , InitCommand (..)\n  , InstallCommand (..)\n  , TypecheckCommand (..)\n  , DumpCommand (..)\n  , ListCommand (..)\n  , ListKind (..)\n  , UninstallCommand (..)\n  , NewCommand (..)\n  , EvalCommand (..)\n  ) where\n\nimport Morloc.Module (GitProtocol (..), OverwriteProtocol (..))\nimport Morloc.Version (versionStr)\nimport Options.Applicative\nimport qualified Options.Applicative.Extra as OAE\n\nopts :: ParserInfo CliCommand\nopts =\n  info\n    (cliParser <**> helper <**> OAE.simpleVersioner versionStr)\n    ( fullDesc\n        <> progDesc \"Call 'morloc make -h', 'morloc install -h', etc for details\"\n        <> header (\"morloc v\" <> versionStr)\n    )\n\ndata CliCommand\n  = CmdMake MakeCommand\n  | CmdInstall InstallCommand\n  | CmdUninstall UninstallCommand\n  | CmdList ListCommand\n  | CmdTypecheck TypecheckCommand\n  | CmdDump DumpCommand\n  | CmdInit InitCommand\n  | CmdNew NewCommand\n  | CmdEval EvalCommand\n\ncliParser :: Parser CliCommand\ncliParser =\n  hsubparser\n    ( makeSubcommand\n        <> installSubcommand\n        <> uninstallSubcommand\n        <> listSubcommand\n        <> typecheckSubcommand\n        <> dumpSubcommand\n        <> initSubcommand\n        <> newSubcommand\n        <> evalSubcommand\n    )\n\ndata MakeCommand = MakeCommand\n  { makeExpression :: Bool\n  , makeConfig :: String\n  , makeVerbose :: Int\n  , makeVanilla :: Bool\n  , makeOutfile :: String\n  , makeInstall :: Bool\n  , makeForce :: Bool\n  , makeInclude :: [String]\n  , makeScript :: String\n  }\n\nmakeCommandParser :: Parser MakeCommand\nmakeCommandParser =\n  MakeCommand\n    <$> optExpression\n    <*> optConfig\n    <*> optVerbose\n    <*> optVanilla\n    <*> optOutfile\n    <*> optMakeInstall\n    <*> optMakeForce\n    <*> optMakeInclude\n    <*> optScript\n\nmakeSubcommand :: Mod CommandFields CliCommand\nmakeSubcommand = command \"make\" (info (CmdMake <$> makeCommandParser) (progDesc \"Build a morloc script\"))\n\ndata InitCommand = InitCommand\n  { initConfig :: String\n  , initQuiet :: Bool\n  , initVanilla :: Bool\n  , initForce :: OverwriteProtocol\n  , initSlurmSupport :: Bool\n  , initSanitize :: Bool\n  }\n\ninitCommandParser :: Parser InitCommand\ninitCommandParser =\n  InitCommand\n    <$> optConfig\n    <*> optQuiet\n    <*> optVanilla\n    <*> optForce\n    <*> optSlurmSupport\n    <*> optSanitize\n\ninitSubcommand :: Mod CommandFields CliCommand\ninitSubcommand = command \"init\" (info (CmdInit <$> initCommandParser) (progDesc \"Initialize morloc environment\"))\n\ndata NewCommand = NewCommand\n  { newName :: String\n  }\n\nnewCommandParser :: Parser NewCommand\nnewCommandParser =\n  NewCommand\n    <$> strArgument\n      ( metavar \"NAME\"\n          <> value \"\"\n          <> help \"Package name (defaults to current directory name)\"\n      )\n\nnewSubcommand :: Mod CommandFields CliCommand\nnewSubcommand = command \"new\" (info (CmdNew <$> newCommandParser) (progDesc \"Create a new morloc package\"))\n\ndata InstallCommand = InstallCommand\n  { installConfig :: String\n  , installVanilla :: Bool\n  , installVerbose :: Int\n  , installForce :: OverwriteProtocol\n  , installUseSSH :: GitProtocol\n  , installNoTypecheck :: Bool\n  , installBuild :: Bool\n  , installModuleStrings :: [String]\n  }\n\nmakeInstallParser :: Parser InstallCommand\nmakeInstallParser =\n  InstallCommand\n    <$> optConfig\n    <*> optVanilla\n    <*> optVerbose\n    <*> optForce\n    <*> optUseSSH\n    <*> optNoTypecheck\n    <*> optInstallBuild\n    <*> optModuleStrings\n\ninstallSubcommand :: Mod CommandFields CliCommand\ninstallSubcommand = command \"install\" (info (CmdInstall <$> makeInstallParser) (progDesc \"Install a morloc module\"))\n\ndata TypecheckCommand = TypecheckCommand\n  { typecheckConfig :: String\n  , typecheckVanilla :: Bool\n  , typecheckType :: Bool\n  , typecheckRaw :: Bool\n  , typecheckExpression :: Bool\n  , typecheckVerbose :: Int\n  , typecheckRealize :: Bool\n  , typecheckScript :: String\n  }\n\nmakeTypecheckParser :: Parser TypecheckCommand\nmakeTypecheckParser =\n  TypecheckCommand\n    <$> optConfig\n    <*> optVanilla\n    <*> optType\n    <*> optRaw\n    <*> optExpression\n    <*> optVerbose\n    <*> optRealize\n    <*> optScript\n\ntypecheckSubcommand :: Mod CommandFields CliCommand\ntypecheckSubcommand =\n  command\n    \"typecheck\"\n    (info (CmdTypecheck <$> makeTypecheckParser) (progDesc \"Typecheck a morloc program\"))\n\ndumpSubcommand :: Mod CommandFields CliCommand\ndumpSubcommand =\n  command \"dump\" (info (CmdDump <$> makeDumpParser) (progDesc \"Dump parsed code\"))\n\ndata DumpCommand = DumpCommand\n  { dumpConfig :: String\n  , dumpVanilla :: Bool\n  , dumpVerbose :: Int\n  , dumpExpression :: Bool\n  , dumpScript :: String\n  }\n\nmakeDumpParser :: Parser DumpCommand\nmakeDumpParser =\n  DumpCommand\n    <$> optConfig\n    <*> optVanilla\n    <*> optVerbose\n    <*> optExpression\n    <*> optScript\n\ndata ListKind = ListModules | ListPrograms\n  deriving (Show, Eq)\n\ndata ListCommand = ListCommand\n  { listPattern :: Maybe String\n  , listConfig :: String\n  , listVanilla :: Bool\n  , listVerbose :: Int\n  , listKind :: Maybe ListKind\n  }\n\nmakeListParser :: Parser ListCommand\nmakeListParser =\n  ListCommand\n    <$> optListPattern\n    <*> optConfig\n    <*> optVanilla\n    <*> optVerbose\n    <*> optListKind\n\nlistSubcommand :: Mod CommandFields CliCommand\nlistSubcommand =\n  command \"list\" (info (CmdList <$> makeListParser) (progDesc \"List installed modules and programs\"))\n\ndata UninstallCommand = UninstallCommand\n  { uninstallNames :: [String]\n  , uninstallConfig :: String\n  , uninstallVanilla :: Bool\n  , uninstallKind :: Maybe ListKind\n  , uninstallDryRun :: Bool\n  , uninstallAll :: Bool\n  }\n\nmakeUninstallParser :: Parser UninstallCommand\nmakeUninstallParser =\n  UninstallCommand\n    <$> optUninstallNamesOrNone\n    <*> optConfig\n    <*> optVanilla\n    <*> optUninstallKind\n    <*> optDryRun\n    <*> optUninstallAll\n\nuninstallSubcommand :: Mod CommandFields CliCommand\nuninstallSubcommand =\n  command\n    \"uninstall\"\n    (info (CmdUninstall <$> makeUninstallParser) (progDesc \"Uninstall a module or program\"))\n\noptExpression :: Parser Bool\noptExpression =\n  switch\n    ( long \"expression\"\n        <> short 'e'\n        <> help \"Read script as string rather than file\"\n    )\n\noptVanilla :: Parser Bool\noptVanilla =\n  switch\n    ( long \"vanilla\"\n        <> help \"Ignore local configuration files\"\n    )\n\noptForce :: Parser OverwriteProtocol\noptForce =\n  flag\n    DoNotOverwrite\n    ForceOverwrite\n    ( long \"force\"\n        <> short 'f'\n        <> help \"Overwrite files if they already exist\"\n    )\n\noptUseSSH :: Parser GitProtocol\noptUseSSH =\n  flag\n    HttpsProtocol\n    SshProtocol\n    ( long \"ssh\"\n        <> help \"Use SSH protocol for remote git access\"\n    )\n\noptNoTypecheck :: Parser Bool\noptNoTypecheck =\n  switch\n    ( long \"no-typecheck\"\n        <> help \"Skip typechecking during install\"\n    )\n\noptInstallBuild :: Parser Bool\noptInstallBuild =\n  switch\n    ( long \"build\"\n        <> short 'b'\n        <> help \"Build and install executable after module install\"\n    )\n\noptModuleStrings :: Parser [String]\noptModuleStrings =\n  some -- one or more\n    . strArgument\n    $ ( metavar \"INSTALL\"\n          <> help \"Module install strings\"\n      )\n\noptRaw :: Parser Bool\noptRaw =\n  switch\n    ( long \"raw\"\n        <> help \"Print raw objects\"\n    )\n\noptSlurmSupport :: Parser Bool\noptSlurmSupport =\n  switch\n    ( long \"slurm\"\n        <> help \"Allow use of SLURM for remote jobs\"\n    )\n\noptSanitize :: Parser Bool\noptSanitize =\n  switch\n    ( long \"sanitize\"\n        <> help \"Enable alignment sanitizer for debugging memory layout issues\"\n    )\n\noptVerbose :: Parser Int\noptVerbose = length <$> many (flag' () (short 'v'))\n\noptQuiet :: Parser Bool\noptQuiet =\n  switch\n    ( long \"quiet\"\n        <> short 'q'\n        <> help \"Print minimal output to STDERR\"\n    )\n\noptRealize :: Parser Bool\noptRealize =\n  switch\n    ( long \"realize\"\n        <> short 'r'\n        <> help \"Typecheck the composition realizations\"\n    )\n\noptConfig :: Parser String\noptConfig =\n  strOption\n    ( long \"config\"\n        <> metavar \"CONFIG\"\n        <> value \"\"\n        <> help \"Use this config rather than the one in morloc home\"\n    )\n\noptOutfile :: Parser String\noptOutfile =\n  strOption\n    ( long \"outfile\"\n        <> short 'o'\n        <> metavar \"OUT\"\n        <> value \"\"\n        <> showDefault\n        <> help \"The name of the generated executable\"\n    )\n\noptMakeInstall :: Parser Bool\noptMakeInstall =\n  switch\n    ( long \"install\"\n        <> help \"Install module to PATH\"\n    )\n\noptMakeForce :: Parser Bool\noptMakeForce =\n  switch\n    ( long \"force\"\n        <> short 'f'\n        <> help \"Overwrite existing install\"\n    )\n\noptMakeInclude :: Parser [String]\noptMakeInclude =\n  many\n    ( strOption\n        ( long \"include\"\n            <> metavar \"PATTERN\"\n            <> help \"File pattern to include in install\"\n        )\n    )\n\noptScript :: Parser String\noptScript = argument str (metavar \"<script>\" <> value \"main.loc\")\n\noptType :: Parser Bool\noptType =\n  switch\n    ( long \"type\"\n        <> short 't'\n        <> help \"Parse a typestring instread of an expression\"\n    )\n\noptListKind :: Parser (Maybe ListKind)\noptListKind =\n  flag' (Just ListModules) (long \"modules\" <> help \"List only modules\")\n    <|> flag' (Just ListPrograms) (long \"programs\" <> help \"List only programs\")\n    <|> pure Nothing\n\noptListPattern :: Parser (Maybe String)\noptListPattern =\n  optional . strArgument $\n    ( metavar \"PATTERN\"\n        <> help \"Filter by subsequence match on name\"\n    )\n\noptUninstallNamesOrNone :: Parser [String]\noptUninstallNamesOrNone =\n  many\n    . strArgument\n    $ ( metavar \"NAME\"\n          <> help \"Names of modules or programs to uninstall\"\n      )\n\noptUninstallAll :: Parser Bool\noptUninstallAll =\n  switch\n    ( long \"all\"\n        <> help \"Uninstall all installed modules\"\n    )\n\noptUninstallKind :: Parser (Maybe ListKind)\noptUninstallKind =\n  flag' (Just ListModules) (long \"module\" <> help \"Uninstall only the module\")\n    <|> flag' (Just ListPrograms) (long \"program\" <> help \"Uninstall only the program\")\n    <|> pure Nothing\n\noptDryRun :: Parser Bool\noptDryRun =\n  switch\n    ( long \"dry-run\"\n        <> help \"Show what would be removed without removing\"\n    )\n\ndata EvalCommand = EvalCommand\n  { evalConfig :: String\n  , evalVanilla :: Bool\n  , evalVerbose :: Int\n  , evalSave :: String\n  , evalExpression :: String\n  , evalArgs :: [String]\n  }\n\nevalCommandParser :: Parser EvalCommand\nevalCommandParser =\n  EvalCommand\n    <$> optConfig\n    <*> optVanilla\n    <*> optVerbose\n    <*> optSave\n    <*> strArgument\n      ( metavar \"EXPRESSION\"\n          <> help \"Morloc expression to evaluate\"\n      )\n    <*> many (strArgument (metavar \"ARGS...\" <> help \"Extra arguments passed to the compiled program\"))\n\nevalSubcommand :: Mod CommandFields CliCommand\nevalSubcommand =\n  command\n    \"eval\"\n    (info (CmdEval <$> evalCommandParser) (progDesc \"Evaluate a morloc expression\"))\n\noptSave :: Parser String\noptSave =\n  strOption\n    ( long \"save\"\n        <> metavar \"NAME\"\n        <> value \"\"\n        <> help \"Save as a named command instead of running\"\n    )\n"
  },
  {
    "path": "fourmolu.yaml",
    "content": "# Fourmolu configuration for morloc compiler\n# https://github.com/fourmolu/fourmolu\n\n# Indentation\nindentation: 2\n\n# Maximum line length (soft limit for formatting decisions)\ncolumn-limit: 100\n\n# How to format function arguments\nfunction-arrows: trailing\n\n# How to format record syntax\nrecord-brace-space: true\n\n# Indent where bindings\nindent-wheres: true\n\n# How to format haddock comments\nhaddock-style: multi-line\n\n# How to format import lists\nimport-export-style: leading\n\n# How to format record fields\nrecord-style: multi-line\n\n# How to format let expressions\nlet-style: inline\n\n# How to format in expressions\nin-style: right-align\n\n# How to treat single constraint contexts\nsingle-constraint-parens: always\n\n# Unicode syntax (don't convert to unicode)\nunicode: never\n\n# Respect language pragmas in files\nrespectful: true\n\n# Newlines between declarations\nnewlines-between-decls: 1\n"
  },
  {
    "path": "hie.yaml",
    "content": "cradle:\n  stack:\n    - path: \"./library\"\n      component: \"morloc:lib\"\n\n    - path: \"./executable/Main.hs\"\n      component: \"morloc:exe:morloc\"\n\n    - path: \"./executable/Subcommands.hs\"\n      component: \"morloc:exe:morloc\"\n\n    - path: \"./executable/UI.hs\"\n      component: \"morloc:exe:morloc\"\n\n    - path: \"./executable/Paths_morloc.hs\"\n      component: \"morloc:exe:morloc\"\n\n    - path: \"./test-suite\"\n      component: \"morloc:test:morloc-test\"\n"
  },
  {
    "path": "library/Morloc/BaseTypes.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.BaseTypes\nDescription : Predefined base type names and constructors\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nSmart constructors for the built-in morloc types (Unit, Int, Real, Bool, Str,\nList, Tuple, etc.) as both 'TVar' names and 'TypeU' values. These correspond\nto the types that are hardwired into the type system and have special\nserialization\\/deserialization support.\n-}\nmodule Morloc.BaseTypes\n  ( unit\n  , real\n  , f32\n  , f64\n  , int\n  , i8\n  , i16\n  , i32\n  , i64\n  , uint\n  , u8\n  , u16\n  , u32\n  , u64\n  , bool\n  , str\n  , tuple\n  , list\n  , vector\n  , matrix\n  , tensor\n  , record\n  , unitU\n  , realU\n  , f32U\n  , f64U\n  , intU\n  , i8U\n  , i16U\n  , i32U\n  , i64U\n  , uintU\n  , u8U\n  , u16U\n  , u32U\n  , u64U\n  , boolU\n  , strU\n  , tupleU\n  , listU\n  , effectU\n  , optionalU\n  ) where\n\nimport Morloc.Data.Text (pretty)\nimport Morloc.Namespace.Prim (TVar (..))\nimport Morloc.Namespace.Type (TypeU (..), emptyEffectSet)\nimport Prelude hiding (log)\n\nunit :: TVar\nunit = TV \"Unit\"\n\nreal :: TVar\nreal = TV \"Real\"\n\nf32 :: TVar\nf32 = TV \"Float32\"\n\nf64 :: TVar\nf64 = TV \"Float64\"\n\nint :: TVar\nint = TV \"Int\"\n\ni8 :: TVar\ni8 = TV \"Int8\"\n\ni16 :: TVar\ni16 = TV \"Int16\"\n\ni32 :: TVar\ni32 = TV \"Int32\"\n\ni64 :: TVar\ni64 = TV \"Int64\"\n\nu8 :: TVar\nu8 = TV \"UInt8\"\n\nuint :: TVar\nuint = TV \"UInt\"\n\nu16 :: TVar\nu16 = TV \"UInt16\"\n\nu32 :: TVar\nu32 = TV \"UInt32\"\n\nu64 :: TVar\nu64 = TV \"UInt64\"\n\nbool :: TVar\nbool = TV \"Bool\"\n\nstr :: TVar\nstr = TV \"Str\"\n\nlist :: TVar\nlist = TV \"List\"\n\ntuple :: Int -> TVar\ntuple k = TV $ \"Tuple\" <> pretty k\n\nvector :: TVar\nvector = TV \"Vector\"\n\nmatrix :: TVar\nmatrix = TV \"Matrix\"\n\ntensor :: Int -> TVar\ntensor k = TV $ \"Tensor\" <> pretty k\n\nrecord :: TVar\nrecord = TV \"Record\"\n\nunitU :: TypeU\nunitU = VarU $ TV \"Unit\"\n\nrealU :: TypeU\nrealU = VarU $ TV \"Real\"\n\nf32U :: TypeU\nf32U = VarU $ TV \"Float32\"\n\nf64U :: TypeU\nf64U = VarU $ TV \"Float64\"\n\nintU :: TypeU\nintU = VarU $ TV \"Int\"\n\ni8U :: TypeU\ni8U = VarU $ TV \"Int8\"\n\ni16U :: TypeU\ni16U = VarU $ TV \"Int16\"\n\ni32U :: TypeU\ni32U = VarU $ TV \"Int32\"\n\ni64U :: TypeU\ni64U = VarU $ TV \"Int64\"\n\nu8U :: TypeU\nu8U = VarU $ TV \"UInt8\"\n\nuintU :: TypeU\nuintU = VarU $ TV \"UInt\"\n\nu16U :: TypeU\nu16U = VarU $ TV \"UInt16\"\n\nu32U :: TypeU\nu32U = VarU $ TV \"UInt32\"\n\nu64U :: TypeU\nu64U = VarU $ TV \"UInt64\"\n\nboolU :: TypeU\nboolU = VarU $ TV \"Bool\"\n\nstrU :: TypeU\nstrU = VarU $ TV \"Str\"\n\nlistU :: TypeU -> TypeU\nlistU t = AppU (VarU list) [t]\n\ntupleU :: [TypeU] -> TypeU\ntupleU ts = AppU (VarU $ tuple (length ts)) ts\n\neffectU :: TypeU -> TypeU\neffectU = EffectU emptyEffectSet\n\noptionalU :: TypeU -> TypeU\noptionalU = OptionalU\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Docstrings.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Docstrings\nDescription : Generate CLI help text and argument documentation for exported functions\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProcesses docstring annotations from type signatures into the final\n'MDoc' records used by the nexus for @--help@ output, including argument\nnames, default values, metavars, and CLI option flags.\n-}\nmodule Morloc.CodeGenerator.Docstrings (processDocstrings) where\n\nimport qualified Data.Map as Map\nimport Data.Text (Text)\nimport qualified Morloc.BaseTypes as MBT\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Monad as MM\n\n-- Most of the transmogrification of docstrings occurs in the parser, but there\n-- are some limitations there since the types are not yet known. If a type in a\n-- function signature is labeled as `unrolled: true`, then the standard\n-- positional argument should be replaced with a group of arguments for every\n-- field in the record. But the type signature for the function will have only\n-- the name of the type, not the details.\n--\n-- In addition to unrolling arguments, docstring information can be inherited\n-- even for non-record types. The main use case for this is defining a type that\n-- has a description and short/long option terms that are reused across function\n-- signatures.\n--\n-- This top-level function collects docstring info and passes it to `processArgDoc`\nprocessDocstrings ::\n  AnnoS (Indexed Type) One a -> MorlocMonad (AnnoS (Indexed Type) One a, CmdDocSet)\nprocessDocstrings e@(AnnoS (Idx i t) _ _) = do\n  sgmap <- MM.gets stateSignatures\n  argdoc <- case GMap.lookup i sgmap of\n    (GMapJust (Monomorphic (TermTypes (Just et) _ _))) -> return $ edocs et\n    (GMapJust (Polymorphic _ _ et _)) -> return $ edocs et\n    _ -> case t of\n      (FunT ts _) -> return $ ArgDocSig defaultValue (take (length ts) (repeat defaultValue)) defaultValue\n      _ -> return $ ArgDocAlias defaultValue\n  -- Declaration-level docstrings take precedence over signature docstrings\n  -- for the command-level description.\n  declDocs <- lookupDeclDocs i\n  let argdoc' = case declDocs of\n        [] -> argdoc\n        ls -> overrideCmdDocLines ls argdoc\n  doc <- processArgDoc i t argdoc'\n  return (e, doc)\n\n-- | Look up the declaration-level docstring for the term at a given index.\n-- Returns an empty list if no declaration docstring exists.\nlookupDeclDocs :: Int -> MorlocMonad [Text]\nlookupDeclDocs i = do\n  nameMap <- MM.gets stateName\n  case Map.lookup i nameMap of\n    Nothing -> return []\n    Just name -> do\n      termDocs <- MM.gets stateTermDocs\n      return $ Map.findWithDefault [] name termDocs\n\n-- | Override the command-level docLines while preserving all other docstring\n-- fields (argument docs, return docs, metavars, etc.).\noverrideCmdDocLines :: [Text] -> ArgDoc -> ArgDoc\noverrideCmdDocLines ls (ArgDocSig cmd args ret) =\n  ArgDocSig (cmd { docLines = ls }) args ret\noverrideCmdDocLines ls (ArgDocRec vars fields) =\n  ArgDocRec (vars { docLines = ls }) fields\noverrideCmdDocLines ls (ArgDocAlias vars) =\n  ArgDocAlias (vars { docLines = ls })\n\n-- dispatch docstring info for each argument to `processArgDoc`\nprocessArgDoc :: Int -> Type -> ArgDoc -> MorlocMonad CmdDocSet\nprocessArgDoc i (FunT ts t) (ArgDocSig cmddoc argdocs retdoc) = do\n  (ts', argdocs') <- zipWithM (reduceArgDoc i) ts (map ArgDocAlias argdocs) |>> unzip\n  cmdargs <- zipWithM makeCmdArg ts' argdocs'\n  (t', retdoc') <- reduceArgDoc i t (ArgDocAlias retdoc)\n  return $\n    CmdDocSet\n      { cmdDocDesc = docLines cmddoc\n      , cmdDocName = docName cmddoc\n      , cmdDocArgs = cmdargs\n      , cmdDocRet = (t', getReturnDesc retdoc' (docReturn cmddoc))\n      }\nprocessArgDoc i t (ArgDocSig cmddoc [] retdoc) = do\n  (t', retdoc') <- reduceArgDoc i t (ArgDocAlias retdoc)\n  return $\n    CmdDocSet\n      { cmdDocDesc = docLines cmddoc\n      , cmdDocName = docName cmddoc\n      , cmdDocArgs = []\n      , cmdDocRet = (t', getReturnDesc retdoc' (docReturn cmddoc))\n      }\nprocessArgDoc _ t (ArgDocAlias r) =\n  return $\n    CmdDocSet\n      { cmdDocDesc = docLines r\n      , cmdDocName = docName r\n      , cmdDocArgs = []\n      , cmdDocRet = (t, [])\n      }\nprocessArgDoc i t r = do\n  (t', r') <- reduceArgDoc i t r\n  case (t', r') of\n    (NamT _ _ _ ts, ArgDocRec args entries) -> do\n      cmdargs <- zipWithM makeCmdArg (map snd ts) (map (ArgDocAlias . snd) entries)\n      return $\n        CmdDocSet\n          { cmdDocDesc = docLines args\n          , cmdDocName = docName args\n          , cmdDocArgs = cmdargs\n          , cmdDocRet = (t, [])\n          }\n    _ -> MM.throwSystemError \"Expected a record type with docstrings but found a non-record type\"\n\ngetReturnDesc :: ArgDoc -> Maybe Text -> [Text]\ngetReturnDesc _ (Just ret) = [ret]\ngetReturnDesc (ArgDocRec r _) _ = docLines r\ngetReturnDesc (ArgDocSig r _ _) _ = docLines r\ngetReturnDesc (ArgDocAlias r) _ = docLines r\n\nreduceArgDoc :: Int -> Type -> ArgDoc -> MorlocMonad (Type, ArgDoc)\nreduceArgDoc i t@(VarT v) arg = do\n  scope <- MM.getGeneralScope i\n  case Map.lookup v scope of\n    (Just [(_, typeOf -> parentType, parentArg, _)]) ->\n      inheritArgDoc arg parentArg >>= reduceArgDoc i parentType\n    (Just _) -> MM.throwSystemError $ \"Multiple definitions for type alias '\" <> pretty (unTVar v) <> \"'\"\n    Nothing -> return (t, arg)\n  where\n    inheritArgDoc :: ArgDoc -> ArgDoc -> MorlocMonad ArgDoc\n    inheritArgDoc (ArgDocAlias r1) (ArgDocAlias r2) = return $ ArgDocAlias (inheritArgDocVars r1 r2)\n    inheritArgDoc (ArgDocAlias r1) (ArgDocRec r2 rs) = return $ ArgDocRec (inheritArgDocVars r1 r2) rs\n    inheritArgDoc _ _ = MM.throwSystemError $ \"Cannot inherit docstrings for type alias '\" <> pretty (unTVar v) <> \"'\"\n\n    inheritArgDocVars :: ArgDocVars -> ArgDocVars -> ArgDocVars\n    inheritArgDocVars r1 r2 =\n      ArgDocVars\n        { docLines = if (length (docLines r1) > 0) then docLines r1 else docLines r2\n        , docName = docName r1 <|> docName r2\n        , docLiteral = docLiteral r1 <|> docLiteral r2\n        , docUnroll = docUnroll r1 <|> docUnroll r2\n        , docDefault = docDefault r1 <|> docDefault r2\n        , docMetavar = docMetavar r1 <|> docMetavar r2\n        , docArg = docArg r1 <|> docArg r2\n        , docTrue = docTrue r1 <|> docTrue r2\n        , docFalse = docFalse r1 <|> docFalse r2\n        , docReturn = docReturn r1 <|> docReturn r2\n        }\nreduceArgDoc i (NamT o v ps (map snd -> ts)) (ArgDocRec arg rs) = do\n  let args = map (ArgDocAlias . snd) rs\n      keys = map fst rs\n  entries <- zipWithM (reduceArgDoc i) ts args\n  let args' = [r | (ArgDocAlias r) <- map snd entries]\n  return (NamT o v ps (zip keys (map fst entries)), ArgDocRec arg (zip keys args'))\nreduceArgDoc _ t r = return (t, r)\n\nmakeCmdArg :: Type -> ArgDoc -> MorlocMonad CmdArg\nmakeCmdArg recType@(NamT _ _ _ rs) (ArgDocRec arg entries) = do\n  -- Set the metavar default for groups to the record type name\n  let typedEntries = zipWith (\\(k, t) (_, r) -> (k, (t, r))) rs entries\n  resolveArgDocVars typedEntries recType arg\nmakeCmdArg t (ArgDocRec r _) = resolveArgDocVars [] t r\nmakeCmdArg t (ArgDocAlias r) = resolveArgDocVars [] t r\nmakeCmdArg _ (ArgDocSig _ _ _) = MM.throwSystemError \"Illegal functional CLI parameter\"\n\nresolveArgDocVars :: [(Key, (Type, ArgDocVars))] -> Type -> ArgDocVars -> MorlocMonad CmdArg\nresolveArgDocVars rs t r\n  | docUnroll r == Just False = resolvePos t r |>> CmdArgPos\n  | length rs > 0 && docUnroll r == Just True = resolveGrp t r rs\n  | t == VarT MBT.bool = resolveFlagCmdArg r\n  | isJust (docArg r) && isJust (docDefault r) = resolveOpt t r |>> CmdArgOpt\n  | otherwise = resolvePos t r |>> CmdArgPos\n\nresolveGrp :: Type -> ArgDocVars -> [(Key, (Type, ArgDocVars))] -> MorlocMonad CmdArg\nresolveGrp recType@(NamT _ v _ _) arg argEntries = do\n  entries <- mapM resolveRecDocVars argEntries\n  return . CmdArgGrp $\n    RecDocSet\n      { recDocType = recType\n      , recDocDesc = docLines arg\n      , recDocMetavar = fromMaybe (unTVar v) (docMetavar arg)\n      , recDocOpt = docArg arg\n      , recDocEntries = entries\n      }\n  where\n    resolveRecDocVars ::\n      (Key, (Type, ArgDocVars)) -> MorlocMonad (Key, Either ArgFlagDocSet ArgOptDocSet)\n    resolveRecDocVars (k, (t, r))\n      | t == VarT MBT.bool = do\n          eitherFlag <- resolveFlag r\n          case eitherFlag of\n            (Right flag) -> return $ (k, Left flag)\n            (Left _) -> MM.throwSystemError $ \"Non-optional field found in unrolled record\"\n      | otherwise = do\n          opt <- resolveOpt t r\n          return (k, Right opt)\nresolveGrp _ _ _ = MM.throwSystemError \"Cannot unroll a non-record type into CLI argument groups\"\n\n-- resolve a boolean into either a flag option or a positional\nresolveFlag :: ArgDocVars -> MorlocMonad (Either ArgPosDocSet ArgFlagDocSet)\nresolveFlag r =\n  case (docTrue r, docFalse r, (==) \"true\" <$> docDefault r) of\n    -- if no default value is given, make default based on given args\n    -- e.g., true: -v/--verbose\n    (Just rt, Nothing, Nothing) -> flag rt Nothing False\n    -- e.g., false: -q/--quit\n    (Nothing, Just rf, Nothing) -> flag rf Nothing True\n    -- e.g., true: -v/--verbose\n    --       false: -q/--quit\n    (Just rt, Just rf, Nothing) -> flag rt (Just rf) False\n    -- set default to TRUE\n    (Nothing, Just rf, Just True) -> flag rf Nothing True\n    (Just rt, Just rf, Just True) -> flag rf (Just rt) True\n    -- set default to FALSE\n    (Just rt, Nothing, Just False) -> flag rt Nothing False\n    (Just rt, Just rf, Just False) -> flag rt (Just rf) False\n    -- handle noop cases\n    (Just _, Nothing, Just True) -> MM.throwSystemError \"Noop flag\"\n    (Nothing, Just _, Just False) -> MM.throwSystemError \"Noop flag\"\n    -- handle positional with a given default\n    (Nothing, Nothing, Just _) -> MM.throwSystemError \"Positional argument with default\"\n    -- handle positional\n    (Nothing, Nothing, Nothing) ->\n      return . Left $\n        ArgPosDocSet\n          { argPosDocType = VarT MBT.bool\n          , argPosDocDesc = docLines r\n          , argPosDocMetavar = docMetavar r <|> Just \"BOOL\"\n          , argPosDocLiteral = docLiteral r\n          }\n  where\n    flag :: CliOpt -> Maybe CliOpt -> Bool -> MorlocMonad (Either ArgPosDocSet ArgFlagDocSet)\n    flag opt rev def =\n      return . Right $\n        ArgFlagDocSet\n          { argFlagDocDesc = docLines r\n          , argFlagDocOpt = opt\n          , argFlagDocOptRev = rev\n          , argFlagDocDefault = if def then \"true\" else \"false\"\n          }\n\nresolveFlagCmdArg :: ArgDocVars -> MorlocMonad CmdArg\nresolveFlagCmdArg r = do\n  eitherFlag <- resolveFlag r\n  case eitherFlag of\n    (Right flag) -> return . CmdArgFlag $ flag\n    (Left pos) -> return . CmdArgPos $ pos\n\nresolveOpt :: Type -> ArgDocVars -> MorlocMonad ArgOptDocSet\nresolveOpt t r = case (docArg r, docDefault r) of\n  (Nothing, _) -> MM.throwSystemError \"Optional argument missing tags\"\n  (Just opt, Nothing)\n    -- literal ?Str: auto-default to null (the only way to get null is to omit the flag)\n    | isLiteralOptStr -> makeOpt opt \"null\"\n    | otherwise ->\n        MM.throwSystemError $ \"Optional argument \" <> pretty (makeArg opt) <> \" must have default values\"\n  (Just opt, Just def)\n    -- literal ?Str with non-null default is an error\n    | isLiteralOptStr && def /= \"null\" ->\n        MM.throwSystemError $\n          \"Optional argument \" <> pretty (makeArg opt)\n          <> \" has type ?Str with literal: true, so default must be null (got \\\"\"\n          <> pretty def <> \"\\\")\"\n    | otherwise -> makeOpt opt def\n  where\n    isLiteralOptStr = docLiteral r == Just True && isOptionalStrType t\n\n    isOptionalStrType (OptionalT (VarT v)) = v == MBT.str\n    isOptionalStrType _ = False\n\n    makeOpt opt def = return $\n      ArgOptDocSet\n        { argOptDocType = t\n        , argOptDocDesc = docLines r\n        , argOptDocMetavar = fromMaybe (makeOptMeta t) (docMetavar r)\n        , argOptDocLiteral = docLiteral r\n        , argOptDocArg = opt\n        , argOptDocDefault = def\n        }\n\nmakeArg ::\n  CliOpt ->\n  Text -- argument string, such as \"-h/--help\"\nmakeArg (CliOptShort s) = \"-\" <> MT.show' s\nmakeArg (CliOptLong l) = \"--\" <> l\nmakeArg (CliOptBoth s l) = \"-\" <> MT.show' s <> \"/--\" <> l\n\nmakeOptMeta :: Type -> Text\nmakeOptMeta (UnkT v) = unTVar v\nmakeOptMeta (VarT v) = unTVar v\nmakeOptMeta (FunT _ _) = \"FUN\" -- illegal, but who's watching?\nmakeOptMeta (AppT (VarT v) _) = unTVar v\nmakeOptMeta (AppT _ _) = \"VAL\" -- weird stuff, choose your own metadata\nmakeOptMeta (NamT _ v _ _) = unTVar v\nmakeOptMeta (EffectT _ t) = \"<E>\" <> makeOptMeta t\nmakeOptMeta (OptionalT t) = \"?\" <> makeOptMeta t\nmakeOptMeta (NatLitT n) = MT.show' n\nmakeOptMeta (NatAddT _ _) = \"NAT\"\nmakeOptMeta (NatMulT _ _) = \"NAT\"\nmakeOptMeta (NatSubT _ _) = \"NAT\"\nmakeOptMeta (NatDivT _ _) = \"NAT\"\n\nresolvePos :: Type -> ArgDocVars -> MorlocMonad ArgPosDocSet\nresolvePos t r = do\n  return $\n    ArgPosDocSet\n      { argPosDocType = t\n      , argPosDocDesc = docLines r\n      , argPosDocMetavar = docMetavar r\n      , argPosDocLiteral = docLiteral r\n      }\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Emit.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Emit\nDescription : Group serialized manifolds by language and translate to target source code\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.CodeGenerator.Emit\n  ( pool\n  , emit\n  , TranslateFn\n  ) where\n\nimport Morloc.CodeGenerator.Grammars.Common (invertSerialManifold)\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Monad as MM\n\n{- | Callback type for language-specific translation.\nThe executable provides concrete implementations for each language.\n-}\ntype TranslateFn = Lang -> [Source] -> [SerialManifold] -> MorlocMonad Script\n\n-- | Sort manifolds into pools. Within pools, group manifolds into call sets.\npool :: [SerialManifold] -> [(Lang, [SerialManifold])]\npool es =\n  let (langs, indexedSegments) = unzip . groupSort . map (\\x@(SerialManifold i lang _ _ _) -> (lang, (i, x))) $ es\n      uniqueSegments = map (Map.elems . Map.fromList) indexedSegments\n   in zip langs uniqueSegments\n\n-- | Translate a pool of serialized manifolds to target language source code\nemit ::\n  TranslateFn ->\n  Lang ->\n  [SerialManifold] ->\n  MorlocMonad Script\nemit translateFn lang xs = do\n  srcs' <- findSources xs\n  let xs' = map invertSerialManifold xs\n  translateFn lang srcs' xs'\n\nfindSources :: [SerialManifold] -> MorlocMonad [Source]\nfindSources ms = unique <$> concatMapM (foldSerialManifoldM fm) ms\n  where\n    fm =\n      defaultValue\n        { opSerialExprM = serialExprSrcs\n        , opNativeExprM = nativeExprSrcs\n        , opNativeManifoldM = nativeManifoldSrcs\n        , opSerialManifoldM = nativeSerialSrcs\n        }\n\n    nativeExprSrcs (AppExeN_ _ (SrcCallP src) xss) = return (src : concat xss)\n    nativeExprSrcs (ExeN_ _ (SrcCallP src)) = return [src]\n    nativeExprSrcs (DeserializeN_ _ s xs) = return $ serialASTsources s <> xs\n    nativeExprSrcs e = return $ foldlNE (<>) [] e\n\n    serialExprSrcs (SerializeS_ s xs) = return $ serialASTsources s <> xs\n    serialExprSrcs e = return $ foldlSE (<>) [] e\n\n    serialASTsources :: SerialAST -> [Source]\n    serialASTsources (SerialPack _ (p, s)) = [typePackerForward p, typePackerReverse p] <> serialASTsources s\n    serialASTsources (SerialList _ s) = serialASTsources s\n    serialASTsources (SerialTuple _ ss) = concatMap serialASTsources ss\n    serialASTsources (SerialObject _ _ _ (map snd -> ss)) = concatMap serialASTsources ss\n    serialASTsources _ = []\n\n    nativeManifoldSrcs (NativeManifold_ m lang _ e) = (<>) e <$> lookupConstructors lang m\n    nativeSerialSrcs (SerialManifold_ m lang _ _ e) = (<>) e <$> lookupConstructors lang m\n\n    lookupConstructors :: Lang -> Int -> MorlocMonad [Source]\n    lookupConstructors lang i = MM.metaSources i |>> filter ((==) lang . srcLang)\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Express.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Express\nDescription : Resolve type aliases and infer concrete types in manifold trees\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nTransforms the parameterized 'AnnoS' trees into a form where every\nnode carries both its general type and inferred concrete type. This is\nthe step where language-specific type aliases are resolved and type\nparameters are fully instantiated.\n-}\nmodule Morloc.CodeGenerator.Express\n  ( express\n  ) where\n\nimport qualified Data.Set as Set\nimport Morloc.CodeGenerator.Infer\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.TypeEval as TE\n\nmkIdx :: AnnoS g One (Indexed c, d) -> Type -> Indexed Type\nmkIdx (AnnoS _ (Idx i _, _) _) = Idx i\n\nsetManifoldConfig ::\n  Int ->\n  AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) ->\n  MorlocMonad ()\nsetManifoldConfig midx (AnnoS _ _ (AppS (AnnoS (Idx fidx _) _ (VarS _ _)) _)) = linkConfigIndex midx fidx\nsetManifoldConfig midx (AnnoS _ _ (AppS (AnnoS (Idx fidx _) _ (ExeS _)) _)) = linkConfigIndex midx fidx\nsetManifoldConfig midx (AnnoS _ _ (AppS e _)) = setManifoldConfig midx e\nsetManifoldConfig midx (AnnoS _ _ (LamS _ e)) = setManifoldConfig midx e\nsetManifoldConfig midx (AnnoS _ _ (DoBlockS e)) = setManifoldConfig midx e\nsetManifoldConfig midx (AnnoS _ _ (EvalS e)) = setManifoldConfig midx e\nsetManifoldConfig midx (AnnoS _ _ (CoerceS _ e)) = setManifoldConfig midx e\nsetManifoldConfig _ (AnnoS _ _ (IntrinsicS _ _)) = return ()\nsetManifoldConfig midx (AnnoS _ _ (IfS _ t _)) = setManifoldConfig midx t\nsetManifoldConfig _ (AnnoS _ _ (CallS _)) = return ()\nsetManifoldConfig _ _ = return ()\n\nlinkConfigIndex :: Int -> Int -> MorlocMonad ()\nlinkConfigIndex midx fidx = do\n  s <- MM.get\n  case Map.lookup fidx (stateManifoldConfig s) of\n    Nothing -> return ()\n    (Just mconfig) -> do\n      MM.sayVVV $ \"Copy manifold config from\" <+> pretty fidx <+> \"to\" <+> pretty midx\n      MM.put (s {stateManifoldConfig = Map.insert midx mconfig (stateManifoldConfig s)})\n\npropagateScope :: Int -> Int -> MorlocMonad ()\npropagateScope calleeIdx appIdx = do\n  s <- MM.get\n  case GMap.yIsX calleeIdx appIdx (stateConcreteTypedefs s) of\n    (Just gmap') -> MM.put $ s {stateConcreteTypedefs = gmap'}\n    Nothing -> return ()\n\nexpress :: AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) -> MorlocMonad PolyHead\nexpress e@(AnnoS (Idx midx t) (Idx cidx _, _) _) = do\n  -- Store the return effect labels before forceExportThunks strips them\n  let retEffects = extractReturnEffects t\n  MM.modify (\\s -> s { stateManifoldEffects = Map.insert midx retEffects (stateManifoldEffects s) })\n  forceExportThunks cidx t <$> expressCore e\n  where\n    extractReturnEffects (FunT _ (EffectT effs _)) = effs\n    extractReturnEffects (EffectT effs _) = effs\n    extractReturnEffects _ = Set.empty\n\n-- At the export boundary, thunks cannot be serialized. This function:\n--   1. Wraps thunk-typed args in PolyDoBlock so they are received as plain\n--      values from the CLI and suspended inside the pool.\n--   2. Wraps thunk return types in PolyEval so they are evaluated before\n--      serialization back to the user.\nforceExportThunks :: Int -> Type -> PolyHead -> PolyHead\nforceExportThunks cidx t (PolyHead lang midx args body) =\n  let inputTs = case t of FunT inputs _ -> inputs; _ -> []\n      thunkArgIds = [ann a | (a, EffectT _ _) <- zip args inputTs]\n      retT = case t of FunT _ ret -> ret; t' -> t'\n      body' = suspendThunkArgs thunkArgIds body\n      body'' = forceAtReturn cidx retT body'\n   in PolyHead lang midx args body''\n  where\n    -- Wrap BndVar references to thunk-typed args in PolyDoBlock.\n    -- The arg is deserialized as the inner type; the suspend creates the thunk.\n    suspendThunkArgs [] e = e\n    suspendThunkArgs ids e = goExpr ids e\n\n    goExpr ids (PolyBndVar (C (Idx ci (EffectT effs inner))) i)\n      | i `elem` ids = wrapSuspends ci (EffectT effs inner) i\n    goExpr ids (PolyManifold l m f e) = PolyManifold l m f (goExpr ids e)\n    goExpr ids (PolyLet i e1 e2) = PolyLet i (goExpr ids e1) (goExpr ids e2)\n    goExpr ids (PolyReturn e) = PolyReturn (goExpr ids e)\n    goExpr ids (PolyApp e es) = PolyApp (goExpr ids e) (map (goExpr ids) es)\n    goExpr ids (PolyEval ti e) = PolyEval ti (goExpr ids e)\n    goExpr ids (PolyDoBlock ti e) = PolyDoBlock ti (goExpr ids e)\n    goExpr ids (PolyCoerce c ti e) = PolyCoerce c ti (goExpr ids e)\n    goExpr ids (PolyIntrinsic ti intr es) = PolyIntrinsic ti intr (map (goExpr ids) es)\n    goExpr ids (PolyList v ti es) = PolyList v ti (map (goExpr ids) es)\n    goExpr ids (PolyTuple v es) = PolyTuple v (map (fmap (goExpr ids)) es)\n    goExpr ids (PolyRecord o v ps rs) = PolyRecord o v ps (map (fmap (fmap (goExpr ids))) rs)\n    goExpr ids (PolyIf c t' e) = PolyIf (goExpr ids c) (goExpr ids t') (goExpr ids e)\n    goExpr ids (PolyRemoteInterface l ti is rf e) = PolyRemoteInterface l ti is rf (goExpr ids e)\n    goExpr _ e = e\n\n    -- Peel EffectT layers, wrapping each in PolyDoBlock, with the innermost\n    -- BndVar carrying the fully-unwrapped type.\n    wrapSuspends ci (EffectT effs inner) i =\n      PolyDoBlock (Idx ci (EffectT effs inner)) (wrapSuspends ci inner i)\n    wrapSuspends ci inner i = PolyBndVar (C (Idx ci inner)) i\n\n    forceAtReturn c rt (PolyReturn e) = PolyReturn (wrapForces c rt e)\n    forceAtReturn c rt (PolyManifold l m f e) = PolyManifold l m f (forceAtReturn c rt e)\n    forceAtReturn c rt (PolyLet i e1 e2) = PolyLet i e1 (forceAtReturn c rt e2)\n    forceAtReturn c rt e = wrapForces c rt e\n\n    wrapForces c (EffectT _ inner) e = wrapForces c inner (PolyEval (Idx c inner) e)\n    wrapForces _ _ e = e\n\nexpressCore :: AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) -> MorlocMonad PolyHead\nexpressCore (AnnoS (Idx midx c@(FunT inputs _)) (Idx cidx lang, _) (ExeS exe)) = do\n  MM.sayVVV $ \"express CallS (midx=\" <> pretty midx <> \",\" <+> \"cidx=\" <> pretty cidx <> \"):\"\n  ids <- MM.takeFromCounter (length inputs)\n  exe' <- case exe of\n    (SrcCall src) -> return $ SrcCallP src\n    (PatCall pat) -> return $ PatCallP pat\n  let lambdaVals = fromJust $ safeZipWith PolyBndVar (map (C . Idx cidx) inputs) ids\n  return\n    . PolyHead lang midx [Arg i None | i <- ids]\n    . PolyReturn\n    $ PolyApp (PolyExe (Idx midx c) exe') lambdaVals\nexpressCore (AnnoS (Idx midx _) (_, lambdaArgs) (LamS _ e@(AnnoS (Idx _ applicationType) (c, _) x))) = do\n  MM.sayVVV $ \"express LamS (midx=\" <> pretty midx <> \"):\"\n  setManifoldConfig midx e\n  expressCore (AnnoS (Idx midx applicationType) (c, lambdaArgs) x)\nexpressCore (AnnoS (Idx midx (AppT (VarT v) [t])) (Idx cidx lang, args) (LstS xs)) = do\n  MM.sayVVV $ \"express LstS\"\n  xs' <- mapM (\\x -> expressPolyExprWrap lang (mkIdx x t) x) xs\n  let x = PolyList (Idx cidx v) (Idx cidx t) xs'\n  return $ PolyHead lang midx [Arg i None | Arg i _ <- args] (PolyReturn x)\nexpressCore (AnnoS (Idx _ t) _ (LstS _)) = error $ \"Invalid list form: \" <> show t\nexpressCore (AnnoS t@(Idx midx (AppT (VarT v) ts)) (Idx cidx lang, args) (TupS xs)) = do\n  MM.sayVVV $ \"express TupS:\" <+> pretty t\n  let idxTs = zipWith mkIdx xs ts\n  xs' <- fromJust <$> safeZipWithM (expressPolyExprWrap lang) idxTs xs\n  let x = PolyTuple (Idx cidx v) (fromJust $ safeZip idxTs xs')\n  return $ PolyHead lang midx [Arg i None | Arg i _ <- args] (PolyReturn x)\nexpressCore (AnnoS g _ (TupS _)) = error $ \"Invalid tuple form: \" <> show g\nexpressCore (AnnoS (Idx midx t@(NamT o v ps rs)) (Idx cidx lang, args) (NamS entries)) = do\n  MM.sayVVV $ \"express NamT:\" <+> pretty t\n  let idxTypes = zipWith mkIdx (map snd entries) (map snd rs)\n  xs' <- fromJust <$> safeZipWithM (expressPolyExprWrap lang) idxTypes (map snd entries)\n  let x = PolyRecord o (Idx cidx v) (map (Idx cidx) ps) (zip (map fst rs) (zip idxTypes xs'))\n  return $ PolyHead lang midx [Arg i None | Arg i _ <- args] (PolyReturn x)\nexpressCore (AnnoS (Idx midx t) (Idx cidx lang, args) (NamS entries)) = do\n  MM.sayVVV $ \"express NamT expand:\" <+> pretty t\n  mayT <- evalGeneralStep midx (type2typeu t)\n  case mayT of\n    (Just t') -> expressCore (AnnoS (Idx midx (typeOf t')) (Idx cidx lang, args) (NamS entries))\n    Nothing -> MM.throwSourcedError midx $ \"Missing concrete:\" <+> \"t=\" <> pretty t\nexpressCore e = do\n  MM.sayVVV \"express default\"\n  expressDefault e\n\nreduceType :: Scope -> Type -> Maybe Type\nreduceType scope t0 =\n  let tu0 = type2typeu t0\n   in case TE.evaluateStep scope tu0 of\n        (Just tu1) -> if tu0 == tu1 then Nothing else Just (typeOf tu1)\n        Nothing -> Nothing\n\nexpressDefault :: AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) -> MorlocMonad PolyHead\nexpressDefault e0@(AnnoS (Idx midx t) (Idx cidx lang, args) _) =\n  PolyHead lang midx [Arg i None | Arg i _ <- args] . ensurePolyReturn\n    <$> expressPolyExprWrap lang (Idx cidx t) e0\n  where\n    -- ensure the manifold body has PolyReturn at the return position\n    ensurePolyReturn (PolyReturn x) = PolyReturn x\n    ensurePolyReturn (PolyLet i e1 e2) = PolyLet i e1 (ensurePolyReturn e2)\n    ensurePolyReturn (PolyManifold l m f e) = PolyManifold l m f (ensurePolyReturn e)\n    ensurePolyReturn x = PolyReturn x\n\nexpressPolyExprWrap ::\n  Lang ->\n  Indexed Type ->\n  AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) ->\n  MorlocMonad PolyExpr\nexpressPolyExprWrap l t e@(AnnoS (Idx midx _) _ (LamS _ lamExpr)) = do\n  setManifoldConfig midx lamExpr\n  expressPolyExprWrapCommon l t e\nexpressPolyExprWrap l t e = expressPolyExprWrapCommon l t e\n\nexpressPolyExprWrapCommon ::\n  Lang -> Indexed Type -> AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) -> MorlocMonad PolyExpr\nexpressPolyExprWrapCommon l t e@(AnnoS _ _ (AppS (AnnoS (Idx gidxCall _) _ _) _)) = do\n  bconf <- MM.gets stateBuildConfig\n  mconMap <- MM.gets stateManifoldConfig\n  expressPolyExpr (decideRemoteness bconf (Map.lookup gidxCall mconMap)) l t e\nexpressPolyExprWrapCommon l t e@(AnnoS (Idx midx _) _ _) = do\n  bconf <- MM.gets stateBuildConfig\n  mconMap <- MM.gets stateManifoldConfig\n  expressPolyExpr (decideRemoteness bconf (Map.lookup midx mconMap)) l t e\n\ndecideRemoteness :: BuildConfig -> Maybe ManifoldConfig -> Lang -> Lang -> Maybe RemoteForm\ndecideRemoteness _ Nothing l1 l2\n  | l1 == l2 = Nothing\n  | otherwise = Just ForeignCall\ndecideRemoteness _ (Just (ManifoldConfig _ _ Nothing)) l1 l2\n  | l1 == l2 = Nothing\n  | otherwise = Just ForeignCall\ndecideRemoteness bconf (Just (ManifoldConfig _ _ (Just res))) l1 l2 =\n  case (buildConfigSlurmSupport bconf, l1 /= l2) of\n    (Just True, _) -> Just $ RemoteCall res\n    (_, True) -> Just $ ForeignCall\n    _ -> Nothing\n\nexpressPolyExpr ::\n  (Lang -> Lang -> Maybe RemoteForm) ->\n  Lang ->\n  Indexed Type ->\n  AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) ->\n  MorlocMonad PolyExpr\nexpressPolyExpr\n  findRemote\n  parentLang\n  _\n  ( AnnoS\n      (Idx midx (FunT lamInputTypes lamOutType))\n      (Idx cidxLam _, lamArgs)\n      ( LamS\n          vs\n          ( AnnoS\n              _\n              (Idx _ appLang, appArgs)\n              ( AppS\n                  funExpr@(AnnoS (Idx gidxCall (FunT callInputTypes _)) (Idx _ callLang, _) _)\n                  xs\n                )\n            )\n        )\n    )\n    | isLocal = do\n        propagateScope gidxCall midx\n        MM.sayVVV \"case #4\"\n        let nContextArgs = length appArgs - length vs\n            contextArgs = map unvalue (take nContextArgs appArgs)\n\n            typedLambdaArgs =\n              fromJust $\n                safeZipWith\n                  (\\(Arg i _) t -> Arg i (Just t))\n                  (drop nContextArgs lamArgs)\n                  lamInputTypes\n\n        xs' <- fromJust <$> safeZipWithM (expressPolyExprWrap appLang) (zipWith mkIdx xs callInputTypes) xs\n\n        call <- expressPolyApp parentLang funExpr xs'\n\n        return\n          . PolyManifold parentLang midx (ManifoldPart contextArgs typedLambdaArgs)\n          $ call\n    | not isLocal = do\n        propagateScope gidxCall midx\n\n        xsInfo <- mapM partialExpress xs\n\n        MM.sayVVV $ \"  xsInfo:\" <+> pretty xsInfo\n\n        let xs' = map (\\(_, _, e) -> e) xsInfo\n            callArgs = unique (concatMap (\\(rs, _, _) -> rs) xsInfo)\n            args = [i | Arg i _ <- appArgs]\n            allParentArgs = args <> [i | (_, Just (i, _), _) <- xsInfo]\n            lets = [PolyLet i e | (_, Just (i, e), _) <- xsInfo]\n            passedParentArgs = concat [[r | r <- allParentArgs, r == i] | i <- callArgs]\n            nContextArgs = length appArgs - length vs\n\n            lambdaTypeMap = zip vs (map (Idx cidxLam) lamInputTypes)\n            boundVars =\n              [ PolyBndVar (maybe (A parentLang) C (lookup v lambdaTypeMap)) i\n              | Arg i v <- appArgs\n              ]\n            untypedContextArgs = map unvalue $ take nContextArgs appArgs\n            typedPassedArgs = fromJust $ safeZipWith (\\(Arg i _) t -> Arg i (Just t)) (drop nContextArgs lamArgs) lamInputTypes\n\n            localForm = ManifoldPart untypedContextArgs typedPassedArgs\n\n            foreignForm = ManifoldFull [Arg i None | i <- passedParentArgs]\n\n        call <- expressPolyApp parentLang funExpr xs'\n\n        return\n          . PolyManifold parentLang midx localForm\n          . chain lets\n          . PolyReturn\n          . PolyApp\n            ( PolyRemoteInterface callLang (Idx cidxLam lamOutType) passedParentArgs (fromJust remote)\n                . PolyManifold callLang midx foreignForm\n                $ call\n            )\n          $ boundVars\n    where\n      remote = findRemote parentLang callLang\n      isLocal = isNothing remote\n\n      chain :: [a -> a] -> a -> a\n      chain [] x = x\n      chain (f : fs) x = chain fs (f x)\n\n      partialExpress ::\n        AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) ->\n        MorlocMonad\n          ( [Int]\n          , Maybe (Int, PolyExpr)\n          , PolyExpr\n          )\n      partialExpress (AnnoS (Idx _ t) (Idx cidx argLang, args@[Arg idx _]) (BndS v)) = do\n        MM.sayVVV $\n          \"partialExpress case #0:\" <+> \"x=\"\n            <> pretty v <+> \"cidx=\"\n            <> pretty cidx <+> \"t =\" <+> pretty t\n            <> \"\\n  parentLang:\"\n            <> pretty parentLang\n            <> \"\\n  callLang:\"\n            <> pretty callLang\n            <> \"\\n  argLang:\"\n            <> pretty argLang\n            <> \"\\n  args:\"\n            <> pretty args\n        let x' = PolyBndVar (C (Idx cidx t)) idx\n        return ([idx], Nothing, x')\n      partialExpress x@(AnnoS (Idx _ t) (Idx cidx argLang, args) _)\n        | argLang == callLang = do\n            MM.sayVVV $\n              \"partialExpress case #2:\" <+> \"cidx=\"\n                <> pretty cidx <+> \"t =\" <+> pretty t\n                <> \"\\n  parentLang:\"\n                <> pretty parentLang\n                <> \"\\n  callLang:\"\n                <> pretty callLang\n                <> \"\\n  argLang:\"\n                <> pretty argLang\n                <> \"\\n  args:\"\n                <> pretty args\n            let argParentType = Idx cidx t\n            x' <- expressPolyExprWrap argLang argParentType x\n            return ([i | Arg i _ <- args], Nothing, x')\n        | otherwise = do\n            MM.sayVVV $\n              \"partialExpress case #1:\" <+> \"cidx=\"\n                <> pretty cidx <+> \"t =\" <+> pretty t\n                <> \"\\n  parentLang:\"\n                <> pretty parentLang\n                <> \"\\n  callLang:\"\n                <> pretty callLang\n                <> \"\\n  argLang:\"\n                <> pretty argLang\n                <> \"\\n  args:\"\n                <> pretty args\n            let argparentType = Idx cidx t\n            letVal <- expressPolyExprWrap argLang argparentType x\n            idx <- MM.getCounter\n            MM.sayVVV $ \"making index in partialExpress #1:\" <+> pretty idx\n\n            let x' = PolyLetVar (Idx cidx t) idx\n            return ([idx], Just (idx, letVal), x')\nexpressPolyExpr _ _ _ (AnnoS lambdaType@(Idx midx _) (Idx _ lang, manifoldArguments) (LamS vs body)) = do\n  MM.sayVVV $ \"expressPolyExpr LamS:\" <+> pretty lambdaType\n\n  body' <- expressPolyExprWrap lang lambdaType body\n\n  inputTypes <- case val lambdaType of\n    (FunT ts _) -> return ts\n    _ -> return []\n\n  let contextArguments = map unvalue $ take (length manifoldArguments - length vs) manifoldArguments\n      boundArguments = map unvalue $ drop (length contextArguments) manifoldArguments\n      typeBoundArguments = fromJust $ safeZipWith (\\t (Arg i _) -> Arg i (Just t)) inputTypes boundArguments\n\n  MM.sayVVV $\n    \"Express lambda:\"\n      <> \"\\n  vs:\" <+> pretty vs\n      <> \"\\n  lambdaType:\" <+> pretty lambdaType\n      <> \"\\n  manifoldArguments:\" <+> list (map pretty manifoldArguments)\n      <> \"\\n  contextArguments:\" <+> list (map pretty contextArguments)\n      <> \"\\n  boundArguments\" <+> list (map pretty typeBoundArguments)\n\n  return\n    . PolyManifold lang midx (ManifoldPart contextArguments typeBoundArguments)\n    . PolyReturn\n    $ body'\n-- Inline source call: skip PolyManifold, emit as direct subexpression\nexpressPolyExpr\n  findRemote\n  parentLang\n  _\n  ( AnnoS\n      (Idx midx _)\n      _\n      (AppS f@(AnnoS (Idx gidxCall (FunT inputs _)) (Idx cidxCall callLang, _) (ExeS (SrcCall src))) xs)\n    )\n    | srcInline src && isLocal = do\n        propagateScope gidxCall midx\n        xsExpr <- zipWithM (expressPolyExprWrap callLang) (map (Idx cidxCall) inputs) xs\n        expressPolyApp parentLang f xsExpr >>= stripPolyReturn\n    where\n      remote = findRemote parentLang callLang\n      isLocal = isNothing remote\n      stripPolyReturn (PolyReturn e) = return e\n      stripPolyReturn e = return e\nexpressPolyExpr\n  findRemote\n  parentLang\n  pc\n  ( AnnoS\n      (Idx midx _)\n      (_, args)\n      (AppS f@(AnnoS (Idx gidxCall (FunT inputs _)) (Idx cidxCall callLang, _) _) xs)\n    )\n    | isLocal = do\n        propagateScope gidxCall midx\n        xsExpr <- zipWithM (expressPolyExprWrap callLang) (map (Idx cidxCall) inputs) xs\n\n        func <- expressPolyApp parentLang f xsExpr\n        return\n          . PolyManifold callLang midx (ManifoldFull (map unvalue args))\n          $ func\n    | not isLocal = do\n        propagateScope gidxCall midx\n        let idxInputTypes = zipWith mkIdx xs inputs\n        mayXs <- safeZipWithM (expressPolyExprWrap callLang) idxInputTypes xs\n        func <- expressPolyApp parentLang f (fromJust mayXs)\n        return\n          . PolyManifold parentLang midx (ManifoldFull (map unvalue args))\n          . PolyReturn\n          . PolyApp\n            ( PolyRemoteInterface callLang pc [] (fromJust remote)\n                . PolyManifold callLang midx (ManifoldFull (map unvalue args))\n                $ func\n            )\n          $ [PolyBndVar (A parentLang) i | Arg i _ <- args]\n    where\n      remote = findRemote parentLang callLang\n      isLocal = isNothing remote\nexpressPolyExpr\n  findRemote\n  parentLang\n  (val -> FunT pinputs poutput)\n  e@(AnnoS (Idx midx (FunT callInputs _)) (Idx cidx callLang, _) _)\n    | isLocal = do\n        ids <- MM.takeFromCounter (length callInputs)\n        let lambdaVals = bindVarIds ids (map (C . Idx cidx) callInputs)\n            lambdaTypedArgs = fromJust $ safeZipWith annotate ids (map Just callInputs)\n        retapp <- expressPolyApp parentLang e lambdaVals\n        return\n          . PolyManifold callLang midx (ManifoldPass lambdaTypedArgs)\n          $ retapp\n    | otherwise = do\n        ids <- MM.takeFromCounter (length callInputs)\n        let lambdaArgs = [Arg i None | i <- ids]\n            lambdaTypedArgs = map (`Arg` Nothing) ids\n            callVals = bindVarIds ids (map (C . Idx cidx) callInputs)\n        retapp <- expressPolyApp callLang e callVals\n        return\n          . PolyManifold parentLang midx (ManifoldPass lambdaTypedArgs)\n          . PolyReturn\n          . PolyApp\n            ( PolyRemoteInterface callLang (Idx cidx poutput) (map ann lambdaArgs) (fromJust remote)\n                . PolyManifold callLang midx (ManifoldFull lambdaArgs)\n                $ retapp\n            )\n          $ fromJust\n          $ safeZipWith (PolyBndVar . C) (map (Idx cidx) pinputs) (map ann lambdaArgs)\n    where\n      remote = findRemote parentLang callLang\n      isLocal = isNothing remote\nexpressPolyExpr _ _ _ (AnnoS (Idx i c) (Idx cidx _, rs) (BndS v)) = do\n  MM.sayVVV $ \"express' VarS\" <+> parens (pretty v) <+> \"::\" <+> pretty c\n  case [j | (Arg j v') <- rs, v == v'] of\n    [r] -> return $ PolyBndVar (C (Idx cidx c)) r\n    rs' ->\n      MM.throwSourcedError i $\n        \"Expected VarS\"\n          <+> dquotes (pretty v)\n          <+> \"of type\"\n          <+> parens (pretty c)\n          <+> \"to match exactly one argument, found:\"\n          <+> list (map pretty rs')\n          <> \"\\n  v:\" <+> pretty v\n          <> \"\\n  cidx:\" <+> pretty cidx\n          <> \"\\n  gidx:\" <+> pretty cidx\n          <> \"\\n  rs:\" <+> list (map pretty rs)\nexpressPolyExpr _ _ _ (AnnoS (Idx i c) (Idx cidx _, rs) (LetBndS v)) = do\n  case [j | (Arg j v') <- rs, v == v'] of\n    [r] -> return $ PolyLetVar (Idx cidx c) r\n    _ -> MM.throwSourcedError i $ \"Undefined let-bound variable:\" <+> pretty v\nexpressPolyExpr\n  _\n  parentLang\n  parentType\n  (AnnoS _ (Idx cidx _, _) (LetS v e1 e2)) = do\n    let bodyArgs = case e2 of AnnoS _ (_, args) _ -> args\n        -- unused let-bound variables (e.g. from do-block bare statements) won't\n        -- appear in body args; use cidx as a unique dummy ID in that case\n        letId = case [j | Arg j v' <- bodyArgs, v' == v] of\n          [j] -> j\n          _ -> cidx\n    let e1Type = case e1 of AnnoS (Idx _ t) _ _ -> mkIdx e1 t\n    e1' <- expressPolyExprWrap parentLang e1Type e1\n    e2' <- expressPolyExprWrap parentLang parentType e2\n    return $ PolyLet letId e1' e2'\nexpressPolyExpr _ _ _ (AnnoS (Idx _ (VarT v)) (Idx cidx _, _) (RealS x)) = return $ PolyReal (Idx cidx v) x\nexpressPolyExpr _ _ _ (AnnoS (Idx _ (VarT v)) (Idx cidx _, _) (IntS x)) = return $ PolyInt (Idx cidx v) x\nexpressPolyExpr _ _ _ (AnnoS (Idx _ (VarT v)) (Idx cidx _, _) (LogS x)) = return $ PolyLog (Idx cidx v) x\nexpressPolyExpr _ _ _ (AnnoS (Idx _ (VarT v)) (Idx cidx _, _) (StrS x)) = return $ PolyStr (Idx cidx v) x\nexpressPolyExpr _ _ _ (AnnoS (Idx _ (VarT v)) (Idx cidx _, _) UniS) = return $ PolyNull (Idx cidx v)\nexpressPolyExpr _ _ _ (AnnoS (Idx _ (OptionalT (VarT v))) (Idx cidx _, _) NullS) = return $ PolyNull (Idx cidx v)\nexpressPolyExpr _ _ _ (AnnoS _ (Idx cidx _, _) NullS) = return $ PolyNull (Idx cidx (TV \"Unit\"))\nexpressPolyExpr _ parentLang pc (AnnoS (Idx midx (AppT (VarT v) [t])) (Idx cidx lang, args) (LstS xs)) = do\n  xs' <- mapM (\\x -> expressPolyExprWrap lang (mkIdx x t) x) xs\n  let e = PolyList (Idx cidx v) (Idx cidx t) xs'\n  return $ expressContainer pc (Idx midx parentLang) (Idx cidx lang) args e\nexpressPolyExpr _ _ _ (AnnoS _ _ (LstS _)) = error \"LstS can only be (AppP (VarP _) [_]) type\"\nexpressPolyExpr _ parentLang pc (AnnoS (Idx midx (AppT (VarT v) ts)) (Idx cidx lang, args) (TupS xs)) = do\n  let idxTs = zipWith mkIdx xs ts\n  xs' <- fromJust <$> safeZipWithM (expressPolyExprWrap lang) idxTs xs\n  let e = PolyTuple (Idx cidx v) (fromJust $ safeZip idxTs xs')\n  return $ expressContainer pc (Idx midx parentLang) (Idx cidx lang) args e\nexpressPolyExpr _ parentLang pc (AnnoS (Idx midx (NamT o v ps rs)) (Idx cidx lang, args) (NamS entries)) = do\n  let tsIdx = zipWith mkIdx (map snd entries) (map snd rs)\n  xs' <- fromJust <$> safeZipWithM (expressPolyExprWrap lang) tsIdx (map snd entries)\n  let e = PolyRecord o (Idx cidx v) (map (Idx cidx) ps) (zip (map fst rs) (zip tsIdx xs'))\n  return $ expressContainer pc (Idx midx parentLang) (Idx cidx lang) args e\nexpressPolyExpr _ pl pc (AnnoS (Idx i t) c e@(NamS _)) = do\n  scope <- MM.getGeneralScope i\n  case reduceType scope t of\n    (Just t') -> expressPolyExprWrap pl pc (AnnoS (Idx i t') c e)\n    Nothing -> error \"Expected a record type\"\n-- Recursive call used as a value (not applied via AppS)\nexpressPolyExpr _ parentLang _ (AnnoS (Idx i c) (Idx _cidx _, _) (CallS v)) = do\n  (mid, crossLang) <- lookupRecursiveTarget parentLang v\n  -- Strip EffectT from return type (serial manifolds force thunks)\n  case c of\n    FunT inputs (EffectT effs out) ->\n      return . PolyDoBlock (Idx i (EffectT effs out))\n        $ PolyExe (Idx i (FunT inputs out)) (RecCallP mid crossLang)\n    _ ->\n      return $ PolyExe (Idx i c) (RecCallP mid crossLang)\nexpressPolyExpr _ _ _ (AnnoS (Idx i _) _ (AppS (AnnoS _ _ (BndS v)) _)) =\n  MM.throwSourcedError i $\n    \"Undefined function\" <+> dquotes (pretty v) <> \", did you forget an import?\"\nexpressPolyExpr _ _ _ (AnnoS _ _ (AppS (AnnoS _ _ (LamS vs _)) _)) =\n  error $ \"All applications of lambdas should have been eliminated of length \" <> show (length vs)\nexpressPolyExpr _ parentLang pc (AnnoS (Idx midx t) (Idx cidx lang, args) (IfS cond thenE elseE)) = do\n  let boolType = VarT (TV \"Bool\")\n  cond' <- expressPolyExprWrap lang (mkIdx cond boolType) cond\n  thenE' <- expressPolyExprWrap lang (mkIdx thenE t) thenE\n  elseE' <- expressPolyExprWrap lang (mkIdx elseE t) elseE\n  let e = PolyIf cond' thenE' elseE'\n  return $ expressContainer pc (Idx midx parentLang) (Idx cidx lang) args e\nexpressPolyExpr _ parentLang pc (AnnoS (Idx midx t) (Idx cidx lang, args) (DoBlockS x)) = do\n  -- The inner expression has the unwrapped type (without EffectT).\n  -- Passing EffectT through would cause cross-language calls to generate\n  -- effect-wrapped return types for pure functions.\n  let innerT = case t of EffectT _ inner -> inner; _ -> t\n  x' <- expressPolyExprWrap lang (mkIdx x innerT) x\n  let e = PolyDoBlock (Idx cidx t) x'\n  return $ expressContainer pc (Idx midx parentLang) (Idx cidx lang) args e\nexpressPolyExpr _ parentLang _ (AnnoS (Idx _ t) (Idx cidx _, _) (CoerceS coercion x)) = do\n  let innerType = unapplyCoercion coercion t\n  x' <- expressPolyExprWrap parentLang (Idx cidx innerType) x\n  return $ PolyCoerce coercion (Idx cidx t) x'\nexpressPolyExpr _ parentLang _ (AnnoS (Idx _ t) (Idx cidx _lang, _) (EvalS x)) = do\n  -- Always use pushForceIntoRemote: if the inner expression contains a\n  -- PolyRemoteInterface (cross-language call), it strips EffectT Set.empty so the remote\n  -- pool forces the thunk and serializes the concrete result. If no\n  -- PolyRemoteInterface is found (same-language), it falls back to PolyEval.\n  -- We cannot rely on parentLang /= lang because Realize.hs assigns both to\n  -- the same language when the EvalS node lives in a same-language context,\n  -- even if the inner expression calls into a foreign language.\n  x' <- expressPolyExprWrap parentLang (Idx cidx t) x\n  return $ pushForceIntoRemote (Idx cidx t) x'\nexpressPolyExpr _ parentLang pc (AnnoS (Idx midx t) (Idx cidx lang, args) (IntrinsicS intr xs)) = do\n  xs' <- mapM (\\x@(AnnoS (Idx xi xt) _ _) -> expressPolyExprWrap lang (Idx xi xt) x) xs\n  let e = PolyIntrinsic (Idx cidx t) intr xs'\n  return $ expressContainer pc (Idx midx parentLang) (Idx cidx lang) args e\n\n-- Nullary source/pattern call (e.g., clockResNs :: {Int})\nexpressPolyExpr\n  findRemote\n  parentLang\n  pc\n  f@(AnnoS (Idx midx _) (Idx _ callLang, args) (ExeS _))\n    | isLocal = do\n        call <- expressPolyApp parentLang f []\n        return\n          . PolyManifold callLang midx (ManifoldFull (map unvalue args))\n          $ call\n    | otherwise = do\n        call <- expressPolyApp callLang f []\n        return\n          . PolyManifold parentLang midx (ManifoldFull (map unvalue args))\n          . PolyReturn\n          . PolyApp\n            ( PolyRemoteInterface callLang pc [] (fromJust remote)\n                . PolyManifold callLang midx (ManifoldFull (map unvalue args))\n                $ call\n            )\n          $ [PolyBndVar (A parentLang) i | Arg i _ <- args]\n    where\n      remote = findRemote parentLang callLang\n      isLocal = isNothing remote\nexpressPolyExpr _ _ parentType x@(AnnoS (Idx m t) _ _) = do\n  MM.sayVVV \"Bad case\"\n  MM.sayVVV $ \"  t :: \" <> pretty t\n  name' <- MM.metaName m\n  case name' of\n    (Just v) ->\n      MM.throwSourcedError m $\n        \"Missing concrete:\"\n          <> \"\\n  t:\" <+> viaShow t\n          <> \"\\n  v:\" <+> pretty v\n          <> \"\\n parentType:\" <+> pretty parentType\n          <> \"\\n x:\" <+> pretty x\n    Nothing ->\n      MM.throwSourcedError m $\n        \"Missing concrete in unnamed function:\"\n          <> \"\\n  t:\" <+> pretty t\n          <> \"\\n parentType:\" <+> pretty parentType\n          <> \"\\n x:\" <+> pretty x\n\nexpressPolyApp ::\n  Lang ->\n  AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]) ->\n  [PolyExpr] ->\n  MorlocMonad PolyExpr\nexpressPolyApp _ (AnnoS g _ (ExeS (SrcCall src))) xs =\n  return . PolyReturn $ PolyApp (PolyExe g (SrcCallP src)) xs\nexpressPolyApp _ (AnnoS g _ (ExeS (PatCall pat))) xs =\n  return . PolyReturn $ PolyApp (PolyExe g (PatCallP pat)) xs\nexpressPolyApp lang f@(AnnoS g@(Idx i _) _ (AppS _ _)) es = do\n  fe <- expressPolyExprWrap lang g f\n  return\n    . PolyLet i fe\n    . PolyReturn\n    $ PolyApp (PolyLetVar g i) es\nexpressPolyApp _ (AnnoS g (_, args) (BndS v)) xs = do\n  case [j | (Arg j u) <- args, u == v] of\n    [j] -> return . PolyReturn $ PolyApp (PolyExe g (LocalCallP j)) xs\n    _ -> error \"Unreachable? BndS value should have been wired uniquely to args previously\"\nexpressPolyApp parentLang (AnnoS (Idx i t) _ (CallS v)) xs = do\n  (mid, crossLang) <- lookupRecursiveTarget parentLang v\n  -- Serial manifolds force thunks before serializing, so strip EffectT from the\n  -- return type and wrap in PolyDoBlock to reconstruct the thunk after deserializing.\n  case t of\n    FunT inputs (EffectT effs out) ->\n      return . PolyReturn\n        . PolyDoBlock (Idx i (EffectT effs out))\n        $ PolyApp (PolyExe (Idx i (FunT inputs out)) (RecCallP mid crossLang)) xs\n    _ ->\n      return . PolyReturn $ PolyApp (PolyExe (Idx i t) (RecCallP mid crossLang)) xs\nexpressPolyApp _ (AnnoS _ _ (LamS _ _)) _ = error \"unexpected LamS - should have been handled\"\nexpressPolyApp _ (AnnoS _ _ (VarS _ _)) _ = error \"unexpected VarS - should have been substituted\"\nexpressPolyApp _ _ _ = error \"Unreachable? This does not seem to be applicable\"\n\nexpressContainer ::\n  Indexed Type -> Indexed Lang -> Indexed Lang -> [Arg EVar] -> PolyExpr -> PolyExpr\nexpressContainer pc (Idx midx parentLang) (Idx _ lang) args e\n  | parentLang /= lang =\n      PolyApp\n        ( PolyRemoteInterface lang pc [i | Arg i _ <- args] ForeignCall\n            . PolyManifold lang midx (ManifoldFull (map unvalue args))\n            . PolyReturn\n            $ e\n        )\n        $ [PolyBndVar (A parentLang) i | Arg i _ <- args]\n  | otherwise = e\n\nunvalue :: Arg a -> Arg None\nunvalue (Arg i _) = Arg i None\n\n{- | Handle cross-language force by stripping EffectT from the callee's function\nreturn type. The source function actually returns the unwrapped type; the\nEffectT wrapper is a type-system abstraction. By removing it, Common.hs won't\nauto-wrap in DoBlockN, so the raw value is serialized directly.\nIf no PolyRemoteInterface is found, falls back to wrapping in PolyEval.\n-}\npushForceIntoRemote :: Indexed Type -> PolyExpr -> PolyExpr\npushForceIntoRemote t = go\n  where\n    go (PolyManifold l m f e) = PolyManifold l m f (go e)\n    go (PolyReturn e) = PolyReturn (go e)\n    go (PolyLet i e1 e2) = PolyLet i e1 (go e2)\n    go (PolyApp (PolyRemoteInterface lang _ args remote callee) xs) =\n      PolyApp (PolyRemoteInterface lang t args remote (stripThunkReturn callee)) xs\n    go e = PolyEval t e -- fallback for local expressions\n\n    -- Strip EffectT from the function's return type inside the callee manifold\n    stripThunkReturn (PolyManifold l m f body) = PolyManifold l m f (stripInBody body)\n    stripThunkReturn e = stripInBody e\n\n    stripInBody (PolyReturn e) = PolyReturn (stripInExe e)\n    stripInBody (PolyLet i e1 e2) = PolyLet i e1 (stripInBody e2)\n    stripInBody e = stripInExe e\n\n    stripInExe (PolyApp (PolyExe (Idx gidx (FunT inputs (EffectT _ out))) exe) xs) =\n      PolyApp (PolyExe (Idx gidx (FunT inputs out)) exe) xs\n    stripInExe (PolyApp (PolyExe (Idx gidx (EffectT _ out)) exe) xs) =\n      PolyApp (PolyExe (Idx gidx out) exe) xs\n    stripInExe e = e\n\n-- | Resolve a function name to its manifold ID and determine if the call is cross-language.\n-- Returns (manifold ID, Nothing) for same-pool calls, (manifold ID, Just targetLang) for foreign calls.\n-- Searches all manifolds in stateName, not just exports, to support non-exported recursive helpers.\nlookupRecursiveTarget :: Lang -> EVar -> MorlocMonad (Int, Maybe Lang)\nlookupRecursiveTarget parentLang v = do\n  nameMap <- MM.gets stateName\n  langMap <- MM.gets stateManifoldLang\n  -- Filter to concrete manifolds only (those in langMap) to avoid picking up\n  -- general/polymorphic indices that don't have serial manifold definitions\n  let reverseMap = Map.fromList [(name, idx) | (idx, name) <- Map.toList nameMap, Map.member idx langMap]\n  case Map.lookup v reverseMap of\n    (Just mid) -> do\n      let crossLang = case Map.lookup mid langMap of\n            Just tl | tl /= parentLang -> Just tl\n            _ -> Nothing\n      return (mid, crossLang)\n    Nothing -> MM.throwSystemError $ \"Cannot resolve recursive call to\" <+> pretty v\n\nbindVarIds :: [Int] -> [Three Lang Type (Indexed Type)] -> [PolyExpr]\nbindVarIds [] [] = []\nbindVarIds (i : args) (t : types) = PolyBndVar t i : bindVarIds args types\nbindVarIds [] ts = error $ \"bindVarIds: too few arguments: \" <> show ts\nbindVarIds _ [] = error \"bindVarIds: too few types\"\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Grammars/Common.hs",
    "content": "{-# LANGUAGE DeriveGeneric #-}\n{-# LANGUAGE FlexibleContexts #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Grammars.Common\nDescription : Shared codegen utilities: manifold inversion, naming, pool doc merging\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProvides 'invertSerialManifold' (the preprocessing step that all translators\nrun), 'PoolDocs' (the accumulator for the lowering fold), naming convention\nhelpers, and the fold framework ('FoldRules', 'foldWithSerialManifoldM').\n-}\nmodule Morloc.CodeGenerator.Grammars.Common\n  ( invertSerialManifold\n  , PoolDocs (..)\n  , mergePoolDocs\n\n    -- * Naming conventions\n  , svarNamer\n  , nvarNamer\n  , helperNamer\n  , argNamer\n  , manNamer\n  , patternSetter\n\n    -- * Record collection/unification\n  , RecEntry (..)\n  , RecMap\n  , collectRecords\n  , unifyRecords\n  , structName\n\n    -- * Dispatch extraction\n  , DispatchEntry (..)\n  , extractLocalDispatch\n  , extractRemoteDispatch\n\n    -- * Utilities\n  , provideClosure\n  , makeManifoldIndexer\n  , renderPoolDocs\n  ) where\n\nimport qualified Control.Monad.State as CMS\nimport Data.Binary (Binary)\nimport GHC.Generics (Generic)\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.CodeGenerator.Serial (serialAstToType)\nimport Morloc.Data.Doc\nimport Morloc.Data.Text (Text)\nimport Morloc.Monad (Identity, Index, newIndex, runIdentity, runIndex)\n\n-- Stores pieces of code made while building a pool\ndata PoolDocs = PoolDocs\n  { poolCompleteManifolds :: [MDoc]\n  -- ^ completely generated manifolds\n  , poolExpr :: MDoc\n  -- ^ the inplace expression\n  , poolPriorLines :: [MDoc]\n  -- ^ lines to precede the returned expression\n  , poolPriorExprs :: [MDoc]\n  -- ^ expressions that should precede this manifold, may include helper\n  -- functions or imports\n  }\n\ninstance Defaultable PoolDocs where\n  defaultValue =\n    PoolDocs\n      { poolCompleteManifolds = []\n      , poolExpr = \"\"\n      , poolPriorLines = []\n      , poolPriorExprs = []\n      }\n\n{- | Merge a series of pools, keeping prior lines, expression and manifolds, but\nmerging bodies with a function. For example, merge all elements in a list and\nprocess the poolExpr variables into list syntax in the given language.\n-}\nmergePoolDocs :: ([MDoc] -> MDoc) -> [PoolDocs] -> PoolDocs\nmergePoolDocs f ms =\n  PoolDocs\n    { poolCompleteManifolds = concatMap poolCompleteManifolds ms\n    , poolExpr = f (map poolExpr ms)\n    , poolPriorLines = concatMap poolPriorLines ms\n    , poolPriorExprs = concatMap poolPriorExprs ms\n    }\n\nprovideClosure :: Source -> [MDoc] -> [[MDoc]]\nprovideClosure src args0 = f (srcRsize src) args0\n  where\n    f [] args = [args]\n    f (n : ns) args\n      | n < length args = take n args : f ns (drop n args)\n      | otherwise =\n          error $\n            \"Invalid rsize value for imported \"\n              <> show (srcLang src)\n              <> \" function \"\n              <> show (unEVar (srcAlias src))\n\nsvarNamer :: Int -> MDoc\nsvarNamer i = \"s\" <> viaShow i\n\nnvarNamer :: Int -> MDoc\nnvarNamer i = \"n\" <> viaShow i\n\nhelperNamer :: Int -> MDoc\nhelperNamer i = \"helper\" <> viaShow i\n\nargNamer :: (HasTypeM t) => Arg t -> MDoc\nargNamer (Arg i (typeMof -> Native _)) = nvarNamer i\nargNamer (Arg i (typeMof -> Function _ _)) = nvarNamer i\nargNamer (Arg i _) = svarNamer i\n\n-- create a name for a manifold based on a unique id\nmanNamer :: Int -> MDoc\nmanNamer i = \"m\" <> viaShow i\n\nrenderPoolDocs :: PoolDocs -> MDoc\nrenderPoolDocs e = vsep . punctuate line $ poolPriorExprs e <> poolCompleteManifolds e\n\n-- The surround rules control the setting of manifold ids across the recursion\nmakeManifoldIndexer :: (Monad m) => m Int -> (Int -> m ()) -> SurroundManifoldM m sm nm se ne sr nr\nmakeManifoldIndexer getId putId =\n  defaultValue\n    { surroundSerialManifoldM = surroundSM\n    , surroundNativeManifoldM = surroundNM\n    }\n  where\n    -- \\| Run a computation in a child manifold, manage manifold indices\n    descend childManifoldIndex x f = do\n      originalManifoldIndex <- getId\n      putId childManifoldIndex\n      x' <- f x\n      putId originalManifoldIndex\n      return x'\n\n    surroundSM f sm@(SerialManifold i _ _ _ _) = descend i sm f\n\n    surroundNM f nm@(NativeManifold i _ _ _) = descend i nm f\n\npatternSetter ::\n  (TypeF -> [MDoc] -> MDoc) -> -- make a tuple from a type and list of elements\n  (TypeF -> [MDoc] -> MDoc) -> -- make a record from a type and list of elements\n  (TypeF -> MDoc -> Int -> MDoc) -> -- access an element in a tuple\n  (TypeF -> MDoc -> Text -> MDoc) -> -- access an element in a record\n  MDoc -> -- initial data variable name\n  TypeF -> -- data type\n  Selector -> -- selection pattern\n  [MDoc] -> -- ordered arguments substituted at set sites\n  MDoc -- the returned data structure with a new spine that reuses unchanged fields\npatternSetter makeTuple makeRecord accessTuple accessRecord dat0 t0 s0 args0 =\n  snd (setter dat0 t0 s0 args0)\n  where\n    setter :: MDoc -> TypeF -> Selector -> [MDoc] -> ([MDoc], MDoc)\n\n    -- tuple setters\n    setter dat1 tupleType@(AppF _ ts1) (SelectorIdx s1 ss1) args1 =\n      second (makeTuple tupleType) $ statefulMap (chooseField dat1 (s1 : ss1)) args1 (zip [0 ..] ts1)\n      where\n        chooseField :: MDoc -> [(Int, Selector)] -> [MDoc] -> (Int, TypeF) -> ([MDoc], MDoc)\n        chooseField dat ss args (i, t) =\n          let dat' = accessTuple tupleType dat i\n           in case (lookup i ss) of\n                (Just s) -> setter dat' t s args\n                Nothing -> (args, dat')\n\n    -- record setters\n    setter dat1 recType@(NamF _ _ _ rs1) (SelectorKey s1 ss1) args1 =\n      second (makeRecord recType) $ statefulMap (chooseField dat1 (s1 : ss1)) args1 rs1\n      where\n        chooseField :: MDoc -> [(Text, Selector)] -> [MDoc] -> (Key, TypeF) -> ([MDoc], MDoc)\n        chooseField dat ss args (Key k, t) =\n          let dat' = accessRecord recType dat k\n           in case (lookup k ss) of\n                (Just s) -> setter dat' t s args\n                Nothing -> (args, dat')\n    setter _ _ _ (arg : args2) = (args2, arg)\n    setter _ _ _ [] = error \"Illegal setter\"\n\n-- Represents the dependency of a on previously bound expressions\ndata D a = D a [(Int, Either SerialExpr NativeExpr)]\n\nunD :: D a -> a\nunD (D a _) = a\n\ngetDeps :: D a -> [(Int, Either SerialExpr NativeExpr)]\ngetDeps (D _ d) = d\n\nclass Dependable a where\n  weave :: D a -> a\n  atomize :: a -> [(Int, Either SerialExpr NativeExpr)] -> Index (D a)\n  isAtomic :: a -> Bool\n\ninstance Dependable NativeExpr where\n  weave (D x ((i, Left se) : deps)) = weave $ D (SerialLetN i se x) deps\n  weave (D x ((i, Right ne) : deps)) = weave $ D (NativeLetN i ne x) deps\n  weave (D x []) = x\n\n  atomize e deps\n    | isAtomic e = return $ D e deps\n    | otherwise = do\n        i <- newIndex\n        return $ D (LetVarN (typeFof e) i) ((i, Right e) : deps)\n\n  isAtomic (AppExeN _ _ _) = False\n  isAtomic ManN {} = False\n  isAtomic SerialLetN {} = False\n  isAtomic NativeLetN {} = False\n  isAtomic ListN {} = False\n  isAtomic TupleN {} = False\n  isAtomic RecordN {} = False\n  isAtomic IfN {} = False\n  isAtomic _ = True\n\ninstance Dependable SerialExpr where\n  weave (D x ((i, Left se) : deps)) = weave $ D (SerialLetS i se x) deps\n  weave (D x ((i, Right ne) : deps)) = weave $ D (NativeLetS i ne x) deps\n  weave (D x []) = x\n\n  atomize e deps\n    | isAtomic e = return $ D e deps\n    | otherwise = do\n        i <- newIndex\n        t <- case typeMof e of\n          Passthrough -> return Nothing\n          (Serial ft) -> return $ Just ft\n          _ -> return Nothing\n        -- _ -> error \"This type must be serialized\"\n        return $ D (LetVarS t i) ((i, Left e) : deps)\n\n  isAtomic (LetVarS _ _) = True\n  isAtomic (BndVarS _ _) = True\n  isAtomic (ReturnS _) = True\n  isAtomic (SerializeS _ _) = True\n  isAtomic _ = False\n\n-- ---------------------------------------------------------------------------\n-- Variable index substitution [old/new]\n--\n-- When a let-binding has a trivial RHS (just a variable reference), we\n-- eliminate it by substituting the bound index throughout the body and\n-- remaining deps.  Since variable indices appear in many AST positions\n-- (variable refs, binding sites, manifold parameter forms, pool call\n-- descriptors, LocalCallP executable refs), the substitution must\n-- reach all of them.\n-- ---------------------------------------------------------------------------\n\n-- | Detect a trivial native variable reference.\nnativeTrivialVar :: NativeExpr -> Maybe Int\nnativeTrivialVar (LetVarN _ j) = Just j\nnativeTrivialVar (BndVarN _ j) = Just j\nnativeTrivialVar _ = Nothing\n\n-- | Detect a trivial serial variable reference.\nserialTrivialVar :: SerialExpr -> Maybe Int\nserialTrivialVar (LetVarS _ j) = Just j\nserialTrivialVar (BndVarS _ j) = Just j\nserialTrivialVar _ = Nothing\n\n-- | Substitute variable index [old/new] throughout a NativeExpr.\nrenameNE :: Int -> Int -> NativeExpr -> NativeExpr\nrenameNE old new = go where\n  ri i = if i == old then new else i\n  go (ManN nm) = ManN (renameNM old new nm)\n  go (AppExeN t exe args) = AppExeN t (renameExe old new exe) (map goA args)\n  go (ReturnN ne) = ReturnN (go ne)\n  go (SerialLetN i se ne) = SerialLetN (ri i) (renameSE old new se) (go ne)\n  go (NativeLetN i ne1 ne2) = NativeLetN (ri i) (go ne1) (go ne2)\n  go (LetVarN t i) = LetVarN t (ri i)\n  go (BndVarN t i) = BndVarN t (ri i)\n  go (DeserializeN t s se) = DeserializeN t s (renameSE old new se)\n  go (ExeN t exe) = ExeN t (renameExe old new exe)\n  go (ListN v t nes) = ListN v t (map go nes)\n  go (TupleN v xs) = TupleN v (map go xs)\n  go (RecordN o v ps rs) = RecordN o v ps (map (second go) rs)\n  go e@(LogN _ _) = e\n  go e@(RealN _ _) = e\n  go e@(IntN _ _) = e\n  go e@(StrN _ _) = e\n  go e@(NullN _) = e\n  go (DoBlockN t ne) = DoBlockN t (go ne)\n  go (EvalN t ne) = EvalN t (go ne)\n  go (CoerceN c t ne) = CoerceN c t (go ne)\n  go (IfN t c th el) = IfN t (go c) (go th) (go el)\n  go (IntrinsicN t intr msch nes) = IntrinsicN t intr msch (map go nes)\n  goA (NativeArgManifold nm) = NativeArgManifold (renameNM old new nm)\n  goA (NativeArgExpr ne) = NativeArgExpr (go ne)\n\n-- | Substitute variable index [old/new] throughout a SerialExpr.\nrenameSE :: Int -> Int -> SerialExpr -> SerialExpr\nrenameSE old new = go where\n  ri i = if i == old then new else i\n  go (ManS sm) = ManS (renameSM old new sm)\n  go (AppPoolS t p args) = AppPoolS t (renamePoolCall old new p) (map goA args)\n  go (AppRecS t m es) = AppRecS t m (map go es)\n  go (AppForeignRecS t m s es) = AppForeignRecS t m s (map go es)\n  go (ReturnS se) = ReturnS (go se)\n  go (SerialLetS i se1 se2) = SerialLetS (ri i) (go se1) (go se2)\n  go (NativeLetS i ne se) = NativeLetS (ri i) (renameNE old new ne) (go se)\n  go (LetVarS mt i) = LetVarS mt (ri i)\n  go (BndVarS mt i) = BndVarS mt (ri i)\n  go (SerializeS s ne) = SerializeS s (renameNE old new ne)\n  goA (SerialArgManifold sm) = SerialArgManifold (renameSM old new sm)\n  goA (SerialArgExpr se) = SerialArgExpr (go se)\n\nrenameNM :: Int -> Int -> NativeManifold -> NativeManifold\nrenameNM old new (NativeManifold m lang form body) =\n  NativeManifold m lang (renameForm old new form) (renameNE old new body)\n\nrenameSM :: Int -> Int -> SerialManifold -> SerialManifold\nrenameSM old new (SerialManifold m lang form hf body) =\n  SerialManifold m lang (renameForm old new form) hf (renameSE old new body)\n\nrenameForm :: Int -> Int -> ManifoldForm a b -> ManifoldForm a b\nrenameForm old new form = case form of\n  ManifoldPass args -> ManifoldPass (map ra args)\n  ManifoldFull args -> ManifoldFull (map ra args)\n  ManifoldPart ctx bnd -> ManifoldPart (map ra ctx) (map ra bnd)\n  where\n    ra (Arg i t) = Arg (if i == old then new else i) t\n\nrenamePoolCall :: Int -> Int -> PoolCall -> PoolCall\nrenamePoolCall old new (PoolCall mid sock rform args) =\n  PoolCall mid sock rform (map ra args)\n  where\n    ra (Arg i t) = Arg (if i == old then new else i) t\n\nrenameExe :: Int -> Int -> ExecutableExpressionPool -> ExecutableExpressionPool\nrenameExe old new (LocalCallP i) = LocalCallP (if i == old then new else i)\nrenameExe _ _ other = other\n\nrenameDeps :: Int -> Int -> [(Int, Either SerialExpr NativeExpr)]\n           -> [(Int, Either SerialExpr NativeExpr)]\nrenameDeps old new = map f where\n  ri i = if i == old then new else i\n  f (i, Left se) = (ri i, Left (renameSE old new se))\n  f (i, Right ne) = (ri i, Right (renameNE old new ne))\n\ninvertSerialManifold :: SerialManifold -> SerialManifold\ninvertSerialManifold sm0 =\n  runIndex (maxIndex sm0) (unD <$> foldSerialManifoldM fm sm0)\n  where\n    fm =\n      FoldManifoldM\n        { opSerialManifoldM = invertSerialManifoldM\n        , opNativeManifoldM = invertNativeManifoldM\n        , opSerialExprM = invertSerialExprM\n        , opNativeExprM = invertNativeExprM\n        , opSerialArgM = invertSerialArgM\n        , opNativeArgM = invertNativeArgM\n        }\n\n    invertSerialManifoldM :: SerialManifold_ (D SerialExpr) -> Index (D SerialManifold)\n    invertSerialManifoldM (SerialManifold_ m lang form headForm se) = do\n      return (D (SerialManifold m lang form headForm (weave se)) [])\n\n    invertNativeManifoldM :: NativeManifold_ (D NativeExpr) -> Index (D NativeManifold)\n    invertNativeManifoldM (NativeManifold_ m lang form (weave -> ne)) = do\n      return (D (NativeManifold m lang form ne) [])\n\n    invertSerialExprM ::\n      SerialExpr_ (D SerialManifold) (D SerialExpr) (D NativeExpr) (D SerialArg) (D NativeArg) ->\n      Index (D SerialExpr)\n    invertSerialExprM (ManS_ (D sm lets)) = return $ D (ManS sm) lets\n    invertSerialExprM (AppPoolS_ t pool serialArgs) = do\n      let serialArgs' = map unD serialArgs\n          deps = concatMap getDeps serialArgs\n      atomize (AppPoolS t pool serialArgs') deps\n    invertSerialExprM (ReturnS_ (D se lets)) = return $ D (ReturnS se) lets\n    invertSerialExprM (SerialLetS_ i (D se1 lets1) (D se2 lets2))\n      | Just j <- serialTrivialVar se1 =\n          return $ D (renameSE i j se2) (renameDeps i j lets2 <> lets1)\n      | otherwise =\n          return $ D se2 (lets2 <> ((i, Left se1) : lets1))\n    invertSerialExprM (NativeLetS_ i (D ne1 lets1) (D se2 lets2))\n      | Just j <- nativeTrivialVar ne1 =\n          return $ D (renameSE i j se2) (renameDeps i j lets2 <> lets1)\n      | otherwise =\n          return $ D se2 (lets2 <> ((i, Right ne1) : lets1))\n    invertSerialExprM (LetVarS_ t i) = atomize (LetVarS t i) []\n    invertSerialExprM (BndVarS_ t i) = atomize (BndVarS t i) []\n    invertSerialExprM (AppRecS_ t mid serialExprs) = do\n      let serialExprs' = map unD serialExprs\n          deps = concatMap getDeps serialExprs\n      atomize (AppRecS t mid serialExprs') deps\n    invertSerialExprM (AppForeignRecS_ t mid socket serialExprs) = do\n      let serialExprs' = map unD serialExprs\n          deps = concatMap getDeps serialExprs\n      atomize (AppForeignRecS t mid socket serialExprs') deps\n    invertSerialExprM (SerializeS_ s (D ne lets)) = atomize (SerializeS s ne) lets\n\n    invertNativeExprM ::\n      NativeExpr_ (D NativeManifold) (D SerialExpr) (D NativeExpr) (D SerialArg) (D NativeArg) ->\n      Index (D NativeExpr)\n    invertNativeExprM (AppExeN_ t exe nativeArgs) = do\n      let nativeArgs' = map unD nativeArgs\n          deps = concatMap getDeps nativeArgs\n      case (t, exe) of\n        -- Source functions return the unwrapped type; the compiler wraps in suspend\n        (EffectF _ innerT, SrcCallP _) ->\n          return $ D (DoBlockN t (weave (D (AppExeN innerT exe nativeArgs') deps))) []\n        (OptionalF _, SrcCallP _) ->\n          atomize (AppExeN t exe nativeArgs') deps\n        _ -> atomize (AppExeN t exe nativeArgs') deps\n    invertNativeExprM (ManN_ (D nm lets)) = atomize (ManN nm) lets\n    invertNativeExprM (ReturnN_ (D ne lets)) = atomize (ReturnN ne) lets\n    -- Eliminate trivial let-bindings where the RHS is just a variable\n    -- reference, e.g. \"let i = j\" becomes substitution [i/j] in body.\n    -- This avoids redundant assignments like \"n7 = n8\" in generated code.\n    invertNativeExprM (SerialLetN_ i (D se1 lets1) (D ne2 lets2))\n      | Just j <- serialTrivialVar se1 =\n          return $ D (renameNE i j ne2) (renameDeps i j lets2 <> lets1)\n      | otherwise =\n          return $ D ne2 (lets2 <> ((i, Left se1) : lets1))\n    invertNativeExprM (NativeLetN_ i (D ne1 lets1) (D ne2 lets2))\n      | Just j <- nativeTrivialVar ne1 =\n          return $ D (renameNE i j ne2) (renameDeps i j lets2 <> lets1)\n      | otherwise =\n          return $ D ne2 (lets2 <> ((i, Right ne1) : lets1))\n    invertNativeExprM (LetVarN_ t i) = atomize (LetVarN t i) []\n    invertNativeExprM (BndVarN_ t i) = atomize (BndVarN t i) []\n    invertNativeExprM (DeserializeN_ t s (D se lets)) = atomize (DeserializeN t s se) lets\n    invertNativeExprM (ExeN_ t x) = atomize (ExeN t x) []\n    invertNativeExprM (ListN_ v t nes) = atomize (ListN v t (map unD nes)) (concatMap getDeps nes)\n    invertNativeExprM (TupleN_ v xs) = atomize (TupleN v (map unD xs)) (concatMap getDeps xs)\n    invertNativeExprM (RecordN_ o v ps rs) = atomize (RecordN o v ps (map (second unD) rs)) (concatMap (getDeps . snd) rs)\n    invertNativeExprM (LogN_ v x) = atomize (LogN v x) []\n    invertNativeExprM (RealN_ v x) = atomize (RealN v x) []\n    invertNativeExprM (IntN_ v x) = atomize (IntN v x) []\n    invertNativeExprM (StrN_ v x) = atomize (StrN v x) []\n    invertNativeExprM (NullN_ v) = atomize (NullN v) []\n    -- keep dependencies inside suspend so thunk body stays lazy\n    invertNativeExprM (DoBlockN_ t (D ne lets)) = return $ D (DoBlockN t (weave (D ne lets))) []\n    invertNativeExprM (EvalN_ t (D ne lets)) = atomize (EvalN t ne) lets\n    -- coercion is transparent: pass through like EvalN\n    invertNativeExprM (CoerceN_ c t (D ne lets)) = atomize (CoerceN c t ne) lets\n    -- keep dependencies inside if branches (like suspend)\n    invertNativeExprM (IfN_ t (D condNe condLets) (D thenNe thenLets) (D elseNe elseLets)) =\n      atomize (IfN t (weave (D condNe condLets)) (weave (D thenNe thenLets)) (weave (D elseNe elseLets))) []\n    invertNativeExprM (IntrinsicN_ t intr msch nes) =\n      atomize (IntrinsicN t intr msch (map unD nes)) (concatMap getDeps nes)\n\n    invertSerialArgM :: SerialArg_ (D SerialManifold) (D SerialExpr) -> Index (D SerialArg)\n    invertSerialArgM (SerialArgManifold_ (D sm deps)) = return $ D (SerialArgManifold sm) deps\n    invertSerialArgM (SerialArgExpr_ (D se deps)) = return $ D (SerialArgExpr se) deps\n\n    invertNativeArgM :: NativeArg_ (D NativeManifold) (D NativeExpr) -> Index (D NativeArg)\n    invertNativeArgM (NativeArgManifold_ (D nm deps)) = return $ D (NativeArgManifold nm) deps\n    invertNativeArgM (NativeArgExpr_ (D ne deps)) = return $ D (NativeArgExpr ne) deps\n\nmaxIndex :: SerialManifold -> Int\nmaxIndex = (+ 1) . runIdentity . foldSerialManifoldM fm\n  where\n    fm =\n      FoldManifoldM\n        { opSerialManifoldM = findSerialManifoldIndices\n        , opNativeManifoldM = findNativeManifoldIndices\n        , opSerialExprM = findSerialIndices\n        , opNativeExprM = findNativeIndices\n        , opSerialArgM = return . foldlSA max 0\n        , opNativeArgM = return . foldlNA max 0\n        }\n\n    findSerialManifoldIndices :: (Monad m) => SerialManifold_ Int -> m Int\n    findSerialManifoldIndices (SerialManifold_ _ _ form _ bodyMax) = do\n      let formIndices = abilist const const form\n      return $ foldl max bodyMax formIndices\n\n    findNativeManifoldIndices :: (Monad m) => NativeManifold_ Int -> m Int\n    findNativeManifoldIndices (NativeManifold_ _ _ form bodyMax) = do\n      let formIndices = abilist const const form\n      return $ foldl max bodyMax formIndices\n\n    findSerialIndices :: (Monad m) => SerialExpr_ Int Int Int Int Int -> m Int\n    findSerialIndices (LetVarS_ _ i) = return i\n    findSerialIndices (BndVarS_ _ i) = return i\n    findSerialIndices e = return $ foldlSE max 0 e\n\n    findNativeIndices :: (Monad m) => NativeExpr_ Int Int Int Int Int -> m Int\n    findNativeIndices (LetVarN_ _ i) = return i\n    findNativeIndices (BndVarN_ _ i) = return i\n    findNativeIndices e = return $ foldlNE max 0 e\n\n{- | A record entry stores the common name, keys, and types of records that are\nnot imported from source. These records are generated as structs (or\nequivalent) in the pool. 'unifyRecords' takes all such records and \"unifies\"\nones with the same name and keys. The unified records may have different\ntypes, but they will all be instances of the same generic struct. Fields that\ndiffer between instances are made generic.\n-}\ndata RecEntry = RecEntry\n  { recName :: MDoc\n  , recFields ::\n      [ ( Key\n        , Maybe TypeF\n        )\n      ]\n  }\n  deriving (Show)\n\n-- | Lookup table mapping (FVar, keys) to their unified RecEntry.\ntype RecMap = [((FVar, [Key]), RecEntry)]\n\ncollectRecords :: SerialManifold -> [(FVar, Int, [(Key, TypeF)])]\ncollectRecords e0@(SerialManifold i0 _ _ _ _) =\n  unique $ CMS.evalState (surroundFoldSerialManifoldM manifoldIndexer fm e0) i0\n  where\n    fm = defaultValue {opFoldWithNativeExprM = nativeExpr, opFoldWithSerialExprM = serialExpr}\n\n    manifoldIndexer = makeManifoldIndexer CMS.get CMS.put\n\n    nativeExpr _ (DeserializeN_ t s xs) = do\n      manifoldIndex <- CMS.get\n      let tRecs = seekRecs manifoldIndex t\n          sRecs = seekRecs manifoldIndex (serialAstToType s)\n      return $ xs <> tRecs <> sRecs\n    nativeExpr efull e = do\n      manifoldIndex <- CMS.get\n      let newRecs = seekRecs manifoldIndex (typeFof efull)\n      return $ foldlNE (<>) newRecs e\n\n    serialExpr _ (SerializeS_ s xs) = do\n      manifoldIndex <- CMS.get\n      return $ seekRecs manifoldIndex (serialAstToType s) <> xs\n    serialExpr _ e = return $ foldlSE (<>) [] e\n\n    seekRecs :: Int -> TypeF -> [(FVar, Int, [(Key, TypeF)])]\n    seekRecs m (NamF _ v@(FV _ (CV \"struct\")) _ rs) = [(v, m, rs)] <> concatMap (seekRecs m . snd) rs\n    seekRecs m (NamF _ _ _ rs) = concatMap (seekRecs m . snd) rs\n    seekRecs m (FunF ts t) = concatMap (seekRecs m) (t : ts)\n    seekRecs m (AppF t ts) = concatMap (seekRecs m) (t : ts)\n    seekRecs _ (UnkF _) = []\n    seekRecs _ (VarF _) = []\n    seekRecs m (EffectF _ t) = seekRecs m t\n    seekRecs m (OptionalF t) = seekRecs m t\n    seekRecs _ (NatLitF _) = []\n\nunifyRecords ::\n  [ ( FVar\n    , Int\n    , [(Key, TypeF)]\n    )\n  ] ->\n  RecMap\nunifyRecords xs =\n  zipWith (\\i ((v, ks), es) -> ((v, ks), RecEntry (structName i v) es)) [1 ..]\n    . map (\\((v, ks), rss) -> ((v, ks), map unifyField (transpose (map snd rss))))\n    . groupSort\n    . unique\n    $ [((v, map fst es), (m, es)) | (v, m, es) <- xs]\n\nstructName :: Int -> FVar -> MDoc\nstructName i (FV v (CV \"struct\")) = \"mlc_\" <> pretty v <> \"_\" <> pretty i\nstructName _ (FV _ v) = pretty v\n\nunifyField :: [(Key, TypeF)] -> (Key, Maybe TypeF)\nunifyField [] = error \"Empty field\"\nunifyField rs@((v, _) : _)\n  | not (all ((== v) . fst) rs) =\n      error $ \"Bad record - unequal fields: \" <> show (unique rs)\n  | otherwise = case unique (map snd rs) of\n      [t] -> (v, Just t)\n      _ -> (v, Nothing)\n\n-- | A dispatch table entry: manifold ID and argument count.\ndata DispatchEntry = DispatchEntry\n  { dispatchId :: Int\n  , dispatchArgCount :: Int\n  }\n  deriving (Show, Eq, Ord, Generic)\n\ninstance Binary DispatchEntry\n\n{- | Extract local dispatch entries from serial manifolds.\nSkips manifolds marked as remote workers.\n-}\nextractLocalDispatch :: [SerialManifold] -> [DispatchEntry]\nextractLocalDispatch = catMaybes . map localEntry\n  where\n    localEntry (SerialManifold _ _ _ HeadManifoldFormRemoteWorker _) = Nothing\n    localEntry (SerialManifold i _ form _ _) = Just $ DispatchEntry i (getSize form)\n\n    getSize :: ManifoldForm (Or TypeS TypeF) TypeS -> Int\n    getSize = sum . abilist (\\_ _ -> 1) (\\_ _ -> 1)\n\n-- | Extract remote dispatch entries by walking the AST.\nextractRemoteDispatch :: [SerialManifold] -> [DispatchEntry]\nextractRemoteDispatch = map (uncurry DispatchEntry) . unique . concatMap getRemotes\n  where\n    getRemotes :: SerialManifold -> [(Int, Int)]\n    getRemotes = runIdentity . foldSerialManifoldM (defaultValue {opSerialExprM = getRemoteSE})\n\n    getRemoteSE ::\n      SerialExpr_ [(Int, Int)] [(Int, Int)] [(Int, Int)] [(Int, Int)] [(Int, Int)] ->\n      Identity [(Int, Int)]\n    getRemoteSE (AppPoolS_ _ (PoolCall i _ (RemoteCall _) _) xss) = return $ (i, length xss) : concat xss\n    getRemoteSE x = return $ foldlSE mappend mempty x\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Grammars/Macro.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Grammars.Macro\nDescription : Expand parameters in concrete types\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.CodeGenerator.Grammars.Macro\n  ( expandMacro\n  ) where\n\nimport Data.Text (Text)\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.Data.Text as MT\nimport Text.Parsec (Parsec, runParser, eof, many1, getState)\nimport Text.Parsec.Char (noneOf, string, digit)\nimport Text.Parsec.Text ()\n\ntype Parser = Parsec Text ParserState\n\nnewtype ParserState = ParserState {stateParameters :: [Text]}\n\nexpandMacro :: Text -> [Text] -> Text\nexpandMacro t [] = t\nexpandMacro t ps =\n  case runParser (pBase <* eof) (ParserState ps) \"typemacro\" t of\n    Left err' -> error (show err')\n    Right es -> es\n\npBase :: Parser Text\npBase = MT.concat <$> many1 (pChar <|> pMacro)\n\npChar :: Parser Text\npChar = MT.pack <$> many1 (noneOf ['$'])\n\npMacro :: Parser Text\npMacro = do\n  xs <- stateParameters <$> getState\n  _ <- string \"$\"\n  n <- read <$> many1 digit\n  -- index is 1-based\n  let i = n - 1\n  return (xs !! i)\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Grammars/Translator/Generic.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE QuasiQuotes #-}\n{-# LANGUAGE TemplateHaskell #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Grammars.Translator.Generic\nDescription : Descriptor-driven translator for dynamically-typed languages\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nGeneric translator that generates pool code for dynamically-typed interpreted\nlanguages based on a LangDescriptor. All language-specific behavior is driven\nby descriptor fields -- no hardcoded language-specific code.\n-}\nmodule Morloc.CodeGenerator.Grammars.Translator.Generic\n  ( translate\n  , preprocess\n  , CodegenManifest (..)\n  , printProgram\n  ) where\n\nimport qualified Data.Aeson as Aeson\nimport qualified Data.Binary as Binary\nimport qualified Data.ByteString.Lazy as BL\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Data.Text.Encoding as TE\nimport Morloc.CodeGenerator.Grammars.Common\nimport Morloc.CodeGenerator.Grammars.Translator.Imperative\n  ( IAccessor (..)\n  , IExpr (..)\n  , IProgram (..)\n  , IStmt (..)\n  , IndexM\n  , LowerConfig (..)\n  , buildProgram\n  , defaultDeserialize\n  , defaultFoldRules\n  , defaultSerialize\n  )\nimport Morloc.CodeGenerator.Grammars.Translator.PseudoCode (pseudocodeSerialManifold)\nimport Morloc.CodeGenerator.LanguageDescriptor\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.Config as MC\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.DataFiles as DF\nimport qualified Morloc.LangRegistry as LR\nimport qualified Morloc.Language as ML\nimport qualified Morloc.Version as MV\nimport Morloc.Monad (asks, gets, newIndex, runIndex)\nimport qualified Morloc.Monad as MM\nimport Morloc.Quasi\nimport qualified System.Directory as Dir\nimport qualified System.Exit as Exit\nimport System.IO (hClose, openBinaryTempFile)\nimport qualified System.Process as Proc\n\n-- | Simple template substitution: replace {{key}} with value\nsubstituteT :: Text -> [(Text, Text)] -> Text\nsubstituteT = foldl (\\t (k, v) -> T.replace (\"{{\" <> k <> \"}}\") v t)\n\npreprocess :: SerialManifold -> MorlocMonad SerialManifold\npreprocess = return . invertSerialManifold\n\ntranslate :: Lang -> [Source] -> [SerialManifold] -> MorlocMonad Script\ntranslate lang srcs es = do\n  desc <- loadDescriptorForLang lang\n  case ldCodegenCommand desc of\n    Just cmd -> translateExternal cmd lang desc srcs es\n    Nothing -> translateBuiltin lang desc srcs es\n\n-- | Translate using the built-in generic renderer.\ntranslateBuiltin :: Lang -> LangDescriptor -> [Source] -> [SerialManifold] -> MorlocMonad Script\ntranslateBuiltin lang desc srcs es = do\n  home <- pretty <$> asks MC.configHome\n  lib <- MT.pack <$> asks MC.configLibrary\n  let opt = home <> \"/opt\"\n\n  -- translate source imports\n  includeDocs <-\n    mapM\n      (translateSource desc)\n      (unique . mapMaybe srcPath $ srcs)\n\n  debugLog (vsep (map pseudocodeSerialManifold es) <> \"\\n\")\n\n  -- build src name function\n  let srcNamer =\n        if ldQualifiedImports desc\n          then qualifiedSrcName lib\n          else \\src -> pretty (srcName src)\n\n  -- add language-specific preamble from registry\n  registry <- gets stateLangRegistry\n  let preambleTemplates = case LR.lookupLang (ML.langName lang) registry of\n        Just entry -> LR.lrePreamble entry\n        Nothing -> []\n  homeDir <- asks MC.configHome\n  let preambleDocs = map (substitutePreamble home lib opt homeDir) preambleTemplates\n\n  let allSources = preambleDocs ++ includeDocs\n      mDocs = map (translateSegment desc srcNamer) es\n      program = buildProgram allSources mDocs es\n\n  let code = printProgram desc program\n  let exefile = ML.makeExecutablePoolName lang\n  let rendered = T.replace \"__MORLOC_VERSION__\" (MT.pack MV.versionStr) (render code)\n\n  poolSubdir <- getPoolSubdir\n\n  return $\n    Script\n      { scriptBase = \"pool\"\n      , scriptLang = lang\n      , scriptCode = \".\" :/ Dir \"pools\" [Dir poolSubdir [File exefile (Code rendered)]]\n      , scriptMake = []\n      }\n  where\n    substitutePreamble :: MDoc -> Text -> MDoc -> Path -> Text -> MDoc\n    substitutePreamble homeDoc libText optDoc _homeDir t =\n      pretty\n        . T.replace \"{{home}}\" (render homeDoc)\n        . T.replace \"{{lib}}\" libText\n        . T.replace \"{{opt}}\" (render optDoc)\n        $ t\n\n-- | Translate using an external codegen tool.\ntranslateExternal ::\n  Text -> Lang -> LangDescriptor -> [Source] -> [SerialManifold] -> MorlocMonad Script\ntranslateExternal cmd lang desc srcs es = do\n  home <- asks MC.configHome\n  lib <- MT.pack <$> asks MC.configLibrary\n\n  includeDocs <-\n    mapM\n      (translateSource desc)\n      (unique . mapMaybe srcPath $ srcs)\n\n  debugLog (vsep (map pseudocodeSerialManifold es) <> \"\\n\")\n\n  let srcNamer =\n        if ldQualifiedImports desc\n          then qualifiedSrcName lib\n          else \\src -> pretty (srcName src)\n\n  let mDocs = map (translateSegment desc srcNamer) es\n      program = buildProgram includeDocs mDocs es\n\n  -- find the lang.yaml path for the codegen tool\n  let langYamlPath = home </> \"lang\" </> T.unpack (ML.langName lang) </> \"lang.yaml\"\n\n  -- serialize IProgram to a temp file\n  tmpDir <- liftIO Dir.getTemporaryDirectory\n  (tmpPath, tmpHandle) <- liftIO $ openBinaryTempFile tmpDir \"iprogram.bin\"\n  liftIO $ do\n    BL.hPut tmpHandle (Binary.encode program)\n    hClose tmpHandle\n\n  -- invoke the codegen command: cmd lang.yaml iprogram.bin\n  let cmdStr = T.unpack cmd\n  (exitCode, stdoutStr, stderrStr) <-\n    liftIO $\n      Proc.readCreateProcessWithExitCode\n        (Proc.proc cmdStr [langYamlPath, tmpPath])\n        \"\"\n\n  -- clean up temp file\n  liftIO $ Dir.removeFile tmpPath\n\n  case exitCode of\n    Exit.ExitFailure code' ->\n      MM.throwSystemError $\n        \"External codegen '\"\n          <> pretty cmd\n          <> \"' failed with exit code \"\n          <> pretty code'\n          <> \":\\n\"\n          <> pretty stderrStr\n    Exit.ExitSuccess -> do\n      -- parse the codegen manifest from stdout\n      let manifest = Aeson.decodeStrict (TE.encodeUtf8 (T.pack stdoutStr)) :: Maybe CodegenManifest\n      case manifest of\n        Nothing ->\n          MM.throwSystemError $\n            \"External codegen '\"\n              <> pretty cmd\n              <> \"' produced invalid manifest on stdout\"\n        Just m -> do\n          let exefile = ML.makeExecutablePoolName lang\n              poolContent = T.replace \"__MORLOC_VERSION__\" (MT.pack MV.versionStr) (cgmPoolCode m)\n              buildCmds = map (SysRun . Code) (cgmBuildCommands m)\n          poolSubdir <- getPoolSubdir\n          return $\n            Script\n              { scriptBase = \"pool\"\n              , scriptLang = lang\n              , scriptCode = \".\" :/ Dir \"pools\" [Dir poolSubdir [File exefile (Code poolContent)]]\n              , scriptMake = buildCmds\n              }\n\n-- | Manifest returned by an external codegen tool on stdout.\ndata CodegenManifest = CodegenManifest\n  { cgmPoolCode :: Text\n  -- ^ rendered pool file content\n  , cgmBuildCommands :: [Text]\n  -- ^ build commands to run after writing files\n  }\n  deriving (Show)\n\ninstance Aeson.FromJSON CodegenManifest where\n  parseJSON = Aeson.withObject \"CodegenManifest\" $ \\v ->\n    CodegenManifest\n      <$> v Aeson..: \"pool_code\"\n      <*> (v Aeson..:? \"build_commands\" Aeson..!= [])\n\ninstance Aeson.ToJSON CodegenManifest where\n  toJSON m =\n    Aeson.object\n      [ \"pool_code\" Aeson..= cgmPoolCode m\n      , \"build_commands\" Aeson..= cgmBuildCommands m\n      ]\n\n{- | Load the language descriptor for a language.\nTries embedded lang.yaml first, then falls back to filesystem.\nIf the pool template is empty, loads it from the embedded or filesystem pool file.\n-}\nloadDescriptorForLang :: Lang -> MorlocMonad LangDescriptor\nloadDescriptorForLang lang = do\n  let name = ML.langName lang\n      ext = ML.langExtension lang\n  desc <- loadDescriptorByName name\n  -- if pool template is empty, load from embedded or filesystem pool file\n  if T.null (ldPoolTemplate desc)\n    then do\n      poolText <- loadPoolTemplate name ext\n      return desc {ldPoolTemplate = poolText}\n    else return desc\n  where\n    loadDescriptorByName :: T.Text -> MorlocMonad LangDescriptor\n    loadDescriptorByName name =\n      case lookup (T.unpack name) [(n, DF.embededFileText ef) | (n, ef) <- DF.langRegistryFiles] of\n        Just yamlText -> case loadLangDescriptorFromText yamlText of\n          Left err ->\n            MM.throwSystemError $\n              \"Failed to parse embedded lang.yaml for \" <> pretty name <> \": \" <> pretty err\n          Right desc -> return desc\n        Nothing -> do\n          -- try filesystem\n          home <- asks MC.configHome\n          let descPath = home </> \"lang\" </> T.unpack name </> \"lang.yaml\"\n          result <- liftIO $ loadLangDescriptor descPath\n          case result of\n            Left err ->\n              MM.throwSystemError $\n                \"Failed to load language descriptor for \" <> pretty name <> \": \" <> pretty err\n            Right desc -> return desc\n\n    loadPoolTemplate :: T.Text -> String -> MorlocMonad T.Text\n    loadPoolTemplate name ext =\n      -- try embedded pool template first\n      case lookupEmbeddedPool name of\n        Just t -> return t\n        Nothing -> do\n          -- try filesystem\n          home <- asks MC.configHome\n          let poolPath = home </> \"lang\" </> T.unpack name </> \"pool.\" <> ext\n          liftIO $ MT.readFile poolPath\n\n    lookupEmbeddedPool :: T.Text -> Maybe T.Text\n    lookupEmbeddedPool \"py\" = Just $ DF.embededFileText (DF.poolTemplateGeneric \"py\")\n    lookupEmbeddedPool \"r\" = Just $ DF.embededFileText (DF.poolTemplateGeneric \"r\")\n    lookupEmbeddedPool \"cpp\" = Just $ DF.embededFileText (DF.poolTemplate \"cpp\")\n    lookupEmbeddedPool _ = Nothing\n\n{- | Get the pool subdirectory name from the module name.\nThis ensures each program gets its own pool directory (e.g., pools/foo/).\n-}\ngetPoolSubdir :: MorlocMonad String\ngetPoolSubdir = MM.getModuleName\n\ndebugLog :: Doc ann -> MorlocMonad ()\ndebugLog d = do\n  verbosity <- gets stateVerbosity\n  when (verbosity > 0) $ (liftIO . putDoc) d\n\ntranslateSource :: LangDescriptor -> Path -> MorlocMonad MDoc\ntranslateSource desc p = do\n  let p' = MT.stripPrefixIfPresent \"./\" (MT.pack p)\n      p'' = if ldIncludeRelToFile desc then \"../\" <> p' else p'\n  if ldQualifiedImports desc\n    then do\n      lib <- MT.pack <$> asks MC.configLibrary\n      let tmpl = ldImportTemplate desc\n          ns = render (makeNamespace lib p)\n          modPath = render (makeImportPath lib p)\n      return . pretty $\n        substituteT\n          tmpl\n          [ (\"namespace\", ns)\n          , (\"module_path\", modPath)\n          ]\n    else do\n      let tmpl = ldImportTemplate desc\n      return . pretty $ substituteT tmpl [(\"path\", p'')]\n\n-- | Qualify a source function name with its module path.\nqualifiedSrcName :: Text -> Source -> MDoc\nqualifiedSrcName lib src = case srcPath src of\n  Nothing -> pretty $ srcName src\n  (Just path) -> makeNamespace lib path <> \".\" <> pretty (srcName src)\n\nmakeNamespace :: Text -> Path -> MDoc\nmakeNamespace lib =\n  pretty\n    . MT.liftToText (map toLower')\n    . MT.replace \"/\" \"_\"\n    . MT.replace \"-\" \"_\"\n    . MT.replace \".\" \"_\"\n    . MT.stripPrefixIfPresent \"/\"\n    . MT.stripPrefixIfPresent \"./\"\n    . MT.stripPrefixIfPresent lib\n    . MT.liftToText dropExtensions\n    . MT.pack\n  where\n    toLower' c = if c >= 'A' && c <= 'Z' then toEnum (fromEnum c + 32) else c\n    dropExtensions = reverse . drop 1 . dropWhile (/= '.') . reverse\n\nmakeImportPath :: Text -> Path -> MDoc\nmakeImportPath lib =\n  pretty\n    . MT.liftToText (map toLower')\n    . MT.replace \"/\" \".\"\n    . MT.stripPrefixIfPresent \"/\"\n    . MT.stripPrefixIfPresent \"./\"\n    . MT.stripPrefixIfPresent lib\n    . MT.liftToText dropExtensions\n    . MT.pack\n  where\n    toLower' c = if c >= 'A' && c <= 'Z' then toEnum (fromEnum c + 32) else c\n    dropExtensions = reverse . drop 1 . dropWhile (/= '.') . reverse\n\ntranslateSegment :: LangDescriptor -> (Source -> MDoc) -> SerialManifold -> MDoc\ntranslateSegment desc srcNamer m0 =\n  let cfg = genericLowerConfig desc srcNamer\n   in renderPoolDocs $ runIndex 0 (foldWithSerialManifoldM (defaultFoldRules cfg) m0)\n\n-- | Build a LowerConfig from a LangDescriptor and a source name function\ngenericLowerConfig :: LangDescriptor -> (Source -> MDoc) -> LowerConfig IndexM\ngenericLowerConfig desc srcNamer = cfg\n  where\n    cfg =\n      LowerConfig\n        { lcSrcName = srcNamer\n        , lcTypeOf = \\_ -> return Nothing\n        , lcSerialAstType = \\_ -> return Nothing\n        , lcDeserialAstType = \\_ -> return Nothing\n        , lcRawDeserialAstType = \\_ -> return Nothing\n        , lcTypeMOf = \\_ -> return Nothing\n        , lcPackerName = srcNamer\n        , lcUnpackerName = srcNamer\n        , lcRecordAccessor = genericRecordAccessor desc\n        , lcDeserialRecordAccessor = \\_ k v -> case ldKeyAccess desc of\n            \"double_bracket\" -> v <> \"[[\" <> dquotes (pretty k) <> \"]]\"\n            _ -> v <> \"[\" <> dquotes (pretty k) <> \"]\"\n        , lcTupleAccessor = \\i v -> case ldIndexStyle desc of\n            ZeroBracket -> v <> \"[\" <> pretty i <> \"]\"\n            OneBracket -> v <> \"[\" <> pretty (i + 1) <> \"]\"\n            OneDoubleBracket -> [idoc|#{v}[[#{pretty (i + 1)}]]|]\n        , lcNewIndex = newIndex\n        , lcPrintExpr = genericPrintExpr desc\n        , lcPrintStmt = genericPrintStmt desc\n        , lcEvalPattern = \\t p xs -> return $ genericEvalPattern desc t p xs\n        , lcListConstructor = \\v _ es -> case ldListStyle desc of\n            BracketList -> list es\n            FunctionCallList -> pretty (ldGenericListFn desc) <> tupled es\n            TypeDependentList -> case v of\n              (FV _ (CV typeName))\n                | typeName `elem` ldAtomicTypes desc -> pretty (ldAtomicListFn desc) <> tupled es\n              _ -> pretty (ldGenericListFn desc) <> tupled es\n        , lcTupleConstructor = \\_ -> case ldTupleConstructor desc of\n            \"\" -> tupled\n            name -> \\es -> pretty name <> tupled es\n        , lcRecordConstructor = \\_ _ _ _ rs ->\n            return $\n              defaultValue\n                { poolExpr =\n                    pretty (ldRecordConstructor desc)\n                      <> tupled [makeRecordKey desc k <+> pretty (ldRecordSeparator desc) <+> v | (k, v) <- rs]\n                }\n        , lcForeignCall = \\socketFile mid args ->\n            let midDoc = pretty mid <> pretty (ldForeignCallIntSuffix desc)\n                argsDoc = case ldListStyle desc of\n                  BracketList -> list args\n                  _ -> pretty (ldGenericListFn desc) <> tupled args\n             in pretty (ldForeignCallFn desc)\n                  <> tupled [makeGenericSocketPath desc socketFile, midDoc, argsDoc]\n        , lcRemoteCall = genericRemoteCall desc\n        , lcMakeIf = genericMakeIf desc cfg\n        , lcMakeLet = \\namer i _ e1 e2 -> return $ genericMakeLet desc namer i e1 e2\n        , lcReturn = \\e -> pretty $ substituteT (ldReturnTemplate desc) [(\"expr\", render e)]\n        , lcMakeDoBlock = \\_ stmts expr ->\n            let suspendBlock = ldDoBlockBlock desc\n             in if T.null suspendBlock\n                  then\n                    -- pass stmts through, wrap expr only\n                    let wrapped = pretty $ substituteT (ldDoBlockExpr desc) [(\"expr\", render expr)]\n                     in (stmts, wrapped)\n                  else\n                    -- absorb stmts into block\n                    case stmts of\n                      [] ->\n                        let wrapped = pretty $ substituteT (ldDoBlockExpr desc) [(\"expr\", render expr)]\n                         in ([], wrapped)\n                      _ ->\n                        let body = render (vsep (stmts <> [expr]))\n                            wrapped = pretty $ substituteT suspendBlock [(\"body\", body)]\n                         in ([], wrapped)\n        , lcSerialize = defaultSerialize cfg\n        , lcDeserialize = \\_ -> defaultDeserialize cfg\n        , lcMakeFunction = \\mname args _ priorLines body headForm ->\n            let makeExt (Just HeadManifoldFormRemoteWorker) = \"_remote\"\n                makeExt _ = \"\"\n                fullName = render (mname <> makeExt headForm)\n                argsText = render (hsep (punctuate \",\" (map argNamer args)))\n                header =\n                  pretty $\n                    substituteT\n                      (ldFuncDefHeader desc)\n                      [ (\"name\", fullName)\n                      , (\"args\", argsText)\n                      ]\n                wrapError [] = []\n                wrapError xs =\n                  let openLine = ldErrorWrapOpen desc\n                      closeLines = ldErrorWrapClose desc\n                   in if T.null openLine\n                        then xs\n                        else\n                          let tryBlock = nest 4 (vsep (pretty openLine : xs))\n                              exceptBlock =\n                                nest 4 . vsep $\n                                  map (\\l -> pretty $ substituteT l [(\"name\", fullName)]) closeLines\n                           in [vsep [tryBlock, exceptBlock]]\n             in return . Just $ case ldBlockStyle desc of\n                  IndentBlock ->\n                    nest 4 (vsep [header, vsep (wrapError priorLines), body])\n                  BraceBlock ->\n                    block 4 header (vsep $ priorLines <> [body])\n                  EndKeywordBlock ->\n                    let endKw = ldBlockEnd desc\n                     in vsep [header, indent 4 (vsep $ priorLines <> [body]), pretty endKw]\n        , lcMakeLambda = \\mname contextArgs boundArgs ->\n            let tmpl = ldPartialTemplate desc\n                fnText = render mname\n                allArgsList = contextArgs <> boundArgs\n                fnWithCtxList = mname : contextArgs\n                fnWithCtx = render (hsep (punctuate \",\" fnWithCtxList))\n                allArgs = render (hsep (punctuate \",\" allArgsList))\n                boundArgsText = render (hsep (punctuate \",\" boundArgs))\n             in pretty $\n                  substituteT\n                    tmpl\n                    [ (\"fn\", fnText)\n                    , (\"fn_with_context\", fnWithCtx)\n                    , (\"all_args\", allArgs)\n                    , (\"bound_args\", boundArgsText)\n                    ]\n        }\n\n{- | Record access: for languages with ldDictStyleRecords=True,\nuse bracket access for dict/NamRecord and dot access for others.\n-}\ngenericRecordAccessor :: LangDescriptor -> NamType -> CVar -> MDoc -> MDoc -> MDoc\ngenericRecordAccessor desc namType constructor record field\n  | ldDictStyleRecords desc = case (namType, constructor) of\n      (NamTable, CV \"dict\") -> record <> \"[\" <> dquotes field <> \"]\"\n      (NamRecord, _) -> record <> \"[\" <> dquotes field <> \"]\"\n      _ -> record <> \".\" <> field\n  | otherwise = case ldFieldAccess desc of\n      DotAccess -> record <> \".\" <> field\n      DollarAccess -> record <> \"$\" <> field\n\n-- | Remote call with template-driven resource packing\ngenericRemoteCall :: LangDescriptor -> MDoc -> Int -> RemoteResources -> [MDoc] -> IndexM PoolDocs\ngenericRemoteCall desc socketFile mid res args = do\n  let resMem = T.pack . show $ fromMaybe (-1) (remoteResourcesMemory res)\n      resTime = T.pack . show $ maybe (-1) unTimeInSeconds (remoteResourcesTime res)\n      resCPU = T.pack . show $ fromMaybe (-1) (remoteResourcesThreads res)\n      resGPU = T.pack . show $ fromMaybe 0 (remoteResourcesGpus res)\n      remoteFn =\n        if T.null (ldRemoteCallFn desc)\n          then pretty (ldForeignCallFn desc)\n          else pretty (ldRemoteCallFn desc)\n      resPacked =\n        pretty $\n          substituteT\n            (ldResourcePackTemplate desc)\n            [ (\"mem\", resMem)\n            , (\"time\", resTime)\n            , (\"cpus\", resCPU)\n            , (\"gpus\", resGPU)\n            ]\n      call =\n        remoteFn\n          <> tupled [pretty mid, dquotes socketFile, dquotes \".morloc-cache\", resPacked, list args]\n  return $ defaultValue {poolExpr = call}\n\n-- | Format a record key: bare identifier or quoted string\nmakeRecordKey :: LangDescriptor -> Key -> MDoc\nmakeRecordKey desc k\n  | ldQuoteRecordKeys desc = dquotes (pretty k)\n  | otherwise = pretty k\n\nmakeGenericSocketPath :: LangDescriptor -> MDoc -> MDoc\nmakeGenericSocketPath desc socketFileBasename =\n  let tmpl = ldSocketPathTemplate desc\n      socketText = render (dquotes socketFileBasename)\n   in pretty $ substituteT tmpl [(\"socket\", socketText)]\n\n-- | Generic if/else rendering for descriptor-driven languages\ngenericMakeIf :: LangDescriptor -> LowerConfig IndexM -> NativeExpr -> PoolDocs -> PoolDocs -> PoolDocs -> IndexM PoolDocs\ngenericMakeIf desc cfg _ condDocs thenDocs elseDocs = do\n  idx <- lcNewIndex cfg\n  let v = helperNamer idx\n      condE = poolExpr condDocs\n      thenE = poolExpr thenDocs\n      elseE = poolExpr elseDocs\n      thenBlock = poolPriorLines thenDocs <> [v <+> pretty (ldAssignOp desc) <+> thenE]\n      elseBlock = poolPriorLines elseDocs <> [v <+> pretty (ldAssignOp desc) <+> elseE]\n      ifStmt = case ldBlockStyle desc of\n        IndentBlock ->\n          vsep\n            [ v <+> pretty (ldAssignOp desc) <+> pretty (ldNullLiteral desc)\n            , nest 4 (vsep (\"if\" <+> condE <> \":\" : thenBlock))\n            , nest 4 (vsep (\"else:\" : elseBlock))\n            ]\n        BraceBlock ->\n          vsep\n            [ v <+> pretty (ldAssignOp desc) <+> pretty (ldNullLiteral desc) <> \";\"\n            , \"if\" <+> parens condE <+> \"{\"\n            , indent 4 (vsep thenBlock)\n            , \"} else {\"\n            , indent 4 (vsep elseBlock)\n            , \"}\"\n            ]\n        EndKeywordBlock ->\n          let endKw = ldBlockEnd desc\n           in vsep\n                [ v <+> \"<-\" <+> \"if\" <+> parens condE <+> \"{\"\n                , indent 4 (vsep (poolPriorLines thenDocs <> [thenE]))\n                , \"} else {\"\n                , indent 4 (vsep (poolPriorLines elseDocs <> [elseE]))\n                , \"}\" <+> pretty endKw\n                ]\n  return $\n    PoolDocs\n      { poolCompleteManifolds = poolCompleteManifolds condDocs <> poolCompleteManifolds thenDocs <> poolCompleteManifolds elseDocs\n      , poolExpr = v\n      , poolPriorLines = poolPriorLines condDocs <> [ifStmt]\n      , poolPriorExprs = poolPriorExprs condDocs <> poolPriorExprs thenDocs <> poolPriorExprs elseDocs\n      }\n\ngenericMakeLet :: LangDescriptor -> (Int -> MDoc) -> Int -> PoolDocs -> PoolDocs -> PoolDocs\ngenericMakeLet desc namer i (PoolDocs ms1' e1' rs1 pes1) (PoolDocs ms2' e2' rs2 pes2) =\n  let rs = rs1 ++ [namer i <+> pretty (ldAssignOp desc) <+> e1'] ++ rs2\n   in PoolDocs (ms1' <> ms2') e2' rs (pes1 <> pes2)\n\n-- | Generic expression printer driven by descriptor\ngenericPrintExpr :: LangDescriptor -> IExpr -> MDoc\ngenericPrintExpr desc = go\n  where\n    go (IVar v) = pretty v\n    go (IBoolLit True) = pretty (ldBoolTrue desc)\n    go (IBoolLit False) = pretty (ldBoolFalse desc)\n    go (INullLit _) = pretty (ldNullLiteral desc)\n    go (IIntLit _ i) = viaShow i <> pretty (ldIntLiteralSuffix desc)\n    go (IRealLit _ r) = viaShow r\n    go (IStrLit s) = textEsc' s\n    go (IListLit es) = case ldListStyle desc of\n      BracketList -> list (map go es)\n      FunctionCallList -> pretty (ldGenericListFn desc) <> tupled (map go es)\n      TypeDependentList -> pretty (ldGenericListFn desc) <> tupled (map go es)\n    go (ITupleLit es) = case ldTupleConstructor desc of\n      \"\" -> tupled (map go es)\n      name -> pretty name <> tupled (map go es)\n    go (IRecordLit _ _ entries) =\n      pretty (ldRecordConstructor desc)\n        <> tupled [makeRecordKey desc k <+> pretty (ldRecordSeparator desc) <+> go e | (k, e) <- entries]\n    go (IAccess e (IIdx i)) = case ldIndexStyle desc of\n      ZeroBracket -> go e <> \"[\" <> pretty i <> \"]\"\n      OneBracket -> go e <> \"[\" <> pretty (i + 1) <> \"]\"\n      OneDoubleBracket -> go e <> \"[[\" <> pretty (i + 1) <> \"]]\"\n    go (IAccess e (IKey k)) = case ldKeyAccess desc of\n      \"double_bracket\" -> go e <> \"[[\" <> dquotes (pretty k) <> \"]]\"\n      _ -> go e <> \"[\" <> dquotes (pretty k) <> \"]\"\n    go (IAccess e (IField f)) = case ldFieldAccess desc of\n      DotAccess -> go e <> \".\" <> pretty f\n      DollarAccess -> go e <> \"$\" <> pretty f\n    go (ISerCall schema e) =\n      pretty (ldSerializeFn desc) <> \"(\" <> go e <> \", \" <> dquotes (pretty schema) <> \")\"\n    go (IDesCall schema _ e) =\n      pretty (ldDeserializeFn desc) <> \"(\" <> go e <> \", \" <> dquotes (pretty schema) <> \")\"\n    go (IPack packer e) = pretty packer <> parens (go e)\n    go (ICall f Nothing argGroups) =\n      pretty f <> hsep (map (tupled . map go) argGroups)\n    go (ICall f (Just _) argGroups) =\n      pretty f <> hsep (map (tupled . map go) argGroups)\n    go (IForeignCall _ _ _) = error \"use IRawExpr for generic foreign calls\"\n    go (IRemoteCall _ _ _ _) = error \"use IRawExpr for generic remote calls\"\n    go (ILambda args body) =\n      let argsText = render (hsep (punctuate \",\" (map pretty args)))\n          bodyText = render (go body)\n       in pretty $\n            substituteT\n              (ldLambdaTemplate desc)\n              [ (\"args\", argsText)\n              , (\"body\", bodyText)\n              ]\n    go (IRawExpr d) = pretty d\n    go (IDoBlock e) =\n      pretty $ substituteT (ldDoBlockExpr desc) [(\"expr\", render (go e))]\n    go (IEval e) = go e <> \"()\"\n    go (IIntrinsicHash schema e) =\n      let prefix = ldIntrinsicPrefix desc\n       in pretty prefix <> \"mlc_hash(\" <> go e <> \", \" <> dquotes (pretty schema) <> \")\"\n    go (IIntrinsicSave fmt schema e path) =\n      let prefix = ldIntrinsicPrefix desc\n          saveFn :: Text\n          saveFn = case fmt of\n            \"json\"     -> \"mlc_save_json\"\n            \"voidstar\" -> \"mlc_save_voidstar\"\n            _          -> \"mlc_save\"\n       in pretty prefix <> pretty saveFn <> \"(\" <> go e <> \", \" <> dquotes (pretty schema) <> \", \" <> go path <> \")\"\n    go (IIntrinsicLoad schema _ path) =\n      let prefix = ldIntrinsicPrefix desc\n       in pretty prefix <> \"mlc_load(\" <> dquotes (pretty schema) <> \", \" <> go path <> \")\"\n    go (IIntrinsicShow schema e) =\n      let prefix = ldIntrinsicPrefix desc\n       in pretty prefix <> \"mlc_show(\" <> go e <> \", \" <> dquotes (pretty schema) <> \")\"\n    go (IIntrinsicRead schema _ e) =\n      let prefix = ldIntrinsicPrefix desc\n       in pretty prefix <> \"mlc_read(\" <> dquotes (pretty schema) <> \", \" <> go e <> \")\"\n\n-- | Generic statement printer driven by descriptor\ngenericPrintStmt :: LangDescriptor -> IStmt -> MDoc\ngenericPrintStmt desc = go\n  where\n    printE = genericPrintExpr desc\n\n    go (IAssign v Nothing e) = pretty v <+> pretty (ldAssignOp desc) <+> printE e\n    go (IAssign v (Just _) e) = pretty v <+> pretty (ldAssignOp desc) <+> printE e\n    go (IMapList resultVar _ iterVar collection bodyStmts yieldExpr) =\n      case ldMapStyle desc of\n        LoopAppend ->\n          vsep\n            [ pretty resultVar <+> pretty (ldAssignOp desc) <+> \"[]\"\n            , nest\n                4\n                ( vsep\n                    ( (\"for\" <+> pretty iterVar <+> \"in\" <+> printE collection <> \":\")\n                        : map go bodyStmts\n                        ++ [pretty resultVar <> \".append(\" <> printE yieldExpr <> \")\"]\n                    )\n                )\n            ]\n        ApplyCallback ->\n          block\n            4\n            ( pretty resultVar <+> pretty (ldAssignOp desc) <+> \"lapply(\"\n                <> printE collection\n                <> \",\" <+> \"function(\"\n                <> pretty iterVar\n                <> \")\"\n            )\n            (vsep (map go bodyStmts ++ [printE yieldExpr]))\n            <> \")\"\n        ListComprehension ->\n          vsep\n            [ pretty resultVar <+> pretty (ldAssignOp desc) <+> \"[\"\n                <> printE yieldExpr\n                  <+> \"for\"\n                  <+> pretty iterVar\n                  <+> \"in\"\n                  <+> printE collection\n                <> \"]\"\n            ]\n    go (IReturn e) = pretty $ substituteT (ldReturnTemplate desc) [(\"expr\", render (printE e))]\n    go (IExprStmt e) = printE e\n    go (IIf resultVar _ condExpr thenStmts thenExpr elseStmts elseExpr) =\n      case ldBlockStyle desc of\n        IndentBlock ->\n          vsep\n            [ pretty resultVar <+> pretty (ldAssignOp desc) <+> pretty (ldNullLiteral desc)\n            , nest 4 (vsep (\"if\" <+> printE condExpr <> \":\" : map go thenStmts ++ [pretty resultVar <+> pretty (ldAssignOp desc) <+> printE thenExpr]))\n            , nest 4 (vsep (\"else:\" : map go elseStmts ++ [pretty resultVar <+> pretty (ldAssignOp desc) <+> printE elseExpr]))\n            ]\n        BraceBlock ->\n          vsep\n            [ pretty resultVar <+> pretty (ldAssignOp desc) <+> pretty (ldNullLiteral desc) <> \";\"\n            , \"if\" <+> parens (printE condExpr) <+> \"{\"\n            , indent 4 (vsep (map go thenStmts ++ [pretty resultVar <+> pretty (ldAssignOp desc) <+> printE thenExpr <> \";\"]))\n            , \"} else {\"\n            , indent 4 (vsep (map go elseStmts ++ [pretty resultVar <+> pretty (ldAssignOp desc) <+> printE elseExpr <> \";\"]))\n            , \"}\"\n            ]\n        EndKeywordBlock ->\n          vsep\n                [ pretty resultVar <+> \"<-\" <+> \"if\" <+> parens (printE condExpr) <+> \"{\"\n                , indent 4 (vsep (map go thenStmts ++ [printE thenExpr]))\n                , \"} else {\"\n                , indent 4 (vsep (map go elseStmts ++ [printE elseExpr]))\n                , \"}\"\n                ]\n    go (IFunDef _ _ _ _) = error \"IFunDef not yet implemented for generic printer\"\n\n-- | Assemble a complete pool file from descriptor, template, and IProgram\nprintProgram :: LangDescriptor -> IProgram -> MDoc\nprintProgram desc prog =\n  format\n    (ldPoolTemplate desc)\n    (ldBreakMarker desc)\n    sections\n  where\n    sections =\n      [ vsep (map pretty (ipSources prog))\n      , vsep (map pretty (ipManifolds prog))\n      , templateDispatch\n      ]\n\n    templateDispatch = vsep [localD, remoteD]\n      where\n        localD =\n          let hdr = ldDispatchLocalHeader desc\n              entryTmpl = ldDispatchLocalEntry desc\n              ftr = ldDispatchLocalFooter desc\n              entries =\n                map\n                  ( \\(DispatchEntry i _) ->\n                      pretty $\n                        substituteT\n                          entryTmpl\n                          [ (\"mid\", T.pack (show i))\n                          , (\"name\", render (manNamer i))\n                          ]\n                  )\n                  (ipLocalDispatch prog)\n           in if T.null hdr && T.null entryTmpl\n                then mempty\n                else\n                  align . vsep $\n                    filter\n                      (not . isEmpty)\n                      [ pretty hdr\n                      , vsep entries\n                      , pretty ftr\n                      ]\n\n        remoteD =\n          let hdr = ldDispatchRemoteHeader desc\n              entryTmpl = ldDispatchRemoteEntry desc\n              ftr = ldDispatchRemoteFooter desc\n              entries =\n                map\n                  ( \\(DispatchEntry i _) ->\n                      pretty $\n                        substituteT\n                          entryTmpl\n                          [ (\"mid\", T.pack (show i))\n                          , (\"name\", render (manNamer i))\n                          ]\n                  )\n                  (ipRemoteDispatch prog)\n           in if T.null hdr && T.null entryTmpl\n                then mempty\n                else\n                  align . vsep $\n                    filter\n                      (not . isEmpty)\n                      [ pretty hdr\n                      , vsep entries\n                      , pretty ftr\n                      ]\n\n    isEmpty d = T.null (render d)\n\n-- | Generic pattern evaluation\ngenericEvalPattern :: LangDescriptor -> TypeF -> Pattern -> [MDoc] -> MDoc\ngenericEvalPattern desc _ (PatternText firstStr fragments) xs =\n  case ldPatternStyle desc of\n    FStringPattern ->\n      let qt = pretty (ldQuoteTerminator desc)\n          esc = escapeQuotes (ldQuoteTerminator desc) (ldQuoteTerminatorEsc desc) . escapeStringLit\n      in \"f\" <> qt <> hcat (pretty (esc firstStr) : [(\"{\" <> x <> \"}\" <> pretty (esc s)) | (x, s) <- zip xs fragments]) <> qt\n    ConcatCall ->\n      let qt = ldQuoteTerminator desc\n          esc = escapeQuotes qt (ldQuoteTerminatorEsc desc) . escapeStringLit\n          wrap t = pretty qt <> pretty t <> pretty qt\n      in pretty (ldConcatFn desc)\n        <> tupled (wrap (esc firstStr) : concat [[x, wrap (esc s)] | (x, s) <- zip xs fragments])\n-- getters (always have exactly one argument)\ngenericEvalPattern desc _ (PatternStruct (ungroup -> [ss])) [m] =\n  hcat (m : map (writeSelector desc) ss)\ngenericEvalPattern desc _ (PatternStruct (ungroup -> sss)) [m] =\n  case ldTupleConstructor desc of\n    \"\" -> tupled [hcat (m : map (writeSelector desc) ss) | ss <- sss]\n    name -> pretty name <> tupled [hcat (m : map (writeSelector desc) ss) | ss <- sss]\n-- setters\ngenericEvalPattern desc t0 (PatternStruct s0) (m0 : xs0) =\n  patternSetter makeTuple makeRecord accessTuple accessRecord m0 t0 s0 xs0\n  where\n    makeTuple _ xs = case ldTupleConstructor desc of\n      \"\" -> tupled xs\n      name -> pretty name <> tupled xs\n\n    makeRecord (NamF _ _ _ rs) xs =\n      pretty (ldRecordConstructor desc)\n        <> tupled\n          [makeRecordKey desc k <+> pretty (ldRecordSeparator desc) <+> x | (k, x) <- zip (map fst rs) xs]\n    makeRecord _ _ = error \"Incorrectly typed record setter\"\n\n    accessTuple _ m i = case ldIndexStyle desc of\n      ZeroBracket -> m <> \"[\" <> pretty i <> \"]\"\n      OneBracket -> m <> \"[\" <> pretty (i + 1) <> \"]\"\n      OneDoubleBracket -> m <> \"[[\" <> pretty (i + 1) <> \"]]\"\n\n    accessRecord (NamF o (FV _ cname) _ _) d k =\n      genericRecordAccessor desc o cname d (pretty k)\n    accessRecord t _ _ = error $ \"Invalid record type: \" <> show t\ngenericEvalPattern _ _ (PatternStruct _) [] = error \"Unreachable empty pattern\"\n\nwriteSelector :: LangDescriptor -> Either Int Text -> MDoc\nwriteSelector desc (Right k) = case ldKeyAccess desc of\n  \"double_bracket\" -> \"[[\" <> dquotes (pretty k) <> \"]]\"\n  _ -> \"[\" <> dquotes (pretty k) <> \"]\"\nwriteSelector desc (Left i) = case ldIndexStyle desc of\n  ZeroBracket -> \"[\" <> pretty i <> \"]\"\n  OneBracket -> \"[\" <> pretty (i + 1) <> \"]\"\n  OneDoubleBracket -> \"[[\" <> pretty (i + 1) <> \"]]\"\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Grammars/Translator/Imperative.hs",
    "content": "{-# LANGUAGE DeriveGeneric #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Grammars.Translator.Imperative\nDescription : Imperative IR for two-phase translation\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProvides an imperative IR for code generation. The fold from the compiler's\nSerialManifold/NativeExpr AST to IR is written once; per-language printers\nconvert IR to source code.\n-}\nmodule Morloc.CodeGenerator.Grammars.Translator.Imperative\n  ( -- * IR types\n    IStmt (..)\n  , IExpr (..)\n  , IParam (..)\n  , IType (..)\n  , IAccessor (..)\n  , IFunMeta (..)\n  , IProgram (..)\n\n    -- * IType rendering and conversion\n  , renderIType\n  , renderITypeText\n  , toIType\n\n    -- * Program construction\n  , buildProgram\n  , buildProgramM\n\n    -- * Lowering: serialize/deserialize expansion\n  , expandSerialize\n  , expandDeserialize\n\n    -- * Expression lowering\n  , lowerSerialExpr\n  , lowerNativeExpr\n\n    -- * Manifold lowering\n  , lowerSerialManifold\n  , lowerNativeManifold\n  , defaultFoldRules\n\n    -- * Full lowering config\n  , LowerConfig (..)\n\n    -- * Default serialize/deserialize (for Python/R)\n  , defaultSerialize\n  , defaultDeserialize\n\n    -- * Re-exported type alias\n  , IndexM\n  ) where\n\nimport Control.Monad.Identity (Identity)\nimport qualified Control.Monad.State as CMS\nimport Data.Binary (Binary)\nimport Data.Scientific (Scientific)\nimport Data.Text (Text)\nimport GHC.Generics (Generic)\nimport Morloc.CodeGenerator.Grammars.Common\n  ( DispatchEntry (..)\n  , PoolDocs (..)\n  , argNamer\n  , extractLocalDispatch\n  , extractRemoteDispatch\n  , helperNamer\n  , manNamer\n  , mergePoolDocs\n  , nvarNamer\n  , provideClosure\n  , svarNamer\n  )\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.CodeGenerator.Serial (isSerializable, serialAstToMsgpackSchema)\nimport Morloc.Data.Doc\nimport Morloc.Monad (IndexState)\n\n-- Statements\ndata IStmt\n  = IFunDef IFunMeta [IParam] [IStmt] IExpr\n  | IAssign Text (Maybe IType) IExpr\n  | -- | resultVar, resultType, iterVar, collection, bodyStmts, yieldExpr\n    -- Python/C++: resultVar = []; for iterVar in collection: bodyStmts; resultVar.append(yieldExpr)\n    -- R: resultVar <- lapply(collection, function(iterVar) { bodyStmts; yieldExpr })\n    -- resultType is used by C++ for typed declarations; Python/R pass Nothing\n    IMapList Text (Maybe IType) Text IExpr [IStmt] IExpr\n  | -- | resultVar, resultType, condition, thenStmts, thenExpr, elseStmts, elseExpr\n    -- Semantics: declare resultVar; if cond { thenStmts; resultVar = thenExpr } else { elseStmts; resultVar = elseExpr }\n    -- For elif chains, elseStmts contains another IIf and elseExpr is unused (IVar resultVar)\n    IIf Text (Maybe IType) IExpr [IStmt] IExpr [IStmt] IExpr\n  | IReturn IExpr\n  | IExprStmt IExpr\n\n-- Expressions\ndata IExpr\n  = ICall Text (Maybe [IType]) [[IExpr]]\n  | IVar Text\n  | IBoolLit Bool\n  | IIntLit (Maybe Text) Integer  -- concrete type name (e.g. \"int64_t\"), Nothing for default\n  | IRealLit (Maybe Text) Scientific  -- concrete type name (e.g. \"float\"), Nothing for default\n  | IStrLit Text\n  | INullLit (Maybe IType)\n  | IListLit [IExpr]\n  | ITupleLit [IExpr]\n  | IRecordLit NamType FVar [(Key, IExpr)]\n  | IAccess IExpr IAccessor\n  | ISerCall Text IExpr -- put_value(schema, expr)\n  | IDesCall Text (Maybe IType) IExpr -- get_value[<T>](schema, expr); type used by C++ template\n  | IForeignCall Text Int [IExpr]\n  | IRemoteCall Text Int RemoteResources [IExpr]\n  | ILambda [Text] IExpr\n  | IPack Text IExpr -- packer(expr)\n  | IRawExpr Text\n  | IDoBlock IExpr -- effect: lambda wrapping expression\n  | IEval IExpr -- eval: call effect with no args\n  | IIntrinsicSave Text Text IExpr IExpr -- format, schema, data, path\n  | IIntrinsicLoad Text (Maybe IType) IExpr -- schema, returnType, path -> result (nullable)\n  | IIntrinsicHash Text IExpr -- schema, data -> hex string\n  | IIntrinsicShow Text IExpr -- schema, data -> JSON string\n  | IIntrinsicRead Text (Maybe IType) IExpr -- schema, returnType, json_string -> typed data (nullable)\n\ndata IParam = IParam Text (Maybe IType)\n\n{- | Structured type representation for the IR.\nCarries enough information for any language's printer to render typed declarations.\n-}\ndata IType\n  = -- | Primitive type: \"int\", \"double\", \"std::string\", \"bool\", etc.\n    ITyPrim Text\n  | -- | List/vector type\n    ITyList IType\n  | -- | Tuple type\n    ITyTuple [IType]\n  | -- | Record: name, type params, fields\n    ITyRecord Text [IType] [(Key, IType)]\n  | -- | Function type\n    ITyFunction [IType] IType\n  | -- | Unit/void type\n    ITyUnit\n  | -- | Named type with parameters (e.g., Map k v)\n    ITyNamed Text [IType]\n  | -- | Serialized data (e.g., const uint8_t* in C++)\n    ITySerial\n  | -- | Optional type (e.g., std::optional<T> in C++)\n    ITyOptional IType\n  | -- | Type not known or not needed (Python, R)\n    ITyUnknown\n  deriving (Show, Eq, Ord, Generic)\n\ninstance Binary IType\n\n{- | Render an IType to an MDoc for use in code generation output.\nThis is used by printers that need the type as rendered text.\n-}\nrenderIType :: IType -> MDoc\nrenderIType (ITyPrim t) = pretty t\nrenderIType (ITyList t) = \"std::vector<\" <> renderIType t <> \">\"\nrenderIType (ITyTuple ts) = \"std::tuple<\" <> hcat (punctuate \", \" (map renderIType ts)) <> \">\"\nrenderIType (ITyRecord name [] _) = pretty name\nrenderIType (ITyRecord name params _) = pretty name <> encloseSep \"<\" \">\" \",\" (map renderIType params)\nrenderIType (ITyFunction args ret) = \"std::function<\" <> renderIType ret <> tupled (map renderIType args) <> \">\"\nrenderIType ITyUnit = \"void\"\nrenderIType (ITyNamed name []) = pretty name\nrenderIType (ITyNamed name params) = pretty name <> encloseSep \"<\" \">\" \",\" (map renderIType params)\nrenderIType (ITyOptional t) = \"std::optional<\" <> renderIType t <> \">\"\nrenderIType ITySerial = \"const uint8_t*\"\nrenderIType ITyUnknown = \"auto\"\n\n-- | Render an IType to Text (for macro expansion, etc.)\nrenderITypeText :: IType -> Text\nrenderITypeText = render . renderIType\n\n{- | Convert a rendered MDoc type to an opaque IType.\nThis is a transitional bridge: preserves the rendered form as ITyNamed.\nC++ currently produces rendered MDoc types; this wraps them for the new IR.\n-}\ntoIType :: MDoc -> IType\ntoIType d = ITyNamed (render d) []\n\ndata IAccessor\n  = IIdx Int\n  | IKey Key\n  | IField Text\n\ndata IFunMeta = IFunMeta\n  { ifName :: Text\n  , ifReturnType :: Maybe IType\n  , ifHeadForm :: Maybe HeadManifoldForm\n  }\n\ndata IProgram = IProgram\n  { ipSources :: [Text]\n  , ipManifolds :: [Text]\n  , ipLocalDispatch :: [DispatchEntry]\n  , ipRemoteDispatch :: [DispatchEntry]\n  }\n  deriving (Generic)\n\ninstance Binary IProgram\n\n-- | Build an IProgram from pre-rendered sources and manifolds (pure, for Python/R).\nbuildProgram :: [MDoc] -> [MDoc] -> [SerialManifold] -> IProgram\nbuildProgram sources manifolds es =\n  IProgram\n    { ipSources = map render sources\n    , ipManifolds = map render manifolds\n    , ipLocalDispatch = extractLocalDispatch es\n    , ipRemoteDispatch = extractRemoteDispatch es\n    }\n\n-- | Build an IProgram monadically (for C++ where translateSegment runs in a monad).\nbuildProgramM :: (Monad m) => [MDoc] -> [SerialManifold] -> (SerialManifold -> m MDoc) -> m IProgram\nbuildProgramM sources es translateSeg = do\n  manifolds <- mapM translateSeg es\n  return $ buildProgram sources manifolds es\n\n-- | Per-language configuration for lowering\ndata LowerConfig m = LowerConfig\n  { lcSrcName :: Source -> MDoc\n  , lcTypeOf :: TypeF -> m (Maybe IType)\n  , lcSerialAstType :: SerialAST -> m (Maybe IType)\n  -- ^ type of a SerialAST for serialization (used for C++ typed declarations)\n  , lcDeserialAstType :: SerialAST -> m (Maybe IType)\n  -- ^ type of a SerialAST for deserialization (for C++, uses shallowType)\n  , lcRawDeserialAstType :: SerialAST -> m (Maybe IType)\n  -- ^ raw deserialized type for the _get_value template parameter (C++ specific)\n  -- For records, C++ converts to std::tuple; for others, uses serialAstToType\n  , lcTypeMOf :: TypeM -> m (Maybe IType)\n  , lcPackerName :: Source -> MDoc\n  , lcUnpackerName :: Source -> MDoc\n  , lcRecordAccessor :: NamType -> CVar -> MDoc -> MDoc -> MDoc\n  , lcDeserialRecordAccessor :: Int -> Key -> MDoc -> MDoc\n  -- ^ How to access record fields during deserialization.\n  -- For Python/R: same as lcRecordAccessor (by key name)\n  -- For C++: uses tuple indexing since records are deserialized as tuples\n  , lcTupleAccessor :: Int -> MDoc -> MDoc\n  , lcNewIndex :: m Int\n  , -- expression/arg lowering fields\n    lcPrintExpr :: IExpr -> MDoc\n  , lcPrintStmt :: IStmt -> MDoc\n  , lcEvalPattern :: TypeF -> Pattern -> [MDoc] -> m MDoc\n  -- ^ Pattern evaluation (language-specific because patterns use\n  -- language-specific constructors for tuples/records)\n  , lcListConstructor :: FVar -> TypeF -> [MDoc] -> MDoc\n  -- ^ Build a list literal from rendered elements. R needs FVar to choose c() vs list().\n  , lcTupleConstructor :: FVar -> [MDoc] -> MDoc\n  , lcRecordConstructor :: TypeF -> NamType -> FVar -> [TypeF] -> [(Key, MDoc)] -> m PoolDocs\n  -- ^ Build a record literal. C++ needs type lookup + counter for temp var.\n  , lcForeignCall :: MDoc -> Int -> [MDoc] -> MDoc\n  , lcRemoteCall :: MDoc -> Int -> RemoteResources -> [MDoc] -> m PoolDocs\n  , lcMakeLet :: (Int -> MDoc) -> Int -> Maybe TypeF -> PoolDocs -> PoolDocs -> m PoolDocs\n  -- ^ Let binding assembly at the PoolDocs level\n  , lcReturn :: MDoc -> MDoc\n  , lcMakeIf :: NativeExpr -> PoolDocs -> PoolDocs -> PoolDocs -> m PoolDocs\n  -- ^ origExpr, condDocs, thenDocs, elseDocs -> result PoolDocs\n  -- Produces language-specific if/else structure using a temp result variable\n  , lcMakeDoBlock :: TypeF -> [MDoc] -> MDoc -> ([MDoc], MDoc)\n  -- ^ prior statements -> return expression -> (hoisted statements, effect expression)\n  , lcSerialize :: MDoc -> SerialAST -> m PoolDocs\n  , lcDeserialize :: TypeF -> MDoc -> SerialAST -> m (MDoc, [MDoc])\n  , -- manifold lowering fields\n    lcMakeFunction ::\n      MDoc ->\n      [Arg TypeM] ->\n      TypeM ->\n      [MDoc] ->\n      MDoc ->\n      Maybe HeadManifoldForm ->\n      m (Maybe MDoc)\n  -- ^ name, all args, manifold type, priorLines, body, headForm\n  -- Returns Nothing if dedup'd (C++), Just funcDef otherwise\n  , lcMakeLambda :: MDoc -> [MDoc] -> [MDoc] -> MDoc\n  -- ^ name, contextArgs, boundArgs → partial application expression\n  }\n\n{- | Expand serialization into IR statements.\nReturns (final expression representing the serialized value, prior statements).\n-}\nexpandSerialize :: (Monad m) => LowerConfig m -> MDoc -> SerialAST -> m (IExpr, [IStmt])\nexpandSerialize cfg v0 s0 = do\n  (stmts, vExpr) <- go v0 s0\n  let schema = render $ serialAstToMsgpackSchema s0\n  return (ISerCall schema vExpr, stmts)\n  where\n    go v s\n      | isSerializable s = return ([], IRawExpr (render v))\n      | otherwise = construct v s\n\n    construct v (SerialPack _ (p, s)) =\n      let unpacker = lcUnpackerName cfg (typePackerReverse p)\n       in go (unpacker <> parens v) s\n    construct v lst@(SerialList _ s) = do\n      idx <- lcNewIndex cfg\n      resultType <- lcSerialAstType cfg lst\n      let v' = render $ helperNamer idx\n          iterVar = render $ \"i\" <> pretty idx\n      (before, x) <- go (\"i\" <> pretty idx) s\n      return ([IMapList v' resultType iterVar (IRawExpr (render v)) before x], IVar v')\n    construct v tup@(SerialTuple _ ss) = do\n      results <- zipWithM (\\i s -> go (lcTupleAccessor cfg i v) s) [0 ..] ss\n      let (befores, exprs) = unzip results\n      idx <- lcNewIndex cfg\n      typeM <- lcSerialAstType cfg tup\n      let v' = render $ helperNamer idx\n      return (concat befores ++ [IAssign v' typeM (ITupleLit exprs)], IVar v')\n    construct v obj@(SerialObject namType fv@(FV _ constructor) _ rs) = do\n      let accessor = lcRecordAccessor cfg namType constructor\n      results <- mapM (\\(key, s) -> go (accessor v (pretty key)) s) rs\n      let (befores, exprs) = unzip results\n      idx <- lcNewIndex cfg\n      typeM <- lcSerialAstType cfg obj\n      let v' = render $ helperNamer idx\n      return\n        ( concat befores ++ [IAssign v' typeM (IRecordLit namType fv (zip (map fst rs) exprs))]\n        , IVar v'\n        )\n    construct _ _ = error \"Unreachable in expandSerialize\"\n\n{- | Expand deserialization into IR statements.\nReturns (final expression representing the deserialized value, prior statements).\n-}\nexpandDeserialize :: (Monad m) => LowerConfig m -> MDoc -> SerialAST -> m (IExpr, [IStmt])\nexpandDeserialize cfg v0 s0\n  | isSerializable s0 = do\n      let schema = render $ serialAstToMsgpackSchema s0\n      desType <- lcDeserialAstType cfg s0\n      return (IDesCall schema desType (IRawExpr (render v0)), [])\n  | otherwise = do\n      idx <- lcNewIndex cfg\n      rawType <- lcRawDeserialAstType cfg s0\n      let rawvar = render $ helperNamer idx\n          schema = render $ serialAstToMsgpackSchema s0\n      (x, befores) <- check (helperNamer idx) s0\n      return (x, IAssign rawvar rawType (IDesCall schema rawType (IRawExpr (render v0))) : befores)\n  where\n    check v s\n      | isSerializable s = return (IRawExpr (render v), [])\n      | otherwise = construct v s\n\n    construct v (SerialPack _ (p, s')) = do\n      (x, before) <- check v s'\n      let packer = render $ lcPackerName cfg (typePackerForward p)\n      return (IPack packer x, before)\n    construct v lst@(SerialList _ s) = do\n      idx <- lcNewIndex cfg\n      resultType <- lcDeserialAstType cfg lst\n      let v' = render $ helperNamer idx\n          iterVar = render $ \"i\" <> pretty idx\n      (x, before) <- check (\"i\" <> pretty idx) s\n      return (IVar v', [IMapList v' resultType iterVar (IRawExpr (render v)) before x])\n    construct v tup@(SerialTuple _ ss) = do\n      results <- zipWithM (\\i s -> check (lcTupleAccessor cfg i v) s) [0 ..] ss\n      let (exprs, befores) = unzip results\n      typeM <- lcDeserialAstType cfg tup\n      v' <- (render . helperNamer) <$> lcNewIndex cfg\n      return (IVar v', concat befores ++ [IAssign v' typeM (ITupleLit exprs)])\n    construct v (SerialObject namType fv@(FV _ _) _ rs) = do\n      let accessor = lcDeserialRecordAccessor cfg\n      results <- zipWithM (\\i (k, s) -> check (accessor i k v) s) [0 ..] rs\n      let (exprs, befores) = unzip results\n      typeM <- lcDeserialAstType cfg (SerialObject namType fv [] rs)\n      idx <- lcNewIndex cfg\n      let v' = render $ helperNamer idx\n      return\n        (IVar v', concat befores ++ [IAssign v' typeM (IRecordLit namType fv (zip (map fst rs) exprs))])\n    construct _ _ = error \"Unreachable in expandDeserialize\"\n\n-- | Lower a serial expression to PoolDocs via the IR.\nlowerSerialExpr ::\n  (Monad m) =>\n  LowerConfig m ->\n  SerialExpr ->\n  SerialExpr_ PoolDocs PoolDocs PoolDocs (TypeS, PoolDocs) (TypeM, PoolDocs) ->\n  m PoolDocs\nlowerSerialExpr _ _ (ManS_ f) = return f\nlowerSerialExpr cfg _ (AppPoolS_ _ (PoolCall mid (Socket _ _ socketFile) ForeignCall args) _) =\n  return $ defaultValue {poolExpr = lcForeignCall cfg socketFile mid (map argNamer args)}\nlowerSerialExpr cfg _ (AppPoolS_ _ (PoolCall mid (Socket _ _ socketFile) (RemoteCall res) args) _) =\n  lcRemoteCall cfg socketFile mid res (map argNamer args)\nlowerSerialExpr _ _ (AppRecS_ _ mid es) = do\n  return $ mergePoolDocs ((<>) (manNamer mid) . tupled) es\nlowerSerialExpr cfg _ (AppForeignRecS_ _ mid (Socket _ _ socketFile) es) = do\n  return $ mergePoolDocs (\\args -> lcForeignCall cfg socketFile mid args) es\nlowerSerialExpr cfg _ (ReturnS_ x) = return $ x {poolExpr = lcReturn cfg (poolExpr x)}\nlowerSerialExpr cfg _ (SerialLetS_ i e1 e2) =\n  lcMakeLet cfg svarNamer i Nothing e1 e2\nlowerSerialExpr cfg (NativeLetS _ (typeFof -> t) _) (NativeLetS_ i e1 e2) =\n  lcMakeLet cfg nvarNamer i (Just t) e1 e2\nlowerSerialExpr cfg _ (NativeLetS_ i e1 e2) =\n  lcMakeLet cfg nvarNamer i Nothing e1 e2\nlowerSerialExpr _ _ (LetVarS_ _ i) = return $ defaultValue {poolExpr = svarNamer i}\nlowerSerialExpr _ _ (BndVarS_ _ i) = return $ defaultValue {poolExpr = svarNamer i}\nlowerSerialExpr cfg _ (SerializeS_ s e) = do\n  se <- lcSerialize cfg (poolExpr e) s\n  return $ e {poolExpr = poolExpr se, poolPriorLines = poolPriorLines e <> poolPriorLines se}\n\n-- | Lower a native expression to PoolDocs via the IR.\nlowerNativeExpr ::\n  (Monad m) =>\n  LowerConfig m ->\n  NativeExpr ->\n  NativeExpr_ PoolDocs PoolDocs PoolDocs (TypeS, PoolDocs) (TypeM, PoolDocs) ->\n  m PoolDocs\n-- Binary operator: emit (lhs op rhs) instead of function call\nlowerNativeExpr _ _ (AppExeN_ _ (SrcCallP src) (map snd -> [lhs, rhs]))\n  | srcOperator src =\n      return $ mergePoolDocs (\\xs -> case xs of [l, r] -> parens (l <+> pretty (unSrcName (srcName src)) <+> r); _ -> error \"binary operator requires exactly 2 args\") [lhs, rhs]\nlowerNativeExpr cfg _ (AppExeN_ _ (SrcCallP src) (map snd -> es)) = do\n  let handleFunctionArgs =\n        (<>) (lcSrcName cfg src)\n          . hsep\n          . map tupled\n          . provideClosure src\n  return $ mergePoolDocs handleFunctionArgs es\nlowerNativeExpr cfg _ (AppExeN_ t (PatCallP p) xs) = do\n  let es = map snd xs\n  patResult <- lcEvalPattern cfg t p (map poolExpr es)\n  return $\n    PoolDocs\n      { poolCompleteManifolds = concatMap poolCompleteManifolds es\n      , poolExpr = patResult\n      , poolPriorLines = concatMap poolPriorLines es\n      , poolPriorExprs = concatMap poolPriorExprs es\n      }\nlowerNativeExpr _ _ (AppExeN_ _ (LocalCallP idx) (map snd -> es)) = do\n  return $ mergePoolDocs ((<>) (nvarNamer idx) . tupled) es\nlowerNativeExpr _ _ (AppExeN_ _ (RecCallP mid _) (map snd -> es)) = do\n  return $ mergePoolDocs ((<>) (manNamer mid) . tupled) es\nlowerNativeExpr _ _ (ManN_ call) = return call\nlowerNativeExpr cfg _ (ReturnN_ x) =\n  return $ x {poolExpr = lcReturn cfg (poolExpr x)}\nlowerNativeExpr cfg _ (SerialLetN_ i x1 x2) = lcMakeLet cfg svarNamer i Nothing x1 x2\nlowerNativeExpr cfg (NativeLetN _ (typeFof -> t) _) (NativeLetN_ i x1 x2) = lcMakeLet cfg nvarNamer i (Just t) x1 x2\nlowerNativeExpr cfg _ (NativeLetN_ i x1 x2) = lcMakeLet cfg nvarNamer i Nothing x1 x2\nlowerNativeExpr _ _ (LetVarN_ _ i) = return $ defaultValue {poolExpr = nvarNamer i}\nlowerNativeExpr _ _ (BndVarN_ _ i) = return $ defaultValue {poolExpr = nvarNamer i}\nlowerNativeExpr cfg _ (DeserializeN_ t s x) = do\n  (deserialized, assignments) <- lcDeserialize cfg t (poolExpr x) s\n  return $\n    x\n      { poolExpr = deserialized\n      , poolPriorLines = poolPriorLines x <> assignments\n      }\nlowerNativeExpr cfg _ (ExeN_ _ (SrcCallP src)) = return $ defaultValue {poolExpr = lcSrcName cfg src}\nlowerNativeExpr _ _ (ExeN_ _ (PatCallP _)) = error \"Unreachable: patterns are always used in applications\"\nlowerNativeExpr _ _ (ExeN_ _ (LocalCallP idx)) = return $ defaultValue {poolExpr = nvarNamer idx}\nlowerNativeExpr _ _ (ExeN_ _ (RecCallP mid _)) = return $ defaultValue {poolExpr = manNamer mid}\nlowerNativeExpr cfg _ (ListN_ v t xs) = return $ mergePoolDocs (lcListConstructor cfg v t) xs\nlowerNativeExpr cfg _ (TupleN_ v xs) = return $ mergePoolDocs (lcTupleConstructor cfg v) xs\nlowerNativeExpr cfg origExpr (RecordN_ o v ps rs) = do\n  let es = map snd rs\n      recType = typeFof origExpr\n  rec' <- lcRecordConstructor cfg recType o v ps (zip (map fst rs) (map poolExpr es))\n  return $\n    rec'\n      { poolCompleteManifolds = concatMap poolCompleteManifolds es <> poolCompleteManifolds rec'\n      , poolPriorLines = concatMap poolPriorLines es <> poolPriorLines rec'\n      , poolPriorExprs = concatMap poolPriorExprs es <> poolPriorExprs rec'\n      }\nlowerNativeExpr cfg _ (LogN_ _ v) = return $ defaultValue {poolExpr = lcPrintExpr cfg (IBoolLit v)}\nlowerNativeExpr cfg _ (RealN_ (FV _ cv) v) = return $ defaultValue {poolExpr = lcPrintExpr cfg (IRealLit (Just (unCVar cv)) v)}\nlowerNativeExpr cfg _ (IntN_ (FV _ cv) v) = return $ defaultValue {poolExpr = lcPrintExpr cfg (IIntLit (Just (unCVar cv)) v)}\nlowerNativeExpr cfg _ (StrN_ _ v) = return $ defaultValue {poolExpr = lcPrintExpr cfg (IStrLit v)}\nlowerNativeExpr cfg _ (NullN_ fv) = do\n  mayT <- lcTypeOf cfg (VarF fv)\n  return $ defaultValue {poolExpr = lcPrintExpr cfg (INullLit mayT)}\nlowerNativeExpr cfg _ (DoBlockN_ t x) =\n  let (hoisted, effectExpr) = lcMakeDoBlock cfg t (poolPriorLines x) (poolExpr x)\n   in return\n        defaultValue\n          { poolExpr = effectExpr\n          , poolCompleteManifolds = poolCompleteManifolds x\n          , poolPriorLines = hoisted\n          , poolPriorExprs = poolPriorExprs x\n          }\nlowerNativeExpr cfg _ (EvalN_ _ x) = return $ x {poolExpr = lcPrintExpr cfg (IEval (IRawExpr (render (poolExpr x))))}\n-- CoerceToOptional is a noop in all target languages: T is a valid ?T\nlowerNativeExpr _ _ (CoerceN_ CoerceToOptional _ x) = return x\n-- CoerceToEffect wraps the value in a suspend (thunk/lambda)\nlowerNativeExpr cfg _ (CoerceN_ (CoerceToEffect _) _ x) =\n  return $ x {poolExpr = lcPrintExpr cfg (IDoBlock (IRawExpr (render (poolExpr x))))}\nlowerNativeExpr cfg origExpr (IfN_ _ condDocs thenDocs elseDocs) =\n  lcMakeIf cfg origExpr condDocs thenDocs elseDocs\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrHash (Just schema) [dataDocs]) =\n  return $ dataDocs {poolExpr = lcPrintExpr cfg (IIntrinsicHash schema (IRawExpr (render (poolExpr dataDocs))))}\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrSave (Just schema) [dataDocs, pathDocs]) =\n  let fmt = \"voidstar\"\n   in return $ mergePoolDocs (const $ lcPrintExpr cfg (IIntrinsicSave fmt schema (IRawExpr (render (poolExpr dataDocs))) (IRawExpr (render (poolExpr pathDocs))))) [dataDocs, pathDocs]\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrSaveM (Just schema) [dataDocs, pathDocs]) =\n  let fmt = \"msgpack\"\n   in return $ mergePoolDocs (const $ lcPrintExpr cfg (IIntrinsicSave fmt schema (IRawExpr (render (poolExpr dataDocs))) (IRawExpr (render (poolExpr pathDocs))))) [dataDocs, pathDocs]\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrSaveJ (Just schema) [dataDocs, pathDocs]) =\n  let fmt = \"json\"\n   in return $ mergePoolDocs (const $ lcPrintExpr cfg (IIntrinsicSave fmt schema (IRawExpr (render (poolExpr dataDocs))) (IRawExpr (render (poolExpr pathDocs))))) [dataDocs, pathDocs]\nlowerNativeExpr cfg origExpr (IntrinsicN_ _ IntrLoad (Just schema) [pathDocs]) = do\n  innerType <- case typeFof origExpr of\n    OptionalF t -> lcTypeOf cfg t\n    _ -> return Nothing\n  return $ pathDocs {poolExpr = lcPrintExpr cfg (IIntrinsicLoad schema innerType (IRawExpr (render (poolExpr pathDocs))))}\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrShow (Just schema) [dataDocs]) =\n  return $ dataDocs {poolExpr = lcPrintExpr cfg (IIntrinsicShow schema (IRawExpr (render (poolExpr dataDocs))))}\nlowerNativeExpr cfg origExpr (IntrinsicN_ _ IntrRead (Just schema) [strDocs]) = do\n  innerType <- case typeFof origExpr of\n    OptionalF t -> lcTypeOf cfg t\n    _ -> return Nothing\n  return $ strDocs {poolExpr = lcPrintExpr cfg (IIntrinsicRead schema innerType (IRawExpr (render (poolExpr strDocs))))}\n-- @schema and @typeof erase their argument: the result is a compile-time\n-- constant string (the schema or user-facing type name), already resolved\n-- into the Intrinsic node's schema slot by Serialize.hs. Emit it as a\n-- string literal and discard the data expression; the type of the\n-- argument is all that matters.\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrSchema (Just s) [dataDocs]) =\n  return $ dataDocs {poolExpr = lcPrintExpr cfg (IStrLit s)}\nlowerNativeExpr cfg _ (IntrinsicN_ _ IntrTypeof (Just s) [dataDocs]) =\n  return $ dataDocs {poolExpr = lcPrintExpr cfg (IStrLit s)}\nlowerNativeExpr _ _ (IntrinsicN_ _ intr _ _) =\n  error $ \"Runtime intrinsic @\" <> show intr <> \" reached code generation without schema\"\n\n{- | Lower a serial manifold to PoolDocs.\nReplaces translateManifold from Common.hs for serial manifolds.\n-}\nlowerSerialManifold ::\n  (Monad m) =>\n  LowerConfig m ->\n  SerialManifold ->\n  SerialManifold_ PoolDocs ->\n  m PoolDocs\nlowerSerialManifold cfg sm (SerialManifold_ m _ form headForm e) =\n  lowerManifold cfg m form (Just headForm) (typeMof sm) e\n\n{- | Lower a native manifold to PoolDocs.\nReplaces translateManifold from Common.hs for native manifolds.\n-}\nlowerNativeManifold ::\n  (Monad m) =>\n  LowerConfig m ->\n  NativeManifold ->\n  NativeManifold_ PoolDocs ->\n  m PoolDocs\nlowerNativeManifold cfg nm (NativeManifold_ m _ form e) =\n  lowerManifold cfg m form Nothing (typeMof nm) e\n\nlowerManifold ::\n  (Monad m, HasTypeM t) =>\n  LowerConfig m ->\n  Int ->\n  ManifoldForm (Or TypeS TypeF) t ->\n  Maybe HeadManifoldForm ->\n  TypeM ->\n  PoolDocs ->\n  m PoolDocs\nlowerManifold cfg m form headForm manifoldType (PoolDocs completeManifolds body priorLines priorExprs) = do\n  let args = typeMofForm form\n      mname = manNamer m\n  maybeNewManifold <- lcMakeFunction cfg mname args manifoldType priorLines body headForm\n  let call = case form of\n        (ManifoldPass _) -> mname\n        (ManifoldFull rs) -> mname <> tupled (map argNamer (typeMofRs rs))\n        (ManifoldPart rs vs) ->\n          lcMakeLambda\n            cfg\n            mname\n            (map argNamer (typeMofRs rs))\n            [argNamer (Arg i (typeMof t)) | Arg i t <- vs]\n  return $\n    PoolDocs\n      { poolCompleteManifolds = completeManifolds <> maybeToList maybeNewManifold\n      , poolExpr = call\n      , poolPriorLines = []\n      , poolPriorExprs = priorExprs\n      }\n\n-- | Bundle all six fold callbacks into a single FoldWithManifoldM record.\ndefaultFoldRules ::\n  (Monad m) =>\n  LowerConfig m ->\n  FoldWithManifoldM m PoolDocs PoolDocs PoolDocs PoolDocs (TypeS, PoolDocs) (TypeM, PoolDocs)\ndefaultFoldRules cfg =\n  FoldWithManifoldM\n    { opFoldWithSerialManifoldM = lowerSerialManifold cfg\n    , opFoldWithNativeManifoldM = lowerNativeManifold cfg\n    , opFoldWithSerialExprM = lowerSerialExpr cfg\n    , opFoldWithNativeExprM = lowerNativeExpr cfg\n    , opFoldWithSerialArgM = \\sr sa -> return $ case sa of\n        SerialArgManifold_ x -> (typeSof sr, x)\n        SerialArgExpr_ x -> (typeSof sr, x)\n    , opFoldWithNativeArgM = \\nr na -> return $ case na of\n        NativeArgManifold_ x -> (typeMof nr, x)\n        NativeArgExpr_ x -> (typeMof nr, x)\n    }\n\n-- | Default serialization for languages without custom PoolDocs logic (Python, R).\ndefaultSerialize :: (Monad m) => LowerConfig m -> MDoc -> SerialAST -> m PoolDocs\ndefaultSerialize cfg v s = do\n  (expr, stmts) <- expandSerialize cfg v s\n  return $\n    defaultValue\n      { poolExpr = lcPrintExpr cfg expr\n      , poolPriorLines = map (lcPrintStmt cfg) stmts\n      }\n\n-- | Default deserialization for languages without custom logic (Python, R).\ndefaultDeserialize :: (Monad m) => LowerConfig m -> MDoc -> SerialAST -> m (MDoc, [MDoc])\ndefaultDeserialize cfg v s = do\n  (expr, stmts) <- expandDeserialize cfg v s\n  return (lcPrintExpr cfg expr, map (lcPrintStmt cfg) stmts)\n\ntype IndexM = CMS.StateT IndexState Identity\n\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Grammars/Translator/PseudoCode.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Grammars.Translator.PseudoCode\nDescription : Pseudocode renderer for diagnostic output of manifold trees\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nRenders 'SerialManifold' and 'NativeManifold' trees as human-readable\npseudocode for debugging and diagnostic dumps. Not a real translator --\nproduces no executable output.\n-}\nmodule Morloc.CodeGenerator.Grammars.Translator.PseudoCode\n  ( pseudocodeNativeManifold\n  , pseudocodeSerialManifold\n  , pseudocodeSerialArg\n  , pseudocodeNativeArg\n  , pseudocodeSerialExpr\n  , pseudocodeNativeExpr\n  ) where\n\nimport qualified Control.Monad.Identity as MI\nimport Morloc.CodeGenerator.Grammars.Common\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.Data.Doc\n\nprettyFoldManifold ::\n  (Monad m) => FoldWithManifoldM m PoolDocs PoolDocs PoolDocs PoolDocs PoolDocs PoolDocs\nprettyFoldManifold =\n  FoldWithManifoldM\n    { opFoldWithSerialManifoldM = makeSerialManifold\n    , opFoldWithNativeManifoldM = makeNativeManifold\n    , opFoldWithSerialExprM = makeSerialExpr\n    , opFoldWithNativeExprM = makeNativeExpr\n    , opFoldWithSerialArgM = makeSerialArg\n    , opFoldWithNativeArgM = makeNativeArg\n    }\n  where\n    makeSerialManifold :: (Monad m) => SerialManifold -> SerialManifold_ PoolDocs -> m PoolDocs\n    makeSerialManifold _ (SerialManifold_ m _ form headForm x) =\n      return $ pseudoManifold (makeFunction \"SerialManifold\") makeLambda m form (Just headForm) x\n\n    makeNativeManifold :: (Monad m) => NativeManifold -> NativeManifold_ PoolDocs -> m PoolDocs\n    makeNativeManifold _ (NativeManifold_ m _ form x) =\n      return $ pseudoManifold (makeFunction \"NativeManifold\") makeLambda m form Nothing x\n\n    makeSerialExpr ::\n      (Monad m) => SerialExpr -> SerialExpr_ PoolDocs PoolDocs PoolDocs PoolDocs PoolDocs -> m PoolDocs\n    makeSerialExpr _ (ManS_ m) = return m\n    makeSerialExpr _ (AppPoolS_ t (PoolCall mid (Socket _ _ socketFile) remote _) args) = return $ mergePoolDocs makePoolCall args\n      where\n        makePoolCall xs' = case remote of\n          ForeignCall ->\n            parens (pretty t) <+> \"__foreign_call__\"\n              <> tupled [dquotes socketFile, dquotes (pretty mid), list xs']\n          (RemoteCall _) -> \"REMOTE_CALL\"\n    makeSerialExpr _ (AppRecS_ _ mid es) = return $ mergePoolDocs ((<>) (manNamer mid) . tupled) es\n    makeSerialExpr _ (AppForeignRecS_ _ mid (Socket _ _ socketFile) es) = return $ mergePoolDocs makeForeignRecCall es\n      where\n        makeForeignRecCall xs' =\n          \"__foreign_rec_call__\" <> tupled [dquotes socketFile, dquotes (pretty mid), list xs']\n    makeSerialExpr _ (ReturnS_ x) = return $ x {poolExpr = \"ReturnS(\" <> poolExpr x <> \")\"}\n    makeSerialExpr _ (SerialLetS_ i e1 e2) = return $ makeLet letNamerS \"SerialLetS\" i e1 e2\n    makeSerialExpr _ (NativeLetS_ i e1 e2) = return $ makeLet letNamerN \"NativeLetS\" i e1 e2\n    makeSerialExpr _ (LetVarS_ _ i) = return $ defaultValue {poolExpr = letNamerS i}\n    makeSerialExpr _ (BndVarS_ _ i) = return $ defaultValue {poolExpr = bndNamerS i}\n    makeSerialExpr _ (SerializeS_ _ e) = return $ e {poolExpr = \"SerializeS\" <> parens (poolExpr e)}\n\n    makeNativeExpr ::\n      (Monad m) => NativeExpr -> NativeExpr_ PoolDocs PoolDocs PoolDocs PoolDocs PoolDocs -> m PoolDocs\n    makeNativeExpr _ (AppExeN_ _ (SrcCallP src) xs) =\n      return $ mergePoolDocs ((<>) (pretty $ srcName src) . tupled) xs\n    makeNativeExpr _ (AppExeN_ _ (PatCallP pat) xs) =\n      return $ mergePoolDocs ((<>) (pretty pat) . tupled) xs\n    makeNativeExpr _ (AppExeN_ _ (LocalCallP idx) xs) =\n      return $ mergePoolDocs ((<>) (letNamerN idx) . tupled) xs\n    makeNativeExpr _ (AppExeN_ _ (RecCallP mid _) xs) =\n      return $ mergePoolDocs ((<>) (manNamer mid) . tupled) xs\n    makeNativeExpr _ (ManN_ call) = return call\n    makeNativeExpr _ (ReturnN_ x) =\n      return $ x {poolExpr = \"ReturnN(\" <> poolExpr x <> \")\"}\n    makeNativeExpr _ (SerialLetN_ i x1 x2) = return $ makeLet letNamerS \"SerialLetN\" i x1 x2\n    makeNativeExpr _ (NativeLetN_ i x1 x2) = return $ makeLet letNamerN \"NativeLetN\" i x1 x2\n    makeNativeExpr _ (LetVarN_ _ i) = return $ defaultValue {poolExpr = letNamerN i}\n    makeNativeExpr _ (BndVarN_ _ i) = return $ defaultValue {poolExpr = bndNamerN i}\n    makeNativeExpr _ (DeserializeN_ _ _ e) = return $ e {poolExpr = \"DeserializeN\" <> parens (poolExpr e)}\n    makeNativeExpr _ (ExeN_ _ (SrcCallP src)) = return $ defaultValue {poolExpr = pretty (srcName src)}\n    makeNativeExpr _ (ExeN_ _ (PatCallP pat)) = return $ defaultValue {poolExpr = pretty pat}\n    makeNativeExpr _ (ExeN_ _ (LocalCallP idx)) = return $ defaultValue {poolExpr = letNamerN idx}\n    makeNativeExpr _ (ExeN_ _ (RecCallP mid _)) = return $ defaultValue {poolExpr = manNamer mid}\n    makeNativeExpr _ (ListN_ _ _ xs) = return $ mergePoolDocs list xs\n    makeNativeExpr _ (TupleN_ _ xs) = return $ mergePoolDocs tupled xs\n    makeNativeExpr _ (RecordN_ _ _ _ rs) =\n      return $ mergePoolDocs pyDict (map snd rs)\n      where\n        pyDict es' =\n          let entries' = zipWith (\\k v -> pretty k <> \"=\" <> v) (map fst rs) es'\n           in \"OrderedDict\" <> tupled entries'\n    makeNativeExpr _ (LogN_ _ v) = return $ defaultValue {poolExpr = if v then \"True\" else \"False\"}\n    makeNativeExpr _ (RealN_ _ v) = return $ defaultValue {poolExpr = viaShow v}\n    makeNativeExpr _ (IntN_ _ v) = return $ defaultValue {poolExpr = viaShow v}\n    makeNativeExpr _ (StrN_ _ v) = return $ defaultValue {poolExpr = dquotes $ pretty v}\n    makeNativeExpr _ (NullN_ _) = return $ defaultValue {poolExpr = \"None\"}\n    makeNativeExpr _ _ = return $ defaultValue {poolExpr = \"<unhandled>\"}\n\n    makeSerialArg :: (Monad m) => SerialArg -> SerialArg_ PoolDocs PoolDocs -> m PoolDocs\n    makeSerialArg _ (SerialArgManifold_ x) = return x\n    makeSerialArg _ (SerialArgExpr_ x) = return x\n\n    makeNativeArg :: (Monad m) => NativeArg -> NativeArg_ PoolDocs PoolDocs -> m PoolDocs\n    makeNativeArg _ (NativeArgManifold_ x) = return x\n    makeNativeArg _ (NativeArgExpr_ x) = return x\n\n    makeFunction :: MDoc -> MDoc -> [Arg TypeM] -> [MDoc] -> MDoc -> Maybe HeadManifoldForm -> MDoc\n    makeFunction manStr mname args priorLines body headForm =\n      block 4 def (vsep $ priorLines <> [body])\n      where\n        makeExt (Just HeadManifoldFormRemoteWorker) = \"_remote\"\n        makeExt _ = \"\"\n\n        def =\n          manStr <+> mname\n            <> makeExt headForm\n            <> tupled [argName r <+> \":\" <+> pretty t | r@(Arg _ t) <- args]\n            <> \":\"\n\n    makeLambda :: MDoc -> [MDoc] -> [MDoc] -> MDoc\n    makeLambda mname contextArgs boundArgs =\n      let functionCall = mname <> tupled (contextArgs <> boundArgs)\n       in \"lambda\" <+> tupled boundArgs <> \":\" <+> functionCall\n\n    makeLet :: (Int -> MDoc) -> MDoc -> Int -> PoolDocs -> PoolDocs -> PoolDocs\n    makeLet letNamer letStr i (PoolDocs ms1' e1' rs1 pes1) (PoolDocs ms2' e2' rs2 pes2) =\n      let rs = rs1 ++ [letStr <+> letNamer i <+> \"=\" <+> e1'] ++ rs2\n       in PoolDocs (ms1' <> ms2') e2' rs (pes1 <> pes2)\n\n    letNamerS :: Int -> MDoc\n    letNamerS i = \"letSvar_\" <> pretty i\n\n    letNamerN :: Int -> MDoc\n    letNamerN i = \"letNvar_\" <> pretty i\n\n    bndNamerS :: Int -> MDoc\n    bndNamerS i = \"bndSvar\" <> pretty i\n\n    bndNamerN :: Int -> MDoc\n    bndNamerN i = \"bndNvar\" <> pretty i\n\n    argName :: Arg TypeM -> MDoc\n    argName (Arg i (Native _)) = bndNamerN i\n    argName (Arg i _) = bndNamerS i\n\npseudoManifold ::\n  (HasTypeM t) =>\n  (MDoc -> [Arg TypeM] -> [MDoc] -> MDoc -> Maybe HeadManifoldForm -> MDoc) ->\n  (MDoc -> [MDoc] -> [MDoc] -> MDoc) ->\n  Int ->\n  ManifoldForm (Or TypeS TypeF) t ->\n  Maybe HeadManifoldForm ->\n  PoolDocs ->\n  PoolDocs\npseudoManifold makeFunc makeLam m form headForm (PoolDocs completeManifolds body priorLines priorExprs) =\n  let args = typeMofForm form\n      mname = manNamer m\n      newManifold = makeFunc mname args priorLines body headForm\n      call = case form of\n        (ManifoldPass _) -> mname\n        (ManifoldFull rs) -> mname <> tupled (map argNamer (typeMofRs rs))\n        (ManifoldPart rs vs) ->\n          makeLam\n            mname\n            (map argNamer (typeMofRs rs))\n            [argNamer (Arg i (typeMof t)) | Arg i t <- vs]\n   in PoolDocs\n        { poolCompleteManifolds = newManifold : completeManifolds\n        , poolExpr = call\n        , poolPriorLines = []\n        , poolPriorExprs = priorExprs\n        }\n\nprettyThing :: (p -> MI.Identity PoolDocs) -> p -> Doc ()\nprettyThing f a =\n  let e = MI.runIdentity $ f a\n   in vsep . punctuate line $ poolPriorExprs e <> poolCompleteManifolds e\n\npseudocodeNativeManifold :: NativeManifold -> MDoc\npseudocodeNativeManifold = prettyThing (foldWithNativeManifoldM prettyFoldManifold)\n\npseudocodeSerialManifold :: SerialManifold -> MDoc\npseudocodeSerialManifold = prettyThing (foldWithSerialManifoldM prettyFoldManifold)\n\npseudocodeSerialArg :: SerialArg -> MDoc\npseudocodeSerialArg = prettyThing (foldWithSerialArgM prettyFoldManifold)\n\npseudocodeNativeArg :: NativeArg -> MDoc\npseudocodeNativeArg = prettyThing (foldWithNativeArgM prettyFoldManifold)\n\npseudocodeSerialExpr :: SerialExpr -> MDoc\npseudocodeSerialExpr = prettyThing (foldWithSerialExprM prettyFoldManifold)\n\npseudocodeNativeExpr :: NativeExpr -> MDoc\npseudocodeNativeExpr = prettyThing (foldWithNativeExprM prettyFoldManifold)\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Infer.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Infer\nDescription : Infer concrete (language-specific) types from type aliases\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nMaps general types to their concrete counterparts by evaluating type\naliases in the language-specific scope. Used by 'Express' and 'Serialize'\nto determine how values are represented in each target language.\n-}\nmodule Morloc.CodeGenerator.Infer\n  ( getScope\n  , inferConcreteType\n  , inferConcreteTypeUniversal\n  , inferConcreteTypeU\n  , inferConcreteVar\n  , evalGeneralStep\n  ) where\n\nimport qualified Control.Monad.State as CMS\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.TypeEval as T\n\n-- TODO: do not use global scope here\ngetScope :: Int -> Lang -> MorlocMonad (Scope, Scope)\ngetScope _ lang = do\n  cscope <- MM.getConcreteUniversalScope lang\n  gscope <- MM.getGeneralUniversalScope\n  MM.sayVVV $ \"cscope:\" <+> viaShow cscope\n  return (cscope, gscope)\n\nevalGeneralStep :: Int -> TypeU -> MorlocMonad (Maybe TypeU)\nevalGeneralStep i t = T.evaluateStep <$> MM.getGeneralScope i <*> pure t\n\ninferConcreteTypeU :: Lang -> Indexed TypeU -> MorlocMonad TypeU\ninferConcreteTypeU lang (Idx i t0) = do\n  attemptT <- inferConcreteTypeU' t0 <$> getScope i lang\n  case attemptT of\n    (Right t') -> return t'\n    (Left _) -> do\n      gscopeUni <- MM.getGeneralUniversalScope\n      cscopeUni <- MM.getConcreteUniversalScope lang\n      case inferConcreteTypeU' t0 (cscopeUni, gscopeUni) of\n        (Right t') -> return t'\n        (Left (SystemError e2)) -> MM.throwSourcedError i e2\n        (Left e2) -> MM.throwError e2\n\ninferConcreteTypeU' :: TypeU -> (Scope, Scope) -> Either MorlocError TypeU\ninferConcreteTypeU' generalType (cscope, gscope) = T.pairEval cscope gscope generalType\n\ninferConcreteType :: Lang -> Indexed Type -> MorlocMonad TypeF\ninferConcreteType _ (Idx i (UnkT _)) =\n  MM.throwSourcedError i \"Cannot infer concrete type for UnkT. This may be an unsolved generic term\"\ninferConcreteType lang (Idx i (type2typeu -> generalType)) = do\n  concreteType <- inferConcreteTypeU lang (Idx i generalType)\n  (_, gscope) <- getScope i lang\n  case weave gscope generalType concreteType of\n    (Right tf) -> return tf\n    (Left _) -> do\n      gscopeUni <- CMS.gets stateUniversalGeneralTypedefs\n      case weave gscopeUni generalType concreteType of\n        (Right tf) -> return tf\n        (Left _) -> do\n          -- Evaluate the general type one level and try again\n          --\n          -- Weaving will fail for parameterize type definitions, such as\n          --   type (Foo a) = [(a, Str)]\n          -- Here the primitive type (e.g., \"std::vector<std::tuple<$1,std::string>>\" a)\n          -- cannot be woven with the `Foo a` type. So `Foo a` needs to be\n          -- substituted for [(a, Str)], which can be woven.\n          mayReducedGType <- evalGeneralStep i generalType\n          case mayReducedGType of\n            (Just reducedGType) -> inferConcreteType lang (Idx i (typeOf reducedGType))\n            Nothing ->\n              MM.throwSourcedError i $\n                \"Cannot infer concrete type for\" <+> pretty generalType <> \"\\nCould not reduce type\"\n\ninferConcreteTypeUniversal :: Lang -> Type -> MorlocMonad TypeF\ninferConcreteTypeUniversal lang t@(type2typeu -> generalType) = do\n  gscopeUni <- CMS.gets stateUniversalGeneralTypedefs\n  concreteType <- inferConcreteTypeUUniversal lang generalType\n  case weave gscopeUni generalType concreteType of\n    (Right tf) -> return tf\n    (Left _) -> do\n      -- Evaluate the general type one level and try again\n      case T.evaluateStep gscopeUni generalType of\n        (Just reducedGType) ->\n          if reducedGType == generalType\n            then\n              MM.throwSystemError $\n                \"Failed to resolve concrete type for\" <+> pretty t <+> \"and cannot evaluate any further\"\n            else\n              MM.throwSystemError $\n                \"Failed to infer concrete type for\" <+> pretty generalType\n                  <> \": Cannot unify with\" <+> pretty reducedGType\n        Nothing ->\n          MM.throwSystemError $\n            \"Failed to infer concrete type for\" <+> pretty t <+> \": Could not reduce type in broadest scope\"\n\ninferConcreteTypeUUniversal :: Lang -> TypeU -> MorlocMonad TypeU\ninferConcreteTypeUUniversal lang generalType = do\n  gscopeUni <- CMS.gets stateUniversalGeneralTypedefs\n  cscopeUni <- CMS.gets stateUniversalConcreteTypedefs |>> fromMaybe Map.empty . Map.lookup lang\n  let attemptUni = inferConcreteTypeU' generalType (cscopeUni, gscopeUni)\n  case attemptUni of\n    (Right t) -> return t\n    (Left (SystemError e2)) ->\n      MM.throwSystemError $\n        \"Failed to infer concrete universal type for lang\"\n          <+> pretty lang\n          <+> \"for type\"\n          <+> pretty generalType\n          <> \":\" <+> e2\n    (Left e) -> MM.throwError e\n\nweave :: Scope -> TypeU -> TypeU -> Either MDoc TypeF\nweave gscope = w\n  where\n    w (VarU v1) (VarU (TV v2)) = return $ VarF (FV v1 (CV v2))\n    w (FunU ts1 t1) (FunU ts2 t2) = FunF <$> zipWithM w ts1 ts2 <*> w t1 t2\n    w (AppU t1 ts1) (AppU t2 ts2) = AppF <$> w t1 t2 <*> weaveArgs ts1 ts2\n    w t1@(NamU o1 v1 ts1 rs1) t2@(NamU o2 v2 ts2 rs2)\n      | o1 == o2 && length ts1 == length ts2 && length rs1 == length rs2 =\n          NamF o1 (FV v1 (CV (unTVar v2)))\n            <$> zipWithM w ts1 ts2\n            <*> zipWithM (\\(_, t1') (k2', t2') -> (,) k2' <$> w t1' t2') rs1 rs2\n      | otherwise = Left $ \"failed to weave:\" <+> \"\\n  t1:\" <+> pretty t1 <+> \"\\n  t2:\" <+> pretty t2\n    w (EffectU effs t1) (EffectU _ t2) = EffectF (resolveEffectSet effs) <$> w t1 t2\n    w (OptionalU t1) (OptionalU t2) = OptionalF <$> w t1 t2\n    w (NatLitU n) (NatLitU _) = return $ NatLitF n\n    w (NatLitU n) _ = return $ NatLitF n  -- Nat params may be erased in concrete type\n    w (NatAddU _ _) _ = return $ NatLitF 0  -- Nat arithmetic erased in concrete type\n    w (NatMulU _ _) _ = return $ NatLitF 0  -- Nat arithmetic erased in concrete type\n    w (NatSubU _ _) _ = return $ NatLitF 0  -- Nat arithmetic erased in concrete type\n    w (NatDivU _ _) _ = return $ NatLitF 0  -- Nat arithmetic erased in concrete type\n    w (NatVarU _) _ = return $ NatLitF 0  -- Nat variable erased in concrete type\n    w (LabeledU _ t1) t2 = w t1 t2\n    w (ForallU v (VarU v')) _ | v == v' = return $ NatLitF 0  -- Unresolved variable (UnkT pattern)\n    w t1 t2 = case T.evaluateStep gscope t1 of\n      Nothing -> Left $ \"failed to weave:\" <+> \"\\n  t1:\" <+> pretty t1 <> \"\\n  t2:\" <> pretty t2\n      (Just t1') ->\n        if t1 == t1'\n          then Left (\"failed to weave:\" <> pretty t1 <+> \"vs\" <+> pretty t1')\n          else do\n            w t1' t2\n\n    -- Weave type arguments, handling Nat params that may be erased in concrete type.\n    -- Nat-kinded general args have no concrete counterpart, so we consume them\n    -- without advancing the concrete list, but still emit a NatLitF placeholder.\n    weaveArgs :: [TypeU] -> [TypeU] -> Either MDoc [TypeF]\n    weaveArgs [] [] = Right []\n    weaveArgs [] _ = Left \"concrete type has more args than general type in weave\"\n    weaveArgs (NatLitU n : gs) cs = (NatLitF n :) <$> weaveArgs gs cs\n    weaveArgs (NatAddU _ _ : gs) cs = (NatLitF 0 :) <$> weaveArgs gs cs\n    weaveArgs (NatMulU _ _ : gs) cs = (NatLitF 0 :) <$> weaveArgs gs cs\n    weaveArgs (NatSubU _ _ : gs) cs = (NatLitF 0 :) <$> weaveArgs gs cs\n    weaveArgs (NatDivU _ _ : gs) cs = (NatLitF 0 :) <$> weaveArgs gs cs\n    -- Unresolved nat dimension variable (opaque output dims): treat as erased\n    weaveArgs (NatVarU _ : gs) cs = (NatLitF 0 :) <$> weaveArgs gs cs\n    weaveArgs (ForallU v (VarU v') : gs) cs | v == v' = (NatLitF 0 :) <$> weaveArgs gs cs\n    weaveArgs (g:gs) (c:cs) = (:) <$> w g c <*> weaveArgs gs cs\n    weaveArgs _ [] = Left \"general type has more non-Nat args than concrete type in weave\"\n\ninferConcreteVar :: Lang -> Indexed TVar -> MorlocMonad FVar\ninferConcreteVar lang t0@(Idx i v) = do\n  MM.sayVVV $ \"inferConcreteVar\" <+> pretty lang <+> pretty t0\n  localScope <- MM.getConcreteScope i lang\n  globalScope <- MM.getConcreteUniversalScope lang\n  case Map.lookup v localScope of\n    (Just ((_, t, _, True) : _)) -> return $ FV v (CV . unTVar $ extractKey t)\n    (Just ((_, t, _, False) : _)) -> error $ \"Substituting the non-terminal \" <> show (extractKey t) <> \" into type \" <> show t\n    _ -> case Map.lookup v globalScope of\n      (Just ((_, t, _, True) : _)) -> do\n        -- TODO fix this, the types should be in scope\n        MM.sayVVV $ \"WARNING: using global definition for v=\" <> pretty v\n        return $ FV v (CV . unTVar $ extractKey t)\n      (Just ((_, t, _, False) : _)) -> error $ \"Substituting the non-terminal \" <> show (extractKey t) <> \" into type \" <> show t\n      _ -> do\n        -- Try transitive resolution: expand through general scope\n        (cscope, gscope) <- getScope i lang\n        case T.pairEval cscope gscope (VarU v) of\n          Right (VarU v') -> return $ FV v (CV (unTVar v'))\n          Right _ -> error $ \"Transitive resolution of \" <> show (unTVar v)\n                          <> \" yielded non-variable type\"\n          Left _ -> error $ \"Cannot find type variable \"\n                         <> show (unTVar v) <> \" in scope\"\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/LambdaEval.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.LambdaEval\nDescription : Beta-reduce applied lambdas in the codegen AST\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nPerforms beta-reduction on lambda applications in the 'AnnoS' tree so\nthat the code generator sees only fully-applied function calls or\nunapplied lambdas, never @(\\\\x -> body) arg@.\n-}\nmodule Morloc.CodeGenerator.LambdaEval\n  ( applyLambdas\n  ) where\n\nimport Morloc.CodeGenerator.Namespace\n\n-- {- | Remove lambdas introduced through substitution\n--\n-- For example:\n--\n--  bif x = add x 10\n--  bar py :: \"int\" -> \"int\"\n--  bar y = add y 30\n--  f z = bar (bif z)\n--\n-- In Treeify.hs, the morloc declarations will be substituted in as lambdas. But\n-- we want to preserve the link to any annotations (in this case, the annotation\n-- that `bar` should be in terms of python ints). The morloc declarations can be\n-- substituted in as follows:\n--\n--  f z = (\\y -> add y 30) ((\\x -> add x 10) z)\n--\n-- The indices for bif and bar that link the annotations to the functions are\n-- relative to the lambda expressions, so this substitution preserves the link.\n-- Typechecking can proceed safely.\n--\n-- The expression can be simplified:\n--\n--  f z = (\\y -> add y 30) ((\\x -> add x 10) z)\n--  f z = (\\y -> add y 30) (add z 10)            -- [z / x]\n--  f z = add (add z 10) 30                      -- [add z 10 / y]\n--\n-- The simplified expression is what should be written in the generated code. It\n-- would also be easier to typecheck and debug. So should these substitutions be\n-- done immediately after parsing? We need to preserve\n--  1. links to locations in the original source code (for error messages)\n--  2. type annotations.\n--  3. declaration names for generated comments and subcommands\n--\n-- Here is the original expression again, but annotated and indexed\n--\n--  (\\x -> add_2 x_3 10_4)_1\n--  (\\y -> add_6 y_7 30_8)_5\n--  (\\z -> bar_10 (bif_11 z_12))_9\n--\n--  1: name=\"bif\"\n--  5: name=\"bar\", type=\"int\"@py -> \"int\"@py\n--  9: name=\"f\"\n--\n-- Each add is also associated with a type defined in a signature in an\n-- unmentioned imported library, but those will be looked up by the typechecker\n-- and will not be affected by rewriting.\n--\n-- Substitution requires reindexing. A definition can be used multiple times and\n-- we need to distinguish between the use cases.\n--\n-- Replace bif and bar with their definition and create fresh indices:\n--\n--  (\\z -> (\\y -> add_18 y_19 30_20)_17 ((\\x -> add_14 x_15 10_16)_13 z_12)_9\n--\n--  13,1: name=\"bif\"\n--  17,5: name=\"bar\", type=\"int\"@py -> \"int\"@py\n--  9: name=\"f\"\n--\n-- Now we can substitute for y\n--\n--  (\\z -> add_18 ((\\x -> add_14 x_15 10_16)_13 z_12)_9 30_20)\n--\n-- But this destroyed index 17 and the link to the python annotation. We can\n-- preserve the type by splitting the annotation of bar.\n--\n--  13,1: name=\"bif\"\n--  18,17,5: name=\"bar\"\n--  12: \"int\"@py\n--  13: \"int\"@py\n--  9: name=\"f\"\n--\n-- Index 18 should be associated with the *name* \"bar\", but not the type, since it\n-- has been applied. The type of bar is now split between indices 12 and 13.\n--\n-- This case works fine, but it breaks down when types are polymorphic. If the\n-- annotation of bar had been `a -> a`, then how would we type 12 and 13? We can't\n-- say that `12 :: forall a . a` and `13 :: forall a . a`, since this\n-- eliminates the constraint that the `a`s must be the same.\n--\n-- If instead we rewrite lambdas after typechecking, then everything works out.\n--\n-- Thus applyLambdas is done here, rather than in Treeify.hs or Desugar.hs.\n--\n-- Lambda application can also NOT be done before collapsing from Many to One in\n-- AnnoS. The reason is that in ((VarS (Many es)) 42), the values in es\n-- may contain `CallS src` or `LamS vs e` types. The CallS terms cannot be\n-- reduced but the lambdas can. So applying here would lead to divergence.\n--\n-- It also must be done BEFORE conversion to ExprM in `express`, where manifolds\n-- are resolved.\n-- -}\napplyLambdas ::\n  AnnoS (Indexed Type) One a ->\n  MorlocMonad (AnnoS (Indexed Type) One a)\n-- eliminate empty lambdas\napplyLambdas (AnnoS g1 _ (AppS (AnnoS _ _ (LamS [] (AnnoS _ c2 e))) [])) = applyLambdas $ AnnoS g1 c2 e\n-- eliminate empty applications\napplyLambdas (AnnoS g1 _ (AppS (AnnoS _ c2 e) [])) = applyLambdas $ AnnoS g1 c2 e\n-- substitute applied lambdas\napplyLambdas\n  ( AnnoS\n      i1\n      tb1\n      ( AppS\n          ( AnnoS\n              (Idx i2 (FunT (_ : tas) tb2))\n              c\n              (LamS (v : vs) e2)\n            )\n          (e1 : es)\n        )\n    ) =\n    let e2' = substituteAnnoS v e1 e2\n     in applyLambdas\n          ( AnnoS\n              i1\n              tb1\n              ( AppS\n                  ( AnnoS\n                      (Idx i2 (FunT tas tb2))\n                      c\n                      (LamS vs e2')\n                  )\n                  es\n              )\n          )\n-- propagate the changes\napplyLambdas (AnnoS g c (AppS f es)) = do\n  f' <- applyLambdas f\n  es' <- mapM applyLambdas es\n  return (AnnoS g c (AppS f' es'))\napplyLambdas (AnnoS g c (LamS vs e)) = AnnoS g c . LamS vs <$> applyLambdas e\napplyLambdas (AnnoS g c (LstS es)) = AnnoS g c . LstS <$> mapM applyLambdas es\napplyLambdas (AnnoS g c (TupS es)) = AnnoS g c . TupS <$> mapM applyLambdas es\napplyLambdas (AnnoS g c (NamS rs)) = AnnoS g c . NamS <$> mapM (secondM applyLambdas) rs\napplyLambdas (AnnoS g c (VarS v (One e))) = AnnoS g c . VarS v . One <$> applyLambdas e\n-- Inline let-bound lambdas: the nexus evaluator cannot serialize function types,\n-- so substitute the lambda for all references and re-process to beta-reduce\napplyLambdas (AnnoS g c (LetS v e1@(AnnoS _ _ (LamS _ _)) e2)) = do\n  e1' <- applyLambdas e1\n  let e2' = substituteAnnoS v e1' e2\n  inner <- applyLambdas e2'\n  let AnnoS _ _ innerExpr = inner\n  return (AnnoS g c innerExpr)\napplyLambdas (AnnoS g c (LetS v e1 e2)) = do\n  e1' <- applyLambdas e1\n  e2' <- applyLambdas e2\n  return (AnnoS g c (LetS v e1' e2'))\napplyLambdas (AnnoS g c (IfS cond thenE elseE)) = do\n  cond' <- applyLambdas cond\n  thenE' <- applyLambdas thenE\n  elseE' <- applyLambdas elseE\n  return (AnnoS g c (IfS cond' thenE' elseE'))\napplyLambdas (AnnoS g c (DoBlockS e)) = AnnoS g c . DoBlockS <$> applyLambdas e\n-- cancel force-suspend: !{e} --> e, preserving outer annotation\napplyLambdas (AnnoS g c (EvalS (AnnoS _ _ (DoBlockS e)))) = do\n  e' <- applyLambdas e\n  let AnnoS _ _ inner = e'\n  return (AnnoS g c inner)\napplyLambdas (AnnoS g c (EvalS e)) = AnnoS g c . EvalS <$> applyLambdas e\napplyLambdas (AnnoS g c (CoerceS co e)) = AnnoS g c . CoerceS co <$> applyLambdas e\napplyLambdas (AnnoS g c (IntrinsicS intr es)) = AnnoS g c . IntrinsicS intr <$> mapM applyLambdas es\napplyLambdas (AnnoS g c (CallS v)) = return (AnnoS g c (CallS v))\napplyLambdas x = return x\n\nsubstituteAnnoS ::\n  EVar ->\n  AnnoS (Indexed Type) One a ->\n  AnnoS (Indexed Type) One a ->\n  AnnoS (Indexed Type) One a\nsubstituteAnnoS v r = f\n  where\n    f e@(AnnoS _ _ (BndS v'))\n      | v == v' = r\n      | otherwise = e\n    -- propagate the changes\n    f (AnnoS g c (AppS e es)) =\n      let f' = f e\n          es' = map f es\n       in AnnoS g c (AppS f' es')\n    f e0@(AnnoS g c (LamS vs e))\n      | v `elem` vs = e0 -- the replacement term is shadowed\n      | otherwise =\n          let e' = f e\n           in AnnoS g c (LamS vs e')\n    f (AnnoS g c (LstS es)) =\n      let es' = map f es\n       in AnnoS g c (LstS es')\n    f (AnnoS g c (TupS es)) =\n      let es' = map f es\n       in AnnoS g c (TupS es')\n    f (AnnoS g c (NamS rs)) =\n      let es' = map (f . snd) rs\n       in AnnoS g c (NamS (zip (map fst rs) es'))\n    f e@(AnnoS _ _ (LetBndS v'))\n      | v == v' = r\n      | otherwise = e\n    f e0@(AnnoS g c (LetS v' e1 e2))\n      | v == v' = e0 -- shadowed by let binding\n      | otherwise = AnnoS g c (LetS v' (f e1) (f e2))\n    f (AnnoS g c (IfS cond thenE elseE)) = AnnoS g c (IfS (f cond) (f thenE) (f elseE))\n    f (AnnoS g c (DoBlockS e)) = AnnoS g c (DoBlockS (f e))\n    f (AnnoS g c (EvalS e)) = AnnoS g c (EvalS (f e))\n    f (AnnoS g c (CoerceS co e)) = AnnoS g c (CoerceS co (f e))\n    f (AnnoS g c (IntrinsicS intr es)) = AnnoS g c (IntrinsicS intr (map f es))\n    -- CallS is a recursive back-edge, not a variable reference to substitute\n    f x = x\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/LanguageDescriptor.hs",
    "content": "{-# LANGUAGE DeriveGeneric #-}\n{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.CodeGenerator.LanguageDescriptor\nDescription : Language descriptor for generic code generation\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nDefines a descriptor format that captures the syntactic differences between\ndynamically-typed interpreted languages. Used by the generic translator to\ngenerate pool code without language-specific Haskell modules.\n-}\nmodule Morloc.CodeGenerator.LanguageDescriptor\n  ( LangDescriptor (..)\n  , IndexStyle (..)\n  , FieldAccessStyle (..)\n  , BlockStyle (..)\n  , MapStyle (..)\n  , ListStyle (..)\n  , PatternStyle (..)\n  , loadLangDescriptor\n  , loadLangDescriptorFromText\n  , defaultLangDescriptor\n  ) where\n\nimport qualified Data.Aeson as Aeson\nimport qualified Data.Aeson.Key as AesonKey\nimport qualified Data.Aeson.KeyMap as KM\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Data.Text.Encoding as TE\nimport qualified Data.Yaml as Y\nimport GHC.Generics (Generic)\n\n-- | How to access tuple/list elements by index\ndata IndexStyle\n  = ZeroBracket -- e[i]     (Python, JS, Julia, Lua)\n  | OneBracket -- e[i+1]   (R-like with [], 1-indexed)\n  | OneDoubleBracket -- e[[i+1]] (R with [[]], 1-indexed)\n  deriving (Eq, Show, Generic)\n\n-- | How to access record fields\ndata FieldAccessStyle\n  = DotAccess -- e.field  (Python, JS, Julia)\n  | DollarAccess -- e$field  (R)\n  deriving (Eq, Show, Generic)\n\n-- | Block structure for function definitions\ndata BlockStyle\n  = IndentBlock -- Python-style: header + indented body\n  | BraceBlock -- R/JS-style: header { body }\n  | EndKeywordBlock -- Julia/Ruby-style: header body end\n  deriving (Eq, Show, Generic)\n\n-- | List iteration / map style\ndata MapStyle\n  = LoopAppend -- for x in col: result.append(...)\n  | ApplyCallback -- lapply(col, function(x) ...)\n  | ListComprehension -- [body for x in col]\n  deriving (Eq, Show, Generic)\n\n-- | List constructor style\ndata ListStyle\n  = BracketList -- [a, b, c]\n  | FunctionCallList -- list(a, b, c)\n  | TypeDependentList -- c(a,b) for atomic types, list(a,b) for complex\n  deriving (Eq, Show, Generic)\n\n-- | String interpolation / pattern style\ndata PatternStyle\n  = FStringPattern -- f\"prefix{var}suffix\"\n  | ConcatCall -- paste0(\"prefix\", var, \"suffix\") or string(...)\n  deriving (Eq, Show, Generic)\n\n-- | Complete language descriptor\ndata LangDescriptor = LangDescriptor\n  { -- Identity\n    ldName :: !Text\n  , ldExtension :: !String\n  , -- Literals\n    ldBoolTrue :: !Text\n  , ldBoolFalse :: !Text\n  , ldNullLiteral :: !Text\n  , -- Constructors\n    ldListStyle :: !ListStyle\n  , ldTupleConstructor :: !Text -- \"tuple\" or \"list\" or \"\"\n  , ldRecordConstructor :: !Text -- \"dict\" or \"OrderedDict\" or \"list\"\n  , ldRecordSeparator :: !Text -- \"=\" for Python/R, \"=>\" for Julia\n  , -- Access styles\n    ldIndexStyle :: !IndexStyle\n  , ldKeyAccess :: !Text -- \"bracket\" -> e[\"k\"], \"double_bracket\" -> e[[\"k\"]]\n  , ldFieldAccess :: !FieldAccessStyle\n  , -- Serialize/deserialize function names\n    ldSerializeFn :: !Text -- \"morloc.put_value\" or \"morloc_put_value\"\n  , ldDeserializeFn :: !Text -- \"morloc.get_value\" or \"morloc_get_value\"\n  , -- Intrinsic function prefix (for mlc_show, mlc_hash, etc.)\n    ldIntrinsicPrefix :: !Text -- \"morloc.\" or \"morloc_\" or \"MorlocRuntime.\"\n  , -- Foreign call template\n    ldForeignCallFn :: !Text -- \"morloc.foreign_call\" or \"morloc_foreign_call\"\n  , ldForeignCallIntSuffix :: !Text -- \"L\" for R, \"\" for others\n  , ldIntLiteralSuffix :: !Text -- \"L\" for R, \"\" for others\n  , -- Remote call\n    ldRemoteCallFn :: !Text -- \"morloc.remote_call\" or \"morloc_remote_call\"\n  , -- Record access\n    ldDictStyleRecords :: !Bool -- True: NamRecord/dict use bracket access, others use dot (Python)\n  , ldQuoteRecordKeys :: !Bool -- True: \"k\" => v (Julia), False: k=v (Python, R)\n  , -- Import syntax\n    ldQualifiedImports :: !Bool -- True: qualify source names with module path (Python)\n  , ldIncludeRelToFile :: !Bool -- True if include() resolves relative to file (Julia), False for CWD (R)\n  , -- Pool template\n    ldPoolTemplate :: !Text -- pool template content\n  , ldBreakMarker :: !Text -- \"# <<<BREAK>>>\"\n  , ldCommentMarker :: !Text -- \"#\" or \"//\" or \"--\"\n  , -- Execution\n    ldRunCommand :: ![Text] -- command to run pool, e.g. [\"python3\"]\n  , ldIsCompiled :: !Bool\n  , -- External codegen (optional)\n    ldCodegenCommand :: !(Maybe Text) -- e.g. \"morloc-codegen-generic\"\n  , -- == Template fields (Layer 1 & 2) ==\n\n    -- Assignment\n    ldAssignOp :: !Text -- \"=\" or \"<-\"\n  , -- Lambda\n    ldLambdaTemplate :: !Text -- e.g. \"lambda {{args}}: {{body}}\"\n  , -- Do-block (effect wrapper)\n    ldDoBlockExpr :: !Text -- e.g. \"(lambda: {{expr}})\"\n  , ldDoBlockBlock :: !Text -- e.g. \"function(){\\n{{body}}\\n}\" or \"\" for pass-through\n  , -- Partial application\n    ldPartialTemplate :: !Text -- e.g. \"functools.partial({{fn_with_context}})\"\n  , -- Import\n    ldImportTemplate :: !Text -- e.g. \"{{namespace}} = importlib.import_module(\\\"{{module_path}}\\\")\"\n  , -- Socket path\n    ldSocketPathTemplate :: !Text -- e.g. \"os.path.join(global_state[\\\"tmpdir\\\"], {{socket}})\"\n  , -- Resource packing for remote calls\n    ldResourcePackTemplate :: !Text -- e.g. \"struct.pack('iiii', {{mem}}, {{time}}, {{cpus}}, {{gpus}})\"\n  , -- Return statement\n    ldReturnTemplate :: !Text -- e.g. \"return({{expr}})\"\n  , -- Function definition\n    ldFuncDefHeader :: !Text -- e.g. \"def {{name}}({{args}}):\"\n  , ldBlockStyle :: !BlockStyle\n  , ldBlockEnd :: !Text -- \"\" or \"end\"\n  , -- Error wrapping\n    ldErrorWrapOpen :: !Text -- \"try:\" for Python, \"\" for others\n  , ldErrorWrapClose :: ![Text] -- Except block lines with {{name}} template var\n  , -- Pattern/string interpolation support\n    ldPatternStyle :: !PatternStyle\n  , ldConcatFn :: !Text -- For ConcatCall: \"paste0\", \"string\"\n  , ldQuoteTerminator :: !Text -- String delimiter: \"\\\"\" for R/C++, \"\\\"\\\"\\\"\" for Python\n  , ldQuoteTerminatorEsc :: !Text -- Escaped form: \"\\\\\\\"\" for R/C++, \"\\\\\\\"\\\\\\\"\\\\\\\"\" for Python\n  , -- List constructor support\n    ldAtomicTypes :: ![Text] -- For TypeDependentList: [\"integer\", \"numeric\", ...]\n  , ldAtomicListFn :: !Text -- For TypeDependentList: \"c\"\n  , ldGenericListFn :: !Text -- For FunctionCallList/TypeDependentList: \"list\"\n  , -- Map iteration style (kept as enum - needs different code structure)\n    ldMapStyle :: !MapStyle\n  , -- Dispatch table templates\n    ldDispatchLocalHeader :: !Text -- e.g. \"dispatch = {\"\n  , ldDispatchLocalEntry :: !Text -- e.g. \"    {{mid}}: {{name}},\"\n  , ldDispatchLocalFooter :: !Text -- e.g. \"}\"\n  , ldDispatchRemoteHeader :: !Text -- e.g. \"remote_dispatch = {\"\n  , ldDispatchRemoteEntry :: !Text -- e.g. \"    {{mid}}: {{name}}_remote,\"\n  , ldDispatchRemoteFooter :: !Text -- e.g. \"}\"\n  }\n  deriving (Eq, Show, Generic)\n\n-- YAML instances\n\ninstance Y.FromJSON IndexStyle where\n  parseJSON = Y.withText \"IndexStyle\" $ \\t -> case t of\n    \"zero_bracket\" -> pure ZeroBracket\n    \"one_bracket\" -> pure OneBracket\n    \"one_double_bracket\" -> pure OneDoubleBracket\n    _ -> fail $ \"Unknown IndexStyle: \" <> T.unpack t\n\ninstance Y.FromJSON FieldAccessStyle where\n  parseJSON = Y.withText \"FieldAccessStyle\" $ \\t -> case t of\n    \"dot\" -> pure DotAccess\n    \"dollar\" -> pure DollarAccess\n    _ -> fail $ \"Unknown FieldAccessStyle: \" <> T.unpack t\n\ninstance Y.FromJSON BlockStyle where\n  parseJSON = Y.withText \"BlockStyle\" $ \\t -> case t of\n    \"indent\" -> pure IndentBlock\n    \"braces\" -> pure BraceBlock\n    \"end_keyword\" -> pure EndKeywordBlock\n    _ -> fail $ \"Unknown BlockStyle: \" <> T.unpack t\n\ninstance Y.FromJSON MapStyle where\n  parseJSON = Y.withText \"MapStyle\" $ \\t -> case t of\n    \"loop_append\" -> pure LoopAppend\n    \"apply_callback\" -> pure ApplyCallback\n    \"list_comprehension\" -> pure ListComprehension\n    _ -> fail $ \"Unknown MapStyle: \" <> T.unpack t\n\ninstance Y.FromJSON ListStyle where\n  parseJSON = Y.withText \"ListStyle\" $ \\t -> case t of\n    \"bracket\" -> pure BracketList\n    \"function_call\" -> pure FunctionCallList\n    \"type_dependent\" -> pure TypeDependentList\n    _ -> fail $ \"Unknown ListStyle: \" <> T.unpack t\n\ninstance Y.FromJSON PatternStyle where\n  parseJSON = Y.withText \"PatternStyle\" $ \\t -> case t of\n    \"fstring\" -> pure FStringPattern\n    \"concat_call\" -> pure ConcatCall\n    _ -> fail $ \"Unknown PatternStyle: \" <> T.unpack t\n\n-- | Custom FromJSON that injects defaults for optional fields.\ninstance Y.FromJSON LangDescriptor where\n  parseJSON = Y.withObject \"LangDescriptor\" $ \\obj -> do\n    let ins k v = KM.insertWith (\\_ old -> old) (AesonKey.fromText k) v\n        -- Map registry metadata fields to descriptor fields if not already present\n        nameVal = KM.lookup (AesonKey.fromText \"name\") obj\n        extVal = KM.lookup (AesonKey.fromText \"extension\") obj\n        runCmdVal = KM.lookup (AesonKey.fromText \"run_command\") obj\n        isCompiledVal = KM.lookup (AesonKey.fromText \"is_compiled\") obj\n        -- ins keeps old value if key exists, so insert specific overrides first\n        withDefaults =\n          maybe id (ins \"ldName\") nameVal\n            . maybe id (ins \"ldExtension\") extVal\n            . maybe id (ins \"ldRunCommand\") runCmdVal\n            . maybe id (ins \"ldIsCompiled\") isCompiledVal\n            . ins \"ldCodegenCommand\" Y.Null\n            . ins \"ldIntLiteralSuffix\" (Y.String \"\")\n            . ins \"ldIntrinsicPrefix\" (Y.String \"\")\n            . ins \"ldRemoteCallFn\" (Y.String \"\")\n            . ins \"ldDictStyleRecords\" (Y.Bool False)\n            . ins \"ldQuoteRecordKeys\" (Y.Bool True)\n            . ins \"ldQualifiedImports\" (Y.Bool False)\n            . ins \"ldRunCommand\" (Y.Array mempty)\n            . ins \"ldIsCompiled\" (Y.Bool False)\n            -- Template field defaults\n            . ins \"ldAssignOp\" (Y.String \"=\")\n            . ins \"ldLambdaTemplate\" (Y.String \"({{args}}) -> {{body}}\")\n            . ins \"ldDoBlockExpr\" (Y.String \"(() -> {{expr}})\")\n            . ins \"ldDoBlockBlock\" (Y.String \"\")\n            . ins \"ldPartialTemplate\" (Y.String \"({{bound_args}}) -> {{fn}}({{all_args}})\")\n            . ins \"ldImportTemplate\" (Y.String \"\")\n            . ins \"ldSocketPathTemplate\" (Y.String \"\")\n            . ins \"ldResourcePackTemplate\" (Y.String \"[{{mem}}, {{time}}, {{cpus}}, {{gpus}}]\")\n            . ins \"ldReturnTemplate\" (Y.String \"return({{expr}})\")\n            . ins \"ldFuncDefHeader\" (Y.String \"\")\n            . ins \"ldBlockStyle\" (Y.String \"indent\")\n            . ins \"ldBlockEnd\" (Y.String \"\")\n            . ins \"ldErrorWrapOpen\" (Y.String \"\")\n            . ins \"ldErrorWrapClose\" (Y.Array mempty)\n            . ins \"ldPatternStyle\" (Y.String \"fstring\")\n            . ins \"ldConcatFn\" (Y.String \"\")\n            . ins \"ldQuoteTerminator\" (Y.String \"\\\"\")\n            . ins \"ldQuoteTerminatorEsc\" (Y.String \"\\\\\\\"\")\n            . ins \"ldAtomicTypes\" (Y.Array mempty)\n            . ins \"ldAtomicListFn\" (Y.String \"\")\n            . ins \"ldGenericListFn\" (Y.String \"list\")\n            . ins \"ldMapStyle\" (Y.String \"loop_append\")\n            . ins \"ldDispatchLocalHeader\" (Y.String \"\")\n            . ins \"ldDispatchLocalEntry\" (Y.String \"\")\n            . ins \"ldDispatchLocalFooter\" (Y.String \"\")\n            . ins \"ldDispatchRemoteHeader\" (Y.String \"\")\n            . ins \"ldDispatchRemoteEntry\" (Y.String \"\")\n            . ins \"ldDispatchRemoteFooter\" (Y.String \"\")\n            $ obj\n    Aeson.genericParseJSON Aeson.defaultOptions (Y.Object withDefaults)\n\n-- | Load a language descriptor from a YAML file\nloadLangDescriptor :: FilePath -> IO (Either String LangDescriptor)\nloadLangDescriptor path = do\n  result <- Y.decodeFileEither path\n  return $ case result of\n    Left err -> Left $ Y.prettyPrintParseException err\n    Right desc -> Right desc\n\n-- | Load a language descriptor from YAML text\nloadLangDescriptorFromText :: Text -> Either String LangDescriptor\nloadLangDescriptorFromText content =\n  case Y.decodeEither' (TE.encodeUtf8 content) of\n    Left err -> Left $ Y.prettyPrintParseException err\n    Right desc -> Right desc\n\n-- | Default descriptor\ndefaultLangDescriptor :: Text -> String -> LangDescriptor\ndefaultLangDescriptor name ext =\n  LangDescriptor\n    { ldName = name\n    , ldExtension = ext\n    , ldBoolTrue = \"True\"\n    , ldBoolFalse = \"False\"\n    , ldNullLiteral = \"None\"\n    , ldListStyle = BracketList\n    , ldTupleConstructor = \"\"\n    , ldRecordConstructor = \"dict\"\n    , ldRecordSeparator = \"=\"\n    , ldIndexStyle = ZeroBracket\n    , ldKeyAccess = \"bracket\"\n    , ldFieldAccess = DotAccess\n    , ldSerializeFn = \"morloc.put_value\"\n    , ldDeserializeFn = \"morloc.get_value\"\n    , ldIntrinsicPrefix = \"\"\n    , ldForeignCallFn = \"morloc.foreign_call\"\n    , ldForeignCallIntSuffix = \"\"\n    , ldIntLiteralSuffix = \"\"\n    , ldRemoteCallFn = \"\"\n    , ldDictStyleRecords = False\n    , ldQuoteRecordKeys = True\n    , ldQualifiedImports = False\n    , ldIncludeRelToFile = False\n    , ldPoolTemplate = \"\"\n    , ldBreakMarker = \"# <<<BREAK>>>\"\n    , ldCommentMarker = \"#\"\n    , ldRunCommand = []\n    , ldIsCompiled = False\n    , ldCodegenCommand = Nothing\n    , -- Template fields\n      ldAssignOp = \"=\"\n    , ldLambdaTemplate = \"({{args}}) -> {{body}}\"\n    , ldDoBlockExpr = \"(() -> {{expr}})\"\n    , ldDoBlockBlock = \"\"\n    , ldPartialTemplate = \"({{bound_args}}) -> {{fn}}({{all_args}})\"\n    , ldImportTemplate = \"\"\n    , ldSocketPathTemplate = \"\"\n    , ldResourcePackTemplate = \"[{{mem}}, {{time}}, {{cpus}}, {{gpus}}]\"\n    , ldReturnTemplate = \"return({{expr}})\"\n    , ldFuncDefHeader = \"\"\n    , ldBlockStyle = IndentBlock\n    , ldBlockEnd = \"\"\n    , ldErrorWrapOpen = \"\"\n    , ldErrorWrapClose = []\n    , ldPatternStyle = FStringPattern\n    , ldConcatFn = \"\"\n    , ldQuoteTerminator = \"\\\"\"\n    , ldQuoteTerminatorEsc = \"\\\\\\\"\"\n    , ldAtomicTypes = []\n    , ldAtomicListFn = \"\"\n    , ldGenericListFn = \"list\"\n    , ldMapStyle = LoopAppend\n    , ldDispatchLocalHeader = \"\"\n    , ldDispatchLocalEntry = \"\"\n    , ldDispatchLocalFooter = \"\"\n    , ldDispatchRemoteHeader = \"\"\n    , ldDispatchRemoteEntry = \"\"\n    , ldDispatchRemoteFooter = \"\"\n    }\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Namespace.hs",
    "content": "{-# LANGUAGE FlexibleInstances #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE TypeFamilies #-}\n{-# LANGUAGE UndecidableInstances #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Namespace\nDescription : Types and re-exports for the code generation pipeline\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nAggregates core namespace modules and defines code-generator-specific types:\n'SerialManifold' (the serialization-annotated call tree), 'SerialAST'\n(serialization plans), pool/nexus configuration records, and the 'Script'\noutput type. This is the prelude import for all codegen modules.\n-}\nmodule Morloc.CodeGenerator.Namespace\n  ( module Morloc.Namespace.Prim\n  , module Morloc.Namespace.Type\n  , module Morloc.Namespace.Expr\n  , module Morloc.Namespace.State\n\n    -- ** Types used in final translations\n  , TypeM (..)\n  , TypeF (..)\n  , TypeS (..)\n  , FVar (..)\n\n    -- ** Typeclasses\n  , HasTypeF (..)\n  , MayHaveTypeF (..)\n  , HasTypeS (..)\n  , HasTypeM (..)\n  , typeMofRs\n  , typeMofForm\n  , Arg\n  , ArgGeneral (..)\n  , ManifoldForm (..)\n  , HeadManifoldForm (..)\n  , manifoldContext\n  , manifoldBound\n  , ArgTypes (..)\n  , argTypesToTypeM\n\n    -- ** Manifold data types\n  , ExecutableExpressionPool (..)\n  , RemoteForm (..)\n  , PolyHead (..)\n  , PolyExpr (..)\n  , MonoHead (..)\n  , MonoExpr (..)\n  , PoolCall (..)\n  , MFunctor (..)\n  , GateMap (..)\n  , ManifoldMap (..)\n  , NativeManifold (..)\n  , SerialManifold (..)\n  , SerialArg (..)\n  , NativeArg (..)\n  , SerialExpr (..)\n  , NativeExpr (..)\n  -- unrecursive types\n  , FoldManifoldM (..)\n  , SurroundManifoldM (..)\n  , NativeManifold_ (..)\n  , SerialManifold_ (..)\n  , SerialArg_ (..)\n  , NativeArg_ (..)\n  , SerialExpr_ (..)\n  , NativeExpr_ (..)\n  , foldlSM\n  , foldlNM\n  , foldlSE\n  , foldlNE\n  , foldlSA\n  , foldlNA\n\n    -- ** Serialization AST\n  , SerialAST (..)\n  , TypePacker (..)\n\n    -- ** Simple fold over expressions\n  , foldSerialManifoldM\n  , foldNativeManifoldM\n  , foldSerialExprM\n  , foldNativeExprM\n  , foldNativeArgM\n  , foldSerialArgM\n\n    -- ** Contextual fold over expressions\n  , surroundFoldSerialManifoldM\n  , surroundFoldNativeManifoldM\n  , surroundFoldSerialExprM\n  , surroundFoldNativeExprM\n  , surroundFoldNativeArgM\n  , surroundFoldSerialArgM\n\n    -- ** fold withs\n  , FoldWithManifoldM (..)\n  , foldWithSerialManifoldM\n  , foldWithNativeManifoldM\n  , foldWithSerialArgM\n  , foldWithNativeArgM\n  , foldWithSerialExprM\n  , foldWithNativeExprM\n\n    -- ** arg magic\n  , abimapM\n  , abilistM\n  , afirstM\n  , asecondM\n  , abilist\n  , abimap\n  , afirst\n  , asecond\n  , abiappendM\n  , abiappend\n\n    -- ** docstrings\n  , CmdArg (..)\n  , CmdDocSet (..)\n  , RecDocSet (..)\n  , ArgOptDocSet (..)\n  , ArgFlagDocSet (..)\n  , ArgPosDocSet (..)\n\n    -- ** weird baby schemes\n  , MonoidFold (..)\n  , makeMonoidFoldDefault\n  ) where\n\nimport Control.Monad.Identity (runIdentity)\nimport Data.Scientific (Scientific)\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport Morloc.Data.Doc\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport Morloc.Namespace.Type\n\n-- The final types used in code generation. The language annotation is removed,\n-- since the language for all types within a pool are the same.\n--\n-- The general type annotation will be used for documentation only\ndata FVar = FV TVar CVar\n  deriving (Show, Ord, Eq)\n\n-- The most minimal type that contains both general and concrete types\ndata TypeF\n  = UnkF FVar -- this should be parameterized by `Type`, since the general type should be known\n  | VarF FVar\n  | FunF [TypeF] TypeF\n  | AppF TypeF [TypeF]\n  | NamF NamType FVar [TypeF] [(Key, TypeF)]\n  | EffectF (Set.Set EffectLabel) TypeF\n  | OptionalF TypeF\n  | NatLitF Integer\n  deriving (Show, Ord, Eq)\n\ndata TypeM\n  = -- | serialized data that is not deserialized (and may not be representable) in this segment\n    Passthrough\n  | -- | serialized data that may be deserialized in this language\n    Serial TypeF\n  | -- | an unserialized native data type\n    Native TypeF\n  | -- | a function of n inputs and one output (cannot be serialized)\n    Function [TypeM] TypeM\n  deriving (Show, Eq, Ord)\n\n-- | TypeS is a subset of TypeM that does not allow native types\ndata TypeS\n  = PassthroughS\n  | SerialS TypeF\n  | FunctionS [TypeM] TypeS -- This is the type of a manifold\n  deriving (Show, Eq, Ord)\n\n-- | A tree describing how to (de)serialize an object\ndata SerialAST\n  = -- | use an (un)pack function to simplify an object\n    SerialPack FVar (TypePacker, SerialAST)\n  | SerialList FVar SerialAST\n  | -- | Dense N-dimensional tensor. ndim is the rank (1=vector, 2=matrix, etc.)\n    -- and the inner SerialAST is the element type (must be a numeric primitive).\n    SerialTensor FVar Int SerialAST\n  | SerialTuple FVar [SerialAST]\n  | -- | Make a record, table, or object. The parameters indicate\n    --   1) NamType - record/table/object\n    --   2) FVar - telling the name of the object (e.g., \"Person\")\n    --   3) [TypeF] - the types of the parameters (used as parameters in C++ templates, e.g., map<int, map<int,string>>)\n    --   4) [(FVar, SerialAST)] - entries with keys for concrete and general cases\n    SerialObject NamType FVar [TypeF] [(Key, SerialAST)]\n  | SerialReal FVar\n  | SerialFloat32 FVar\n  | SerialFloat64 FVar\n  | SerialInt FVar\n  | SerialInt8 FVar\n  | SerialInt16 FVar\n  | SerialInt32 FVar\n  | SerialInt64 FVar\n  | SerialUInt FVar\n  | SerialUInt8 FVar\n  | SerialUInt16 FVar\n  | SerialUInt32 FVar\n  | SerialUInt64 FVar\n  | SerialBool FVar\n  | SerialString FVar\n  | SerialNull FVar\n  | SerialOptional FVar SerialAST\n  | -- | depending on the language, this may or may not raise an error down the\n    -- line, the parameter contains the variable name, which is useful only for\n    -- source code comments.\n    SerialUnknown FVar\n  deriving (Ord, Eq, Show)\n\ninstance Pretty SerialAST where\n  pretty (SerialPack v (packer, s)) =\n    parens $\n      \"SerialPack\"\n        <+> pretty v\n        <+> braces (vsep [pretty packer, pretty s])\n  pretty (SerialList _ ef) = parens $ \"SerialList\" <+> pretty ef\n  pretty (SerialTensor v ndim s) = parens (\"SerialTensor\" <+> pretty v <+> pretty ndim <+> pretty s)\n  pretty (SerialTuple _ efs) = parens $ \"SerialTuple\" <+> tupled (map pretty efs)\n  pretty (SerialObject o _ vs rs) =\n    parens $\n      \"SerialObject\"\n        <+> pretty o\n        <+> tupled (map pretty vs)\n        <+> encloseSep \"{\" \"}\" \",\" [pretty k <+> \"=\" <+> pretty p | (k, p) <- rs]\n  pretty (SerialReal v) = parens (\"SerialReal\" <+> pretty v)\n  pretty (SerialFloat32 v) = parens (\"SerialFloat32\" <+> pretty v)\n  pretty (SerialFloat64 v) = parens (\"SerialFloat64\" <+> pretty v)\n  pretty (SerialInt v) = parens (\"SerialInt\" <+> pretty v)\n  pretty (SerialInt8 v) = parens (\"SerialInt8\" <+> pretty v)\n  pretty (SerialInt16 v) = parens (\"SerialInt16\" <+> pretty v)\n  pretty (SerialInt32 v) = parens (\"SerialInt32\" <+> pretty v)\n  pretty (SerialInt64 v) = parens (\"SerialInt64\" <+> pretty v)\n  pretty (SerialUInt v) = parens (\"SerialUInt\" <+> pretty v)\n  pretty (SerialUInt8 v) = parens (\"SerialUInt8\" <+> pretty v)\n  pretty (SerialUInt16 v) = parens (\"SerialUInt16\" <+> pretty v)\n  pretty (SerialUInt32 v) = parens (\"SerialUInt32\" <+> pretty v)\n  pretty (SerialUInt64 v) = parens (\"SerialUInt64\" <+> pretty v)\n  pretty (SerialBool v) = parens (\"SerialBool\" <+> pretty v)\n  pretty (SerialString v) = parens (\"SerialString\" <+> pretty v)\n  pretty (SerialNull v) = parens (\"SerialNull\" <+> pretty v)\n  pretty (SerialOptional v s) = parens (\"SerialOptional\" <+> pretty v <+> pretty s)\n  pretty (SerialUnknown v) = parens (\"SerialUnknown\" <+> pretty v)\n\ndata ExecutableExpressionPool\n  = SrcCallP Source -- source code\n  | PatCallP Pattern -- pattern function\n  | LocalCallP Int -- a locally defined function\n  | RecCallP Int (Maybe Lang)\n  -- ^ Recursive call to manifold. Nothing = same pool, Just lang = foreign pool.\n  deriving (Show, Ord, Eq)\n\ninstance Pretty ExecutableExpressionPool where\n  pretty (SrcCallP src) = pretty src\n  pretty (PatCallP pat) = pretty pat\n  pretty (LocalCallP i) = \"x\" <> pretty i\n  pretty (RecCallP i Nothing) = \"rec_m\" <> pretty i\n  pretty (RecCallP i (Just lang)) = \"rec_foreign_m\" <> pretty i <> \"@\" <> pretty lang\n\ndata TypePacker = TypePacker\n  { typePackerPacked :: TypeF\n  , typePackerUnpacked :: TypeF\n  , typePackerForward :: Source\n  , typePackerReverse :: Source\n  }\n  deriving (Show, Ord, Eq)\n\ninstance Pretty TypePacker where\n  pretty p =\n    \"TypePacker\"\n      <+> encloseSep\n        \"{\"\n        \"}\"\n        \",\"\n        [ \"typePackerPacked\" <+> \"=\" <+> pretty (typePackerPacked p)\n        , \"typePackerUnpacked\" <+> \"=\" <+> pretty (typePackerUnpacked p)\n        , \"typePackerForward\" <+> \"=\" <+> pretty (typePackerForward p)\n        , \"typePackerReverse\" <+> \"=\" <+> pretty (typePackerReverse p)\n        ]\n\ntype Arg = ArgGeneral Int\n\ndata ArgGeneral k a = Arg k a\n  deriving (Show, Eq, Ord)\n\ninstance Annotated ArgGeneral where\n  val (Arg _ x) = x\n  ann (Arg i _) = i\n  annotate i x = Arg i x\n\ninstance Functor (ArgGeneral k) where\n  fmap f (Arg i x) = Arg i (f x)\n\ninstance Bifunctor ArgGeneral where\n  bimapM f g (Arg k x) = Arg <$> f k <*> g x\n\ninstance HasTypeM TypeS where\n  typeMof PassthroughS = Passthrough\n  typeMof (SerialS t) = Serial t\n  typeMof (FunctionS ts t) = Function (map typeMof ts) (typeMof t)\n\nclass HasTypeS a where\n  typeSof :: a -> TypeS\n\n{- | Stores arguments to a manifold\n\nExamples:\n\n#1: ManifoldFull [x1 = (runif 0 1), x2 = var \"x\"]\n  source py \"foo.py\" (\"add\", \"runif\")\n  foo x = add (runif 0 1) x\n\nAdd and runif are both fully applied and their arguments are associated with\nexpressions.\n\n#2: for `add`:  ManifoldPass [x1 : \"float\", x2 : \"float\"]\n  source py \"foo.py\" (\"add\")\n  foo xs = zipWith add xs\n\nx1 and x2 are supplied by the source function\n\n#3: ManifoldPart [x1 = (runif 0 1), x2 = var \"x\"] [x2 : \"float\"]\n  source py \"foo.py\" (\"add\", \"runif\")\n  foo xs = map (add (runif 0 1)) xs\n\nadd takes one expression and one bound argument.\n\nThe \"context\" comes first here, it can be partially applied\n-}\ndata ManifoldForm context bound\n  = -- | Unapplied function passed as argument.\n    ManifoldPass [Arg bound]\n  | -- | Fully applied function.\n    ManifoldFull [Arg context]\n  | -- | Partially applied function\n    ManifoldPart [Arg context] [Arg bound]\n  deriving (Show, Eq, Ord)\n\nmanifoldContext :: ManifoldForm a b -> [Arg a]\nmanifoldContext (ManifoldFull xs) = xs\nmanifoldContext (ManifoldPass _) = []\nmanifoldContext (ManifoldPart xs _) = xs\n\nmanifoldBound :: ManifoldForm a b -> [Arg b]\nmanifoldBound (ManifoldFull _) = []\nmanifoldBound (ManifoldPass xs) = xs\nmanifoldBound (ManifoldPart _ ys) = ys\n\ninstance Bifunctor ManifoldForm where\n  bimapM f _ (ManifoldFull xs) = ManifoldFull <$> mapM (\\(Arg i x) -> Arg i <$> f x) xs\n  bimapM _ g (ManifoldPass xs) = ManifoldPass <$> mapM (\\(Arg i x) -> Arg i <$> g x) xs\n  bimapM f g (ManifoldPart xs ys) =\n    ManifoldPart\n      <$> mapM (\\(Arg i x) -> Arg i <$> f x) xs\n      <*> mapM (\\(Arg i x) -> Arg i <$> g x) ys\n\ninstance Bifoldable ManifoldForm where\n  bilistM f _ (ManifoldFull xs) = mapM (f . val) xs\n  bilistM _ g (ManifoldPass xs) = mapM (g . val) xs\n  bilistM f g (ManifoldPart xs ys) = (<>) <$> mapM (f . val) xs <*> mapM (g . val) ys\n\nabimapM ::\n  (Monad m) => (Int -> a -> m a') -> (Int -> b -> m b') -> ManifoldForm a b -> m (ManifoldForm a' b')\nabimapM f _ (ManifoldFull xs) = ManifoldFull <$> mapM (\\t -> annotate (ann t) <$> f (ann t) (val t)) xs\nabimapM _ g (ManifoldPass xs) = ManifoldPass <$> mapM (\\t -> annotate (ann t) <$> g (ann t) (val t)) xs\nabimapM f g (ManifoldPart xs ys) =\n  ManifoldPart\n    <$> mapM (\\t -> annotate (ann t) <$> f (ann t) (val t)) xs\n    <*> mapM (\\t -> annotate (ann t) <$> g (ann t) (val t)) ys\n\nafirstM :: (Monad m) => (Int -> a -> m a') -> ManifoldForm a b -> m (ManifoldForm a' b)\nafirstM f = abimapM f (return2 seq)\n\nasecondM :: (Monad m) => (Int -> b -> m b') -> ManifoldForm a b -> m (ManifoldForm a b')\nasecondM = abimapM (return2 seq)\n\nabimap :: (Int -> a -> a') -> (Int -> b -> b') -> ManifoldForm a b -> ManifoldForm a' b'\nabimap f g = runIdentity . abimapM (return2 f) (return2 g)\n\nafirst :: (Int -> a -> a') -> ManifoldForm a b -> ManifoldForm a' b\nafirst f = runIdentity . afirstM (return2 f)\n\nasecond :: (Int -> b -> b') -> ManifoldForm a b -> ManifoldForm a b'\nasecond f = runIdentity . asecondM (return2 f)\n\nabilistM :: (Monad m) => (Int -> a -> m c) -> (Int -> b -> m c) -> ManifoldForm a b -> m [c]\nabilistM f _ (ManifoldFull xs) = mapM (annappM f) xs\nabilistM _ g (ManifoldPass xs) = mapM (annappM g) xs\nabilistM f g (ManifoldPart xs ys) = (<>) <$> mapM (annappM f) xs <*> mapM (annappM g) ys\n\nabilist :: (Int -> a -> c) -> (Int -> b -> c) -> ManifoldForm a b -> [c]\nabilist f g = runIdentity . abilistM (return2 f) (return2 g)\n\nabiappendM ::\n  (Monad m, Monoid c) => (Int -> a -> m c) -> (Int -> b -> m c) -> ManifoldForm a b -> m c\nabiappendM f g = fmap mconcat . abilistM f g\n\nabiappend :: (Monoid c) => (Int -> a -> c) -> (Int -> b -> c) -> ManifoldForm a b -> c\nabiappend f g = runIdentity . abiappendM (return2 f) (return2 g)\n\ninstance Pretty FVar where\n  pretty (FV _ c) = pretty c\n\ndata RemoteForm = ForeignCall | RemoteCall RemoteResources\n  deriving (Show, Eq)\n\ndata HeadManifoldForm\n  = HeadManifoldFormLocalRoot\n  | HeadManifoldFormRemoteWorker\n  | HeadManifoldFormLocalForeign\n  deriving (Show, Eq)\n\ndata PolyHead = PolyHead Lang Int [Arg None] PolyExpr\n\n-- no serialization and no argument types\ndata PolyExpr\n  = -- organizational terms that may have undefined types\n    PolyManifold Lang Int (ManifoldForm None (Maybe Type)) PolyExpr\n  | PolyRemoteInterface\n      Lang -- foreign language\n      (Indexed Type) -- return type in calling language\n      [Int] -- argument ids\n      RemoteForm\n      PolyExpr -- foreign expression\n  | PolyLet Int PolyExpr PolyExpr\n  | PolyReturn PolyExpr\n  | PolyApp PolyExpr [PolyExpr]\n  | -- variables in the original tree will all be typed\n    -- but I also may need to generate passthrough terms\n    PolyBndVar\n      ( Three\n          Lang -- no type information is known\n          Type -- the general type is known, but this is a passing variable without an locally identifiable concrete index\n          (Indexed Type)\n      )\n      Int\n  | -- The Let variables are generated only in partialExpress, where the type is known\n    PolyLetVar (Indexed Type) Int\n  | -- terms that map 1:1 versus SAnno; have defined types in one language\n    PolyExe (Indexed Type) ExecutableExpressionPool\n  | -- data types\n    PolyList (Indexed TVar) (Indexed Type) [PolyExpr]\n  | PolyTuple (Indexed TVar) [(Indexed Type, PolyExpr)]\n  | PolyRecord NamType (Indexed TVar) [Indexed Type] [(Key, (Indexed Type, PolyExpr))]\n  | PolyLog (Indexed TVar) Bool\n  | PolyReal (Indexed TVar) Scientific\n  | PolyInt (Indexed TVar) Integer\n  | PolyStr (Indexed TVar) Text\n  | PolyNull (Indexed TVar)\n  | PolyDoBlock (Indexed Type) PolyExpr\n  | PolyEval (Indexed Type) PolyExpr\n  | PolyCoerce Coercion (Indexed Type) PolyExpr\n  | PolyIf PolyExpr PolyExpr PolyExpr\n  | PolyIntrinsic (Indexed Type) Intrinsic [PolyExpr]\n\ndata MonoHead = MonoHead Lang Int [Arg None] HeadManifoldForm MonoExpr\n\ndata MonoExpr\n  = -- organizational terms that may have undefined types\n    MonoManifold Int (ManifoldForm None (Maybe Type)) MonoExpr\n  | MonoPoolCall\n      (Indexed Type) -- return type in calling language\n      Int -- foreign manifold id\n      Socket -- shell command components that preceed the passed data\n      RemoteForm\n      [Arg None] -- arguments\n  | MonoLet Int MonoExpr MonoExpr\n  | MonoLetVar (Indexed Type) Int\n  | MonoReturn MonoExpr\n  | MonoApp MonoExpr [MonoExpr]\n  | -- terms that map 1:1 versus SAnno; have defined types in one language\n    MonoExe (Indexed Type) ExecutableExpressionPool\n  | MonoBndVar (Three None Type (Indexed Type)) Int -- (Three Lang Type (Indexed Type)) Int  -- (Maybe (Indexed Type))\n  -- data types\n  | MonoRecord NamType (Indexed TVar) [Indexed Type] [(Key, (Indexed Type, MonoExpr))]\n  | MonoList (Indexed TVar) (Indexed Type) [MonoExpr]\n  | MonoTuple (Indexed TVar) [(Indexed Type, MonoExpr)]\n  | MonoLog (Indexed TVar) Bool\n  | MonoReal (Indexed TVar) Scientific\n  | MonoInt (Indexed TVar) Integer\n  | MonoStr (Indexed TVar) Text\n  | MonoNull (Indexed TVar)\n  | MonoDoBlock (Indexed Type) MonoExpr\n  | MonoEval (Indexed Type) MonoExpr\n  | MonoCoerce Coercion (Indexed Type) MonoExpr\n  | MonoIf MonoExpr MonoExpr MonoExpr\n  | MonoIntrinsic (Indexed Type) Intrinsic [MonoExpr]\n\ndata PoolCall\n  = PoolCall\n      Int -- foreign manifold id\n      Socket\n      RemoteForm\n      [Arg TypeM] -- contextual argument that are passed to the foreign function\n      -- (not the main arguments to the foreign function)\n  deriving (Show)\n\n{- | Represents a single data value that may be passed as an argument through a\npool. It may be serialized, native, or both. If it is serialized only, then\nit may be a passthrough type, in whch case its type is not known.\n-}\ndata ArgTypes\n  = SerialOnly TypeS\n  | NativeOnly TypeF\n  | SerialAndNative TypeF\n  deriving (Show)\n\ndata NativeManifold = NativeManifold Int Lang (ManifoldForm (Or TypeS TypeF) TypeF) NativeExpr\n  deriving (Show)\n\ndata SerialManifold\n  = SerialManifold Int Lang (ManifoldForm (Or TypeS TypeF) TypeS) HeadManifoldForm SerialExpr\n  deriving (Show)\n\ndata SerialArg = SerialArgManifold SerialManifold | SerialArgExpr SerialExpr\n  deriving (Show)\n\ndata NativeArg = NativeArgManifold NativeManifold | NativeArgExpr NativeExpr\n  deriving (Show)\n\ndata SerialExpr\n  = ManS SerialManifold\n  | AppPoolS TypeF PoolCall [SerialArg]\n  | AppRecS TypeF Int [SerialExpr]\n    -- ^ Same-language recursive call: return type, manifold ID, serialized args\n  | AppForeignRecS TypeF Int Socket [SerialExpr]\n    -- ^ Cross-language recursive call: return type, manifold ID, socket, serialized args\n  | ReturnS SerialExpr\n  | SerialLetS Int SerialExpr SerialExpr\n  | NativeLetS Int NativeExpr SerialExpr\n  | LetVarS (Maybe TypeF) Int\n  | BndVarS (Maybe TypeF) Int\n  | SerializeS SerialAST NativeExpr\n  deriving (Show)\n\ndata NativeExpr\n  = ManN NativeManifold\n  | AppExeN TypeF ExecutableExpressionPool [NativeArg]\n  | ReturnN NativeExpr\n  | SerialLetN Int SerialExpr NativeExpr\n  | NativeLetN Int NativeExpr NativeExpr\n  | LetVarN TypeF Int\n  | BndVarN TypeF Int\n  | DeserializeN TypeF SerialAST SerialExpr\n  | ExeN TypeF ExecutableExpressionPool\n  | -- data types\n    ListN FVar TypeF [NativeExpr]\n  | TupleN FVar [NativeExpr]\n  | RecordN NamType FVar [TypeF] [(Key, NativeExpr)]\n  | LogN FVar Bool\n  | RealN FVar Scientific\n  | IntN FVar Integer\n  | StrN FVar Text\n  | NullN FVar\n  | DoBlockN TypeF NativeExpr\n  | EvalN TypeF NativeExpr\n  | CoerceN Coercion TypeF NativeExpr\n  | IfN TypeF NativeExpr NativeExpr NativeExpr\n  | IntrinsicN TypeF Intrinsic (Maybe Text) [NativeExpr]\n  -- ^ The Maybe Text is the precomputed msgpack schema string for the data arg\n  -- (Nothing for compile-time intrinsics that are resolved by Reduce)\n  deriving (Show)\n\nfoldlSM :: (b -> a -> b) -> b -> SerialManifold_ a -> b\nfoldlSM f b (SerialManifold_ _ _ _ _ se) = f b se\n\nfoldlNM :: (b -> a -> b) -> b -> NativeManifold_ a -> b\nfoldlNM f b (NativeManifold_ _ _ _ ne) = f b ne\n\nfoldlSA :: (b -> a -> b) -> b -> SerialArg_ a a -> b\nfoldlSA f b (SerialArgManifold_ sm) = f b sm\nfoldlSA f b (SerialArgExpr_ se) = f b se\n\nfoldlNA :: (b -> a -> b) -> b -> NativeArg_ a a -> b\nfoldlNA f b (NativeArgManifold_ nm) = f b nm\nfoldlNA f b (NativeArgExpr_ ne) = f b ne\n\nfoldlSE :: (b -> a -> b) -> b -> SerialExpr_ a a a a a -> b\nfoldlSE f b (ManS_ x) = f b x\nfoldlSE f b (AppPoolS_ _ _ xs) = foldl f b xs\nfoldlSE f b (AppRecS_ _ _ xs) = foldl f b xs\nfoldlSE f b (AppForeignRecS_ _ _ _ xs) = foldl f b xs\nfoldlSE f b (ReturnS_ x) = f b x\nfoldlSE f b (SerialLetS_ _ x1 x2) = foldl f b [x1, x2]\nfoldlSE f b (NativeLetS_ _ x1 x2) = foldl f b [x1, x2]\nfoldlSE _ b (LetVarS_ _ _) = b\nfoldlSE _ b (BndVarS_ _ _) = b\nfoldlSE f b (SerializeS_ _ x) = f b x\n\nfoldlNE :: (b -> a -> b) -> b -> NativeExpr_ a a a a a -> b\nfoldlNE f b (AppExeN_ _ _ xs) = foldl f b xs\nfoldlNE f b (ManN_ x) = f b x\nfoldlNE f b (ReturnN_ x) = f b x\nfoldlNE f b (SerialLetN_ _ x1 x2) = foldl f b [x1, x2]\nfoldlNE f b (NativeLetN_ _ x1 x2) = foldl f b [x1, x2]\nfoldlNE _ b (LetVarN_ _ _) = b\nfoldlNE _ b (BndVarN_ _ _) = b\nfoldlNE f b (DeserializeN_ _ _ x) = f b x\nfoldlNE _ b (ExeN_ _ _) = b\nfoldlNE f b (ListN_ _ _ xs) = foldl f b xs\nfoldlNE f b (TupleN_ _ xs) = foldl f b xs\nfoldlNE f b (RecordN_ _ _ _ rs) = foldl (\\b' (_, a') -> f b' a') b rs\nfoldlNE _ b (LogN_ _ _) = b\nfoldlNE _ b (RealN_ _ _) = b\nfoldlNE _ b (IntN_ _ _) = b\nfoldlNE _ b (StrN_ _ _) = b\nfoldlNE _ b (NullN_ _) = b\nfoldlNE f b (DoBlockN_ _ x) = f b x\nfoldlNE f b (EvalN_ _ x) = f b x\nfoldlNE f b (CoerceN_ _ _ x) = f b x\nfoldlNE f b (IfN_ _ c t e) = foldl f b [c, t, e]\nfoldlNE f b (IntrinsicN_ _ _ _ xs) = foldl f b xs\n\ndata MonoidFold m a = MonoidFold\n  { monoidSerialManifold :: SerialManifold_ (a, SerialExpr) -> m (a, SerialManifold)\n  , monoidNativeManifold :: NativeManifold_ (a, NativeExpr) -> m (a, NativeManifold)\n  , monoidSerialArg :: SerialArg_ (a, SerialManifold) (a, SerialExpr) -> m (a, SerialArg)\n  , monoidNativeArg :: NativeArg_ (a, NativeManifold) (a, NativeExpr) -> m (a, NativeArg)\n  , monoidSerialExpr ::\n      SerialExpr_ (a, SerialManifold) (a, SerialExpr) (a, NativeExpr) (a, SerialArg) (a, NativeArg) ->\n      m (a, SerialExpr)\n  , monoidNativeExpr ::\n      NativeExpr_ (a, NativeManifold) (a, SerialExpr) (a, NativeExpr) (a, SerialArg) (a, NativeArg) ->\n      m (a, NativeExpr)\n  }\n\nmakeMonoidFoldDefault :: (Monad m) => a -> (a -> a -> a) -> MonoidFold m a\nmakeMonoidFoldDefault mempty' mappend' =\n  MonoidFold\n    { monoidSerialManifold = monoidSerialManifold'\n    , monoidNativeManifold = monoidNativeManifold'\n    , monoidSerialArg = monoidSerialArg'\n    , monoidNativeArg = monoidNativeArg'\n    , monoidSerialExpr = monoidSerialExpr'\n    , monoidNativeExpr = monoidNativeExpr'\n    }\n  where\n    monoidSerialManifold' (SerialManifold_ m lang form headForm (req, ne)) = do\n      return (req, SerialManifold m lang form headForm ne)\n\n    monoidNativeManifold' (NativeManifold_ m lang form (req, ne)) = do\n      return (req, NativeManifold m lang form ne)\n\n    monoidSerialArg' (SerialArgManifold_ (req, sm)) = return (req, SerialArgManifold sm)\n    monoidSerialArg' (SerialArgExpr_ (req, se)) = return (req, SerialArgExpr se)\n\n    monoidNativeArg' (NativeArgManifold_ (req, nm)) = return (req, NativeArgManifold nm)\n    monoidNativeArg' (NativeArgExpr_ (req, ne)) = return (req, NativeArgExpr ne)\n\n    monoidSerialExpr' (ManS_ (req, sm)) = return (req, ManS sm)\n    monoidSerialExpr' (AppPoolS_ t p (unzip -> (reqs, es))) = return (foldl mappend' mempty' reqs, AppPoolS t p es)\n    monoidSerialExpr' (AppRecS_ t m (unzip -> (reqs, es))) = return (foldl mappend' mempty' reqs, AppRecS t m es)\n    monoidSerialExpr' (AppForeignRecS_ t m s (unzip -> (reqs, es))) = return (foldl mappend' mempty' reqs, AppForeignRecS t m s es)\n    monoidSerialExpr' (ReturnS_ (req, se)) = return (req, ReturnS se)\n    monoidSerialExpr' (SerialLetS_ i (req1, se1) (req2, se2)) = return (mappend' req1 req2, SerialLetS i se1 se2)\n    monoidSerialExpr' (NativeLetS_ i (req1, ne) (req2, se)) = return (mappend' req1 req2, NativeLetS i ne se)\n    monoidSerialExpr' (LetVarS_ mayT i) = return (mempty', LetVarS mayT i)\n    monoidSerialExpr' (BndVarS_ mayT i) = return (mempty', BndVarS mayT i)\n    monoidSerialExpr' (SerializeS_ s (req, ne)) = return (req, SerializeS s ne)\n\n    monoidNativeExpr' (ManN_ (req, nm)) = return (req, ManN nm)\n    monoidNativeExpr' (AppExeN_ t exe (unzip -> (reqs, es))) = return (foldl mappend' mempty' reqs, AppExeN t exe es)\n    monoidNativeExpr' (ReturnN_ (req, ne)) = return (req, ReturnN ne)\n    monoidNativeExpr' (SerialLetN_ i (req1, se) (req2, ne)) = return (mappend' req1 req2, SerialLetN i se ne)\n    monoidNativeExpr' (NativeLetN_ i (req1, ne1) (req2, ne2)) = return (mappend' req1 req2, NativeLetN i ne1 ne2)\n    monoidNativeExpr' (LetVarN_ t i) = return (mempty', LetVarN t i)\n    monoidNativeExpr' (BndVarN_ t i) = return (mempty', BndVarN t i)\n    monoidNativeExpr' (DeserializeN_ t s (req, e)) = return (req, DeserializeN t s e)\n    monoidNativeExpr' (ExeN_ t exe) = return (mempty', ExeN t exe)\n    monoidNativeExpr' (ListN_ v t xs) = return (foldl mappend' mempty' (map fst xs), ListN v t (map snd xs))\n    monoidNativeExpr' (TupleN_ v xs) = return (foldl mappend' mempty' (map fst xs), TupleN v $ map snd xs)\n    monoidNativeExpr' (RecordN_ o v ps rs) =\n      return\n        ( foldl mappend' mempty' $ map (fst . snd) rs\n        , RecordN o v ps (map (second snd) rs)\n        )\n    monoidNativeExpr' (LogN_ v x) = return (mempty', LogN v x)\n    monoidNativeExpr' (RealN_ v x) = return (mempty', RealN v x)\n    monoidNativeExpr' (IntN_ v x) = return (mempty', IntN v x)\n    monoidNativeExpr' (StrN_ v x) = return (mempty', StrN v x)\n    monoidNativeExpr' (NullN_ v) = return (mempty', NullN v)\n    monoidNativeExpr' (DoBlockN_ t (a, ne)) = return (a, DoBlockN t ne)\n    monoidNativeExpr' (EvalN_ t (a, ne)) = return (a, EvalN t ne)\n    monoidNativeExpr' (CoerceN_ c t (a, ne)) = return (a, CoerceN c t ne)\n    monoidNativeExpr' (IfN_ t (a1, c) (a2, thenE) (a3, elseE)) =\n      return (foldl mappend' mempty' [a1, a2, a3], IfN t c thenE elseE)\n    monoidNativeExpr' (IntrinsicN_ t intr msch (unzip -> (reqs, es))) =\n      return (foldl mappend' mempty' reqs, IntrinsicN t intr msch es)\n\n-- where\n--  * m - monad\n--  * sm - SerialManifold folded type\n--  * nm - NativeManifold\n--  * se - SerialExpr\n--  * ne - NativeExpr\n--  * sr - SerialArg\n--  * nr - NativeArg\ndata FoldManifoldM m sm nm se ne sr nr = FoldManifoldM\n  { opSerialManifoldM :: SerialManifold_ se -> m sm\n  , opNativeManifoldM :: NativeManifold_ ne -> m nm\n  , opSerialExprM :: SerialExpr_ sm se ne sr nr -> m se\n  , opNativeExprM :: NativeExpr_ nm se ne sr nr -> m ne\n  , opSerialArgM :: SerialArg_ sm se -> m sr\n  , opNativeArgM :: NativeArg_ nm ne -> m nr\n  }\n\ninstance (Monoid a, Monad m, a ~ b, a ~ c, a ~ d, a ~ e, a ~ f) => Defaultable (FoldManifoldM m a b c d e f) where\n  defaultValue =\n    FoldManifoldM\n      { opSerialManifoldM = return . foldlSM mappend mempty\n      , opNativeManifoldM = return . foldlNM mappend mempty\n      , opSerialExprM = return . foldlSE mappend mempty\n      , opNativeExprM = return . foldlNE mappend mempty\n      , opSerialArgM = return . foldlSA mappend mempty\n      , opNativeArgM = return . foldlNA mappend mempty\n      }\n\ninstance\n  (Monoid a, Monad m, a ~ b, a ~ c, a ~ d, a ~ e, a ~ f) =>\n  Defaultable (FoldWithManifoldM m a b c d e f)\n  where\n  defaultValue =\n    FoldWithManifoldM\n      { opFoldWithSerialManifoldM = \\_ e -> return . foldlSM mappend mempty $ e\n      , opFoldWithNativeManifoldM = \\_ e -> return . foldlNM mappend mempty $ e\n      , opFoldWithSerialExprM = \\_ e -> return . foldlSE mappend mempty $ e\n      , opFoldWithNativeExprM = \\_ e -> return . foldlNE mappend mempty $ e\n      , opFoldWithSerialArgM = \\_ e -> return . foldlSA mappend mempty $ e\n      , opFoldWithNativeArgM = \\_ e -> return . foldlNA mappend mempty $ e\n      }\n\ndata FoldWithManifoldM m sm nm se ne sr nr = FoldWithManifoldM\n  { opFoldWithSerialManifoldM :: SerialManifold -> SerialManifold_ se -> m sm\n  , opFoldWithNativeManifoldM :: NativeManifold -> NativeManifold_ ne -> m nm\n  , opFoldWithSerialExprM :: SerialExpr -> SerialExpr_ sm se ne sr nr -> m se\n  , opFoldWithNativeExprM :: NativeExpr -> NativeExpr_ nm se ne sr nr -> m ne\n  , opFoldWithSerialArgM :: SerialArg -> SerialArg_ sm se -> m sr\n  , opFoldWithNativeArgM :: NativeArg -> NativeArg_ nm ne -> m nr\n  }\n\ndata SurroundManifoldM m sm nm se ne sr nr = SurroundManifoldM\n  { surroundSerialManifoldM :: (SerialManifold -> m sm) -> SerialManifold -> m sm\n  , surroundNativeManifoldM :: (NativeManifold -> m nm) -> NativeManifold -> m nm\n  , surroundSerialExprM :: (SerialExpr -> m se) -> SerialExpr -> m se\n  , surroundNativeExprM :: (NativeExpr -> m ne) -> NativeExpr -> m ne\n  , surroundSerialArgM :: (SerialArg -> m sr) -> SerialArg -> m sr\n  , surroundNativeArgM :: (NativeArg -> m nr) -> NativeArg -> m nr\n  }\n\ninstance Defaultable (SurroundManifoldM m sm nm se ne sr nr) where\n  defaultValue =\n    SurroundManifoldM\n      { surroundSerialManifoldM = \\f x -> f x\n      , surroundNativeManifoldM = \\f x -> f x\n      , surroundSerialExprM = \\f x -> f x\n      , surroundNativeExprM = \\f x -> f x\n      , surroundSerialArgM = \\f x -> f x\n      , surroundNativeArgM = \\f x -> f x\n      }\n\ninstance (HasTypeF a) => HasTypeM (Maybe a) where\n  typeMof (Just x) = Serial (typeFof x)\n  typeMof Nothing = Passthrough\n\nclass MayHaveTypeF a where\n  mayHaveTypeF :: a -> Maybe TypeF\n\ninstance MayHaveTypeF TypeF where\n  mayHaveTypeF = Just\n\ninstance MayHaveTypeF TypeS where\n  mayHaveTypeF PassthroughS = Nothing\n  mayHaveTypeF (SerialS t) = Just t\n  mayHaveTypeF (FunctionS ts t) = FunF <$> mapM mayHaveTypeF ts <*> mayHaveTypeF t\n\ninstance MayHaveTypeF TypeM where\n  mayHaveTypeF Passthrough = Nothing\n  mayHaveTypeF (Serial t) = Just t\n  mayHaveTypeF (Native t) = Just t\n  mayHaveTypeF (Function ts t) = FunF <$> mapM mayHaveTypeF ts <*> mayHaveTypeF t\n\ndata NativeManifold_ ne = NativeManifold_ Int Lang (ManifoldForm (Or TypeS TypeF) TypeF) ne\ndata SerialManifold_ se\n  = SerialManifold_ Int Lang (ManifoldForm (Or TypeS TypeF) TypeS) HeadManifoldForm se\ndata SerialArg_ sm se = SerialArgManifold_ sm | SerialArgExpr_ se\ndata NativeArg_ nm ne = NativeArgManifold_ nm | NativeArgExpr_ ne\n\ntypeMofRs :: [Arg (Or TypeS TypeF)] -> [Arg TypeM]\ntypeMofRs rs = concat [[Arg i t | t <- bilist typeMof typeMof orT] | (Arg i orT) <- rs]\n\ntypeMofForm :: (HasTypeM t) => ManifoldForm (Or TypeS TypeF) t -> [Arg TypeM]\ntypeMofForm =\n  concat . abilist (\\i r -> [Arg i t | t <- bilist typeMof typeMof r]) (\\i r -> [Arg i (typeMof r)])\n\ndata SerialExpr_ sm se ne sr nr\n  = ManS_ sm\n  | AppPoolS_ TypeF PoolCall [sr]\n  | AppRecS_ TypeF Int [se]\n  | AppForeignRecS_ TypeF Int Socket [se]\n  | ReturnS_ se\n  | SerialLetS_ Int se se\n  | NativeLetS_ Int ne se\n  | LetVarS_ (Maybe TypeF) Int\n  | BndVarS_ (Maybe TypeF) Int\n  | SerializeS_ SerialAST ne\n\ndata NativeExpr_ nm se ne sr nr\n  = AppExeN_ TypeF ExecutableExpressionPool [nr]\n  | ManN_ nm\n  | ReturnN_ ne\n  | SerialLetN_ Int se ne\n  | NativeLetN_ Int ne ne\n  | LetVarN_ TypeF Int\n  | BndVarN_ TypeF Int\n  | DeserializeN_ TypeF SerialAST se\n  | ExeN_ TypeF ExecutableExpressionPool\n  | -- data types\n    ListN_ FVar TypeF [ne]\n  | TupleN_ FVar [ne]\n  | RecordN_ NamType FVar [TypeF] [(Key, ne)]\n  | LogN_ FVar Bool\n  | RealN_ FVar Scientific\n  | IntN_ FVar Integer\n  | StrN_ FVar Text\n  | NullN_ FVar\n  | DoBlockN_ TypeF ne\n  | EvalN_ TypeF ne\n  | CoerceN_ Coercion TypeF ne\n  | IfN_ TypeF ne ne ne\n  | IntrinsicN_ TypeF Intrinsic (Maybe Text) [ne]\n\nmanifoldFoldToFoldWith :: FoldManifoldM m sm nm se ne sr nr -> FoldWithManifoldM m sm nm se ne sr nr\nmanifoldFoldToFoldWith fm =\n  FoldWithManifoldM\n    { opFoldWithSerialManifoldM = \\_ e -> opSerialManifoldM fm e\n    , opFoldWithNativeManifoldM = \\_ e -> opNativeManifoldM fm e\n    , opFoldWithSerialExprM = \\_ e -> opSerialExprM fm e\n    , opFoldWithNativeExprM = \\_ e -> opNativeExprM fm e\n    , opFoldWithSerialArgM = \\_ e -> opSerialArgM fm e\n    , opFoldWithNativeArgM = \\_ e -> opNativeArgM fm e\n    }\n\nfoldSerialManifoldM :: (Monad m) => FoldManifoldM m sm nm se ne sr nr -> SerialManifold -> m sm\nfoldSerialManifoldM = surroundFoldSerialManifoldM defaultValue . manifoldFoldToFoldWith\n\nfoldNativeManifoldM :: (Monad m) => FoldManifoldM m sm nm se ne sr nr -> NativeManifold -> m nm\nfoldNativeManifoldM = surroundFoldNativeManifoldM defaultValue . manifoldFoldToFoldWith\n\nfoldSerialArgM :: (Monad m) => FoldManifoldM m sm nm se ne sr nr -> SerialArg -> m sr\nfoldSerialArgM = surroundFoldSerialArgM defaultValue . manifoldFoldToFoldWith\n\nfoldNativeArgM :: (Monad m) => FoldManifoldM m sm nm se ne sr nr -> NativeArg -> m nr\nfoldNativeArgM = surroundFoldNativeArgM defaultValue . manifoldFoldToFoldWith\n\nfoldSerialExprM :: (Monad m) => FoldManifoldM m sm nm se ne sr nr -> SerialExpr -> m se\nfoldSerialExprM = surroundFoldSerialExprM defaultValue . manifoldFoldToFoldWith\n\nfoldNativeExprM :: (Monad m) => FoldManifoldM m sm nm se ne sr nr -> NativeExpr -> m ne\nfoldNativeExprM = surroundFoldNativeExprM defaultValue . manifoldFoldToFoldWith\n\nfoldWithSerialManifoldM ::\n  (Monad m) => FoldWithManifoldM m sm nm se ne sr nr -> SerialManifold -> m sm\nfoldWithSerialManifoldM = surroundFoldSerialManifoldM defaultValue\n\nfoldWithNativeManifoldM ::\n  (Monad m) => FoldWithManifoldM m sm nm se ne sr nr -> NativeManifold -> m nm\nfoldWithNativeManifoldM = surroundFoldNativeManifoldM defaultValue\n\nfoldWithSerialArgM :: (Monad m) => FoldWithManifoldM m sm nm se ne sr nr -> SerialArg -> m sr\nfoldWithSerialArgM = surroundFoldSerialArgM defaultValue\n\nfoldWithNativeArgM :: (Monad m) => FoldWithManifoldM m sm nm se ne sr nr -> NativeArg -> m nr\nfoldWithNativeArgM = surroundFoldNativeArgM defaultValue\n\nfoldWithSerialExprM :: (Monad m) => FoldWithManifoldM m sm nm se ne sr nr -> SerialExpr -> m se\nfoldWithSerialExprM = surroundFoldSerialExprM defaultValue\n\nfoldWithNativeExprM :: (Monad m) => FoldWithManifoldM m sm nm se ne sr nr -> NativeExpr -> m ne\nfoldWithNativeExprM = surroundFoldNativeExprM defaultValue\n\nsurroundFoldSerialManifoldM ::\n  (Monad m) =>\n  SurroundManifoldM m sm nm se ne sr nr ->\n  FoldWithManifoldM m sm nm se ne sr nr ->\n  SerialManifold ->\n  m sm\nsurroundFoldSerialManifoldM sfm fm = surroundSerialManifoldM sfm f\n  where\n    f full@(SerialManifold m lang form headForm e) = do\n      e' <- surroundFoldSerialExprM sfm fm e\n      opFoldWithSerialManifoldM fm full $ SerialManifold_ m lang form headForm e'\n\nsurroundFoldNativeManifoldM ::\n  (Monad m) =>\n  SurroundManifoldM m sm nm se ne sr nr ->\n  FoldWithManifoldM m sm nm se ne sr nr ->\n  NativeManifold ->\n  m nm\nsurroundFoldNativeManifoldM sfm fm = surroundNativeManifoldM sfm f\n  where\n    f full@(NativeManifold m lang form e) = do\n      e' <- surroundFoldNativeExprM sfm fm e\n      opFoldWithNativeManifoldM fm full $ NativeManifold_ m lang form e'\n\nsurroundFoldSerialArgM ::\n  (Monad m) =>\n  SurroundManifoldM m sm nm se ne sr nr ->\n  FoldWithManifoldM m sm nm se ne sr nr ->\n  SerialArg ->\n  m sr\nsurroundFoldSerialArgM sfm fm = surroundSerialArgM sfm f\n  where\n    f full@(SerialArgManifold sm) = do\n      sm' <- surroundFoldSerialManifoldM sfm fm sm\n      opFoldWithSerialArgM fm full $ SerialArgManifold_ sm'\n    f full@(SerialArgExpr se) = do\n      se' <- surroundFoldSerialExprM sfm fm se\n      opFoldWithSerialArgM fm full $ SerialArgExpr_ se'\n\nsurroundFoldNativeArgM ::\n  (Monad m) =>\n  SurroundManifoldM m sm nm se ne sr nr ->\n  FoldWithManifoldM m sm nm se ne sr nr ->\n  NativeArg ->\n  m nr\nsurroundFoldNativeArgM sfm fm = surroundNativeArgM sfm f\n  where\n    f full@(NativeArgManifold nm) = do\n      nm' <- surroundFoldNativeManifoldM sfm fm nm\n      opFoldWithNativeArgM fm full $ NativeArgManifold_ nm'\n    f full@(NativeArgExpr ne) = do\n      ne' <- surroundFoldNativeExprM sfm fm ne\n      opFoldWithNativeArgM fm full $ NativeArgExpr_ ne'\n\nsurroundFoldSerialExprM ::\n  (Monad m) =>\n  SurroundManifoldM m sm nm se ne sr nr ->\n  FoldWithManifoldM m sm nm se ne sr nr ->\n  SerialExpr ->\n  m se\nsurroundFoldSerialExprM sfm fm = surroundSerialExprM sfm f\n  where\n    f full@(ManS e) = do\n      e' <- surroundFoldSerialManifoldM sfm fm e\n      opFoldWithSerialExprM fm full $ ManS_ e'\n    f full@(AppPoolS t pool es) = do\n      es' <- mapM (surroundFoldSerialArgM sfm fm) es\n      opFoldWithSerialExprM fm full $ AppPoolS_ t pool es'\n    f full@(AppRecS t m es) = do\n      es' <- mapM (surroundFoldSerialExprM sfm fm) es\n      opFoldWithSerialExprM fm full $ AppRecS_ t m es'\n    f full@(AppForeignRecS t m s es) = do\n      es' <- mapM (surroundFoldSerialExprM sfm fm) es\n      opFoldWithSerialExprM fm full $ AppForeignRecS_ t m s es'\n    f full@(ReturnS e) = do\n      e' <- surroundFoldSerialExprM sfm fm e\n      opFoldWithSerialExprM fm full $ ReturnS_ e'\n    f full@(SerialLetS i sa sb) = do\n      sa' <- surroundFoldSerialExprM sfm fm sa\n      sb' <- surroundFoldSerialExprM sfm fm sb\n      opFoldWithSerialExprM fm full $ SerialLetS_ i sa' sb'\n    f full@(NativeLetS i na sb) = do\n      sa' <- surroundFoldNativeExprM sfm fm na\n      nb' <- surroundFoldSerialExprM sfm fm sb\n      opFoldWithSerialExprM fm full $ NativeLetS_ i sa' nb'\n    f full@(LetVarS t i) = opFoldWithSerialExprM fm full (LetVarS_ t i)\n    f full@(BndVarS t i) = opFoldWithSerialExprM fm full (BndVarS_ t i)\n    f full@(SerializeS s e) = do\n      e' <- surroundFoldNativeExprM sfm fm e\n      opFoldWithSerialExprM fm full $ SerializeS_ s e'\n\nsurroundFoldNativeExprM ::\n  (Monad m) =>\n  SurroundManifoldM m sm nm se ne sr nr ->\n  FoldWithManifoldM m sm nm se ne sr nr ->\n  NativeExpr ->\n  m ne\nsurroundFoldNativeExprM sfm fm = surroundNativeExprM sfm f\n  where\n    f full@(AppExeN t exe nativeArgs) = do\n      nativeArgs' <- mapM (surroundFoldNativeArgM sfm fm) nativeArgs\n      opFoldWithNativeExprM fm full $ AppExeN_ t exe nativeArgs'\n    f full@(ManN nativeManifold) = do\n      nativeManifold' <- surroundFoldNativeManifoldM sfm fm nativeManifold\n      opFoldWithNativeExprM fm full $ ManN_ nativeManifold'\n    f full@(ReturnN ne) = do\n      ne' <- surroundFoldNativeExprM sfm fm ne\n      opFoldWithNativeExprM fm full $ ReturnN_ ne'\n    f full@(SerialLetN i se1 ne2) = do\n      se1' <- surroundFoldSerialExprM sfm fm se1\n      ne2' <- surroundFoldNativeExprM sfm fm ne2\n      opFoldWithNativeExprM fm full (SerialLetN_ i se1' ne2')\n    f full@(NativeLetN i ne1 ne2) = do\n      ne1' <- surroundFoldNativeExprM sfm fm ne1\n      ne2' <- surroundFoldNativeExprM sfm fm ne2\n      opFoldWithNativeExprM fm full (NativeLetN_ i ne1' ne2')\n    f full@(LetVarN t i) = opFoldWithNativeExprM fm full (LetVarN_ t i)\n    f full@(BndVarN t i) = opFoldWithNativeExprM fm full (BndVarN_ t i)\n    f full@(DeserializeN t s se) = do\n      se' <- surroundFoldSerialExprM sfm fm se\n      opFoldWithNativeExprM fm full (DeserializeN_ t s se')\n    f full@(ExeN t exe) = opFoldWithNativeExprM fm full (ExeN_ t exe)\n    f full@(ListN v t nes) = do\n      nes' <- mapM (surroundFoldNativeExprM sfm fm) nes\n      opFoldWithNativeExprM fm full (ListN_ v t nes')\n    f full@(TupleN t nes) = do\n      nes' <- mapM (surroundFoldNativeExprM sfm fm) nes\n      opFoldWithNativeExprM fm full (TupleN_ t nes')\n    f full@(RecordN o n ps rs) = do\n      rs' <- mapM (onSndM (surroundFoldNativeExprM sfm fm)) rs\n      opFoldWithNativeExprM fm full (RecordN_ o n ps rs')\n      where\n        onSndM :: (Monad m) => (b -> m b') -> (a, b) -> m (a, b')\n        onSndM g (a, b) = (,) a <$> g b\n    f full@(LogN t x) = opFoldWithNativeExprM fm full (LogN_ t x)\n    f full@(RealN t x) = opFoldWithNativeExprM fm full (RealN_ t x)\n    f full@(IntN t x) = opFoldWithNativeExprM fm full (IntN_ t x)\n    f full@(StrN t x) = opFoldWithNativeExprM fm full (StrN_ t x)\n    f full@(NullN t) = opFoldWithNativeExprM fm full (NullN_ t)\n    f full@(DoBlockN t ne) = do\n      ne' <- surroundFoldNativeExprM sfm fm ne\n      opFoldWithNativeExprM fm full (DoBlockN_ t ne')\n    f full@(EvalN t ne) = do\n      ne' <- surroundFoldNativeExprM sfm fm ne\n      opFoldWithNativeExprM fm full (EvalN_ t ne')\n    f full@(CoerceN c t ne) = do\n      ne' <- surroundFoldNativeExprM sfm fm ne\n      opFoldWithNativeExprM fm full (CoerceN_ c t ne')\n    f full@(IfN t cond thenE elseE) = do\n      cond' <- surroundFoldNativeExprM sfm fm cond\n      thenE' <- surroundFoldNativeExprM sfm fm thenE\n      elseE' <- surroundFoldNativeExprM sfm fm elseE\n      opFoldWithNativeExprM fm full (IfN_ t cond' thenE' elseE')\n    f full@(IntrinsicN t intr msch nes) = do\n      nes' <- mapM (surroundFoldNativeExprM sfm fm) nes\n      opFoldWithNativeExprM fm full (IntrinsicN_ t intr msch nes')\n\nclass HasTypeF a where\n  typeFof :: a -> TypeF\n\ninstance HasTypeF TypeF where\n  typeFof = id\n\ninstance HasTypeF NativeExpr where\n  typeFof (ManN nm) = typeFof nm\n  typeFof (AppExeN t _ _) = t\n  typeFof (ReturnN e) = typeFof e\n  typeFof (SerialLetN _ _ e) = typeFof e\n  typeFof (NativeLetN _ _ e) = typeFof e\n  typeFof (LetVarN t _) = t\n  typeFof (BndVarN t _) = t\n  typeFof (DeserializeN t _ _) = t\n  typeFof (ExeN t _) = t\n  typeFof (ListN v p _) = AppF (VarF v) [p]\n  typeFof (TupleN v (map typeFof -> ps)) = AppF (VarF v) ps\n  typeFof (RecordN o n ps (map (second typeFof) -> rs)) = NamF o n ps rs\n  typeFof (LogN v _) = VarF v\n  typeFof (RealN v _) = VarF v\n  typeFof (IntN v _) = VarF v\n  typeFof (StrN v _) = VarF v\n  typeFof (NullN v) = VarF v\n  typeFof (DoBlockN t _) = t\n  typeFof (EvalN t _) = t\n  typeFof (CoerceN _ t _) = t\n  typeFof (IfN t _ _ _) = t\n  typeFof (IntrinsicN t _ _ _) = t\n\nclass HasTypeM e where\n  typeMof :: e -> TypeM\n\ninstance HasTypeM TypeM where\n  typeMof = id\n\ninstance HasTypeM TypeF where\n  typeMof (FunF ts t) = Function (map typeMof ts) (typeMof t)\n  typeMof (UnkF _) = Passthrough\n  typeMof t = Native t\n\ninstance HasTypeM NativeExpr where\n  typeMof = typeMof . typeFof\n\ninstance HasTypeS TypeF where\n  typeSof (FunF ts t) = FunctionS (map typeMof ts) (typeSof t)\n  typeSof t = SerialS t\n\ninstance HasTypeS (Maybe TypeF) where\n  typeSof (Just t) = typeSof t\n  typeSof Nothing = PassthroughS\n\n-- TODO: fix this - the type of a native manifold should be the full function\n-- type, but the manifold function type may not be entirely native\ninstance HasTypeF NativeManifold where\n  typeFof (NativeManifold _ _ _ ne) = typeFof ne\n\ninstance HasTypeS SerialExpr where\n  typeSof (ManS sm) = typeSof sm\n  typeSof (AppPoolS t _ sargs) = FunctionS (map typeMof sargs) (SerialS t)\n  typeSof (AppRecS t _ _) = SerialS t\n  typeSof (AppForeignRecS t _ _ _) = SerialS t\n  typeSof (ReturnS e) = typeSof e\n  typeSof (SerialLetS _ _ e) = typeSof e\n  typeSof (NativeLetS _ _ e) = typeSof e\n  typeSof (LetVarS t _) = maybe PassthroughS SerialS t\n  typeSof (BndVarS t _) = maybe PassthroughS SerialS t\n  typeSof (SerializeS _ e) = SerialS (typeFof e)\n\ninstance HasTypeM SerialExpr where\n  typeMof = typeMof . typeSof\n\ninstance HasTypeM NativeManifold where\n  typeMof (NativeManifold _ _ form e) = typeOfManifold form (typeMof e)\n\ninstance HasTypeM SerialManifold where\n  typeMof (SerialManifold _ _ form _ e) = typeOfManifold form (typeMof e)\n\ninstance HasTypeS SerialManifold where\n  typeSof (SerialManifold _ _ form _ e) =\n    let inputTypes = concat $ bilist (bilist typeMof typeMof) (return . typeMof) form\n     in case inputTypes of\n          [] -> typeSof e\n          _ -> FunctionS inputTypes (typeSof e)\n\ninstance HasTypeS SerialArg where\n  typeSof (SerialArgManifold x) = typeSof x\n  typeSof (SerialArgExpr x) = typeSof x\n\ntypeOfManifold :: (HasTypeM e) => ManifoldForm (Or TypeS TypeF) e -> TypeM -> TypeM\ntypeOfManifold form outputType =\n  let inputTypes = concat $ bilist (bilist typeMof typeMof) (return . typeMof) form\n   in case inputTypes of\n        [] -> outputType\n        _ -> Function inputTypes outputType\n\ninstance HasTypeM SerialArg where\n  typeMof (SerialArgManifold sm) = typeMof sm\n  typeMof (SerialArgExpr e) = typeMof e\n\ninstance HasTypeM NativeArg where\n  typeMof (NativeArgManifold sm) = typeMof sm\n  typeMof (NativeArgExpr e) = typeMof e\n\n{- | Generate one or two types from an ArgType. These types may be native,\nserial, or (serial, native) (in that order). The serial types are rendered in\nthe serial form, currently strings. Note that this operation erases the type\nannotation for the serial type, if it exists.\n-}\nargTypesToTypeM :: ArgTypes -> [TypeM]\nargTypesToTypeM (SerialOnly (typeMof -> t)) = [t]\nargTypesToTypeM (NativeOnly (typeMof -> t)) = [t]\nargTypesToTypeM (SerialAndNative t) = argTypesToTypeM (SerialOnly (SerialS t)) <> argTypesToTypeM (NativeOnly t)\n\ndata ManifoldMap = ManifoldMap\n  { mapSerialManifold :: SerialManifold -> SerialManifold\n  , mapNativeManifold :: NativeManifold -> NativeManifold\n  , mapSerialExpr :: SerialExpr -> SerialExpr\n  , mapNativeExpr :: NativeExpr -> NativeExpr\n  , mapSerialArg :: SerialArg -> SerialArg\n  , mapNativeArg :: NativeArg -> NativeArg\n  }\n\ninstance Defaultable ManifoldMap where\n  defaultValue =\n    ManifoldMap\n      { mapSerialManifold = id\n      , mapNativeManifold = id\n      , mapSerialExpr = id\n      , mapNativeExpr = id\n      , mapSerialArg = id\n      , mapNativeArg = id\n      }\n\ndata GateMap = GateMap\n  { gateSerialManifold :: SerialManifold -> Bool\n  , gateNativeManifold :: NativeManifold -> Bool\n  , gateSerialExpr :: SerialExpr -> Bool\n  , gateNativeExpr :: NativeExpr -> Bool\n  , gateSerialArg :: SerialArg -> Bool\n  , gateNativeArg :: NativeArg -> Bool\n  }\n\ninstance Defaultable GateMap where\n  defaultValue =\n    GateMap\n      { gateSerialManifold = const True\n      , gateNativeManifold = const True\n      , gateSerialExpr = const True\n      , gateNativeExpr = const True\n      , gateSerialArg = const True\n      , gateNativeArg = const True\n      }\n\nclass MFunctor a where\n  mgatedMap :: GateMap -> ManifoldMap -> a -> a\n\n  mmap :: ManifoldMap -> a -> a\n  mmap = mgatedMap defaultValue\n\ninstance MFunctor NativeManifold where\n  mgatedMap g f nm@(NativeManifold m l form ne)\n    | gateNativeManifold g nm = mapNativeManifold f $ NativeManifold m l form (mgatedMap g f ne)\n    | otherwise = mapNativeManifold f nm\n\ninstance MFunctor SerialManifold where\n  mgatedMap g f sm@(SerialManifold m l form headForm se)\n    | gateSerialManifold g sm =\n        mapSerialManifold f $ SerialManifold m l form headForm (mgatedMap g f se)\n    | otherwise = mapSerialManifold f sm\n\ninstance MFunctor SerialArg where\n  mgatedMap g f sr\n    | gateSerialArg g sr = case sr of\n        (SerialArgManifold sm) -> mapSerialArg f $ SerialArgManifold (mgatedMap g f sm)\n        (SerialArgExpr se) -> mapSerialArg f $ SerialArgExpr (mgatedMap g f se)\n    | otherwise = mapSerialArg f sr\n\ninstance MFunctor NativeArg where\n  mgatedMap g f nr\n    | gateNativeArg g nr = case nr of\n        (NativeArgManifold nm) -> mapNativeArg f $ NativeArgManifold (mgatedMap g f nm)\n        (NativeArgExpr ne) -> mapNativeArg f $ NativeArgExpr (mgatedMap g f ne)\n    | otherwise = mapNativeArg f nr\n\ninstance MFunctor SerialExpr where\n  mgatedMap g f se0\n    | gateSerialExpr g se0 = case se0 of\n        (ManS sm) -> mapSerialExpr f $ ManS (mgatedMap g f sm)\n        (AppPoolS t p serialArgs) -> mapSerialExpr f $ AppPoolS t p (map (mgatedMap g f) serialArgs)\n        (AppRecS t m es) -> mapSerialExpr f $ AppRecS t m (map (mgatedMap g f) es)\n        (AppForeignRecS t m s es) -> mapSerialExpr f $ AppForeignRecS t m s (map (mgatedMap g f) es)\n        (ReturnS se) -> mapSerialExpr f $ ReturnS (mgatedMap g f se)\n        (SerialLetS i se1 se2) -> mapSerialExpr f $ SerialLetS i (mgatedMap g f se1) (mgatedMap g f se2)\n        (NativeLetS i ne1 se2) -> mapSerialExpr f $ NativeLetS i (mgatedMap g f ne1) (mgatedMap g f se2)\n        e@(LetVarS _ _) -> mapSerialExpr f e\n        e@(BndVarS _ _) -> mapSerialExpr f e\n        (SerializeS s ne) -> mapSerialExpr f $ SerializeS s (mgatedMap g f ne)\n    | otherwise = mapSerialExpr f se0\n\n-- WARNING - mapping must not change the type of any argument\ninstance MFunctor NativeExpr where\n  mgatedMap g f ne0\n    | gateNativeExpr g ne0 = case ne0 of\n        (AppExeN t exe nativeArgs) -> mapNativeExpr f $ AppExeN t exe (map (mgatedMap g f) nativeArgs)\n        (ManN nm) -> mapNativeExpr f $ ManN (mgatedMap g f nm)\n        (ReturnN ne) -> mapNativeExpr f $ ReturnN (mgatedMap g f ne)\n        (SerialLetN i se ne) -> mapNativeExpr f $ SerialLetN i (mgatedMap g f se) (mgatedMap g f ne)\n        (NativeLetN i ne1 ne2) -> mapNativeExpr f $ NativeLetN i (mgatedMap g f ne1) (mgatedMap g f ne2)\n        e@(LetVarN _ _) -> mapNativeExpr f e\n        e@(BndVarN _ _) -> mapNativeExpr f e\n        (DeserializeN t s se) -> mapNativeExpr f $ DeserializeN t s (mgatedMap g f se)\n        e@(ExeN _ _) -> mapNativeExpr f e\n        (ListN v t nes) -> mapNativeExpr f $ ListN v t (map (mgatedMap g f) nes)\n        (TupleN v xs) -> mapNativeExpr f $ TupleN v (map (mgatedMap g f) xs)\n        (RecordN o v ps rs) -> mapNativeExpr f $ RecordN o v ps (map (second (mgatedMap g f)) rs)\n        e@(LogN _ _) -> mapNativeExpr f e\n        e@(RealN _ _) -> mapNativeExpr f e\n        e@(IntN _ _) -> mapNativeExpr f e\n        e@(StrN _ _) -> mapNativeExpr f e\n        e@(NullN _) -> mapNativeExpr f e\n        (DoBlockN t ne) -> mapNativeExpr f $ DoBlockN t (mgatedMap g f ne)\n        (EvalN t ne) -> mapNativeExpr f $ EvalN t (mgatedMap g f ne)\n        (CoerceN c t ne) -> mapNativeExpr f $ CoerceN c t (mgatedMap g f ne)\n        (IfN t c thenE elseE) -> mapNativeExpr f $ IfN t (mgatedMap g f c) (mgatedMap g f thenE) (mgatedMap g f elseE)\n        (IntrinsicN t intr msch nes) -> mapNativeExpr f $ IntrinsicN t intr msch (map (mgatedMap g f) nes)\n    | otherwise = mapNativeExpr f ne0\n\ninstance (Pretty a) => Pretty (Arg a) where\n  pretty (Arg i x) = \"x\" <> pretty i <> braces (pretty x)\n\ninstance Pretty TypeF where\n  pretty = viaShow\n\ninstance Pretty TypeM where\n  pretty Passthrough = \"Passthrough\"\n  pretty (Serial c) = \"Serial{\" <> pretty c <> \"}\"\n  pretty (Native c) = \"Native{\" <> pretty c <> \"}\"\n  pretty (Function ts t) =\n    nest 4 (vsep $ [\"Function{\"] <> map (\\x -> pretty x <+> \"->\") ts <> [pretty t <> \"}\"])\n\ninstance Pretty TypeS where\n  pretty PassthroughS = \"PassthroughS\"\n  pretty (SerialS t) = \"SeralS{\" <> pretty t <> \"}\"\n  pretty (FunctionS ts t) =\n    nest 4 (vsep $ [\"Function{\"] <> map (\\x -> pretty x <+> \"->\") ts <> [pretty t <> \"}\"])\n\ninstance Pretty PolyHead where\n  pretty _ = \"PolyHead stub\"\n\ninstance Pretty PolyExpr where\n  pretty (PolyManifold _ _ _ _) = \"PolyManifold\"\n  pretty (PolyRemoteInterface _ _ _ _ _) = \"PolyRemoteInterface\"\n  pretty (PolyLet i e1 e2) = \"PolyLet<\" <> pretty i <> \">\" <+> list [pretty e1, pretty e2]\n  pretty (PolyReturn e) = \"PolyReturn\" <+> parens (pretty e)\n  pretty (PolyApp e es) = \"PolyApp\" <+> list (map pretty (e : es))\n  pretty (PolyBndVar _ _) = \"PolyBndVar\"\n  pretty (PolyLetVar _ _) = \"PolyLetVar\"\n  pretty (PolyExe _ (SrcCallP src)) = \"PolyExe<\" <> pretty (srcAlias src) <> \">\"\n  pretty (PolyExe _ (PatCallP _)) = \"PolyExe<pattern>\"\n  pretty (PolyExe _ (LocalCallP _)) = \"PolyExe<local>\"\n  pretty (PolyExe _ (RecCallP i _)) = \"PolyExe<rec_m\" <> pretty i <> \">\"\n  pretty (PolyList _ _ _) = \"PolyList\"\n  pretty (PolyTuple _ xs) = \"PolyTuple\" <+> pretty (length xs)\n  pretty (PolyRecord _ _ _ _) = \"PolyRecord\"\n  pretty (PolyLog _ _) = \"PolyLog\"\n  pretty (PolyReal _ _) = \"PolyReal\"\n  pretty (PolyInt _ _) = \"PolyInt\"\n  pretty (PolyStr _ _) = \"PolyStr\"\n  pretty (PolyNull _) = \"PolyNull\"\n  pretty (PolyDoBlock _ e) = \"PolyDoBlock\" <+> pretty e\n  pretty (PolyEval _ e) = \"PolyEval\" <+> pretty e\n  pretty (PolyCoerce _ _ e) = \"PolyCoerce\" <+> pretty e\n  pretty (PolyIf c t e) = \"PolyIf\" <+> pretty c <+> pretty t <+> pretty e\n  pretty (PolyIntrinsic _ intr es) = \"@\" <> pretty (intrinsicName intr) <+> list (map pretty es)\n\ninstance Pretty MonoExpr where\n  pretty (MonoManifold i form e) =\n    block 4 (\"m\" <> pretty i <> tupled (abilist contextArg boundArg form)) (pretty e)\n    where\n      contextArg j _ = \"c\" <> pretty j\n      boundArg j _ = \"b\" <> pretty j\n  pretty (MonoPoolCall t i _ _ _) = \"PoolCall\" <> parens (pretty i) <> parens (pretty t)\n  pretty (MonoLet i e1 e2) = vsep [\"let\" <+> \"x\" <> pretty i <+> \"=\" <+> pretty e1, pretty e2]\n  pretty (MonoLetVar t i) = parens $ \"x\" <> pretty i <> \" :: \" <> pretty t\n  pretty (MonoReturn e) = \"return\" <> parens (pretty e)\n  pretty (MonoApp e es) = parens (pretty e) <+> hsep (map (parens . pretty) es)\n  pretty (MonoExe _ exe) = pretty exe\n  pretty (MonoBndVar (A _) i) = parens $ \"x\" <> pretty i <+> \":\" <+> \"<unknown>\"\n  pretty (MonoBndVar (B t) i) = parens $ \"x\" <> pretty i <+> \":\" <+> pretty t\n  pretty (MonoBndVar (C t) i) = parens $ \"x\" <> pretty i <+> \":\" <+> pretty t\n  pretty (MonoList _ _ es) = list (map pretty es)\n  pretty (MonoTuple v es) = pretty v <+> tupled (map pretty es)\n  pretty (MonoRecord o v fs _) =\n    block 4 (pretty o <+> pretty v <> encloseSep \"<\" \">\" \",\" (map pretty fs)) \"manifold record stub\"\n  pretty (MonoLog _ x) = viaShow x\n  pretty (MonoReal _ x) = viaShow x\n  pretty (MonoInt _ x) = viaShow x\n  pretty (MonoStr _ x) = viaShow x\n  pretty (MonoNull _) = \"NULL\"\n  pretty (MonoDoBlock _ e) = \"{\" <> pretty e <> \"}\"\n  pretty (MonoEval _ e) = \"!\" <> pretty e\n  pretty (MonoCoerce _ _ e) = \"coerce(\" <> pretty e <> \")\"\n  pretty (MonoIf c t e) = \"if\" <+> pretty c <+> \"then\" <+> pretty t <+> \"else\" <+> pretty e\n  pretty (MonoIntrinsic _ intr es) = \"@\" <> pretty (intrinsicName intr) <+> list (map pretty es)\n\ninstance Pretty MonoHead where\n  pretty (MonoHead lang i args headForm e) =\n    block 4 \"MonoHead\" $\n      encloseSep\n        \"{\"\n        \"}\"\n        \",\"\n        [ \"lang:\" <+> pretty lang\n        , \"index:\" <+> pretty i\n        , \"args:\" <+> list (map pretty args)\n        , \"headForm:\" <+> viaShow headForm\n        , \"expr:\" <+> pretty e\n        ]\n\ninstance Pretty PoolCall where\n  pretty _ = \"PoolCall stub\"\n\ninstance (Pretty context, Pretty bound) => Pretty (ManifoldForm context bound) where\n  pretty (ManifoldPass args) = \"ManifoldPass\" <+> list (map pretty args)\n  pretty (ManifoldFull args) = \"ManifoldFull\" <+> list (map pretty args)\n  pretty (ManifoldPart cargs bargs) =\n    \"ManifoldFull\"\n      <+> \"{context:\"\n      <+> list (map pretty cargs)\n      <> \",\"\n        <+> \"bound:\"\n        <+> list (map pretty bargs)\n      <> \"}\"\n\ndata CmdArg\n  = CmdArgPos ArgPosDocSet\n  | -- positional argument\n    CmdArgOpt ArgOptDocSet\n  | -- optional argument\n    CmdArgGrp RecDocSet\n  | -- argument group (made from a record)\n    CmdArgFlag ArgFlagDocSet\n  -- flag option\n  deriving (Show, Ord, Eq)\n\ndata CmdDocSet = CmdDocSet\n  { cmdDocDesc :: [Text]\n  , -- free description, the first line is used in the top-level help statement\n    cmdDocName :: Maybe Text\n  , -- an alternative name to give this subcommand (defaults to the function name)\n    cmdDocArgs :: [CmdArg]\n  , -- one element for each argument to the function\n    cmdDocRet :: (Type, [Text])\n    -- description of the return data\n  }\n  deriving (Show, Ord, Eq)\n\ndata RecDocSet = RecDocSet\n  { recDocType :: Type\n  , -- fully resolved type\n    recDocDesc :: [Text]\n  , -- free description\n    recDocMetavar :: Text\n  , -- name of the record used in docs, with record type in uppercase as the default\n    recDocOpt :: Maybe CliOpt\n  , -- optional argument that expects the entire record\n    recDocEntries :: [(Key, Either ArgFlagDocSet ArgOptDocSet)]\n    -- all options for this record\n  }\n  deriving (Show, Ord, Eq)\n\ndata ArgOptDocSet = ArgOptDocSet\n  { argOptDocType :: Type\n  , -- argument type\n    argOptDocDesc :: [Text]\n  , -- free description\n    argOptDocMetavar :: Text\n  , -- a variable used in the interface to refer to this argument term\n    argOptDocLiteral :: Maybe Bool\n  , -- if Just True, require an argument be literal rather than from a file\n    -- if Just False, require an argument be from a file\n    -- if Nothing, infer as usual\n    argOptDocArg :: CliOpt\n  , -- the option\n    argOptDocDefault :: Text\n    -- the required default vale for an argument\n  }\n  deriving (Show, Ord, Eq)\n\ndata ArgFlagDocSet = ArgFlagDocSet\n  { argFlagDocDesc :: [Text]\n  , -- free description\n    argFlagDocOpt :: CliOpt\n  , -- invert the default value\n    argFlagDocOptRev :: Maybe CliOpt\n  , -- force the default value\n    argFlagDocDefault :: Text\n    -- the possibly inferred default value\n  }\n  deriving (Show, Ord, Eq)\n\ndata ArgPosDocSet = ArgPosDocSet\n  { argPosDocType :: Type\n  , argPosDocDesc :: [Text]\n  , -- free description\n    argPosDocMetavar :: Maybe Text\n  , -- a variable used in the interface to refer to this argument term\n    argPosDocLiteral :: Maybe Bool\n    -- if Just True, require an argument be literal rather than from a file\n    -- if Just False, require an argument be from a file\n    -- if Nothing, infer as usual\n  }\n  deriving (Show, Ord, Eq)\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Nexus.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Nexus\nDescription : Generate the @.manifest@ JSON file consumed by the pre-compiled nexus\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProduces the JSON manifest that the static nexus binary reads at startup.\nThe manifest describes all exported subcommands, their argument types,\nhelp text, and which pool executables to dispatch to.\n-}\nmodule Morloc.CodeGenerator.Nexus\n  ( generate\n  ) where\n\nimport qualified Control.Monad as CM\nimport qualified Control.Monad.State as CMS\nimport qualified Data.Map as Map\nimport Data.Text (Text)\nimport qualified Data.Text as MT\nimport qualified Data.Time.Clock\nimport qualified Data.Time.Clock.POSIX as Time\nimport qualified Data.Time.Format\nimport qualified Morloc.BaseTypes as MBT\nimport qualified Morloc.CodeGenerator.Infer as Infer\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.CodeGenerator.Serial as Serial\nimport qualified Morloc.Config as MC\nimport Morloc.Data.Doc (pretty, render)\nimport Morloc.Data.Json\nimport qualified Morloc.LangRegistry as LR\nimport qualified Morloc.Language as ML\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.Version\nimport qualified System.Directory as Dir\n\n-- ======================================================================\n-- Data types\n-- ======================================================================\n\ncLang :: ML.Lang\ncLang = Lang \"c\" \"c\"\n\ndata FData = FData\n  { fdataSocket :: Socket\n  , fdataSubcommand :: Text\n  , fdataMid :: Int\n  , fdataType :: Type\n  , fdataSubSockets :: [Socket]\n  , fdataArgSchemas :: [Text]\n  , fdataReturnSchema :: Text\n  , fdataCmdDocSet :: CmdDocSet\n  }\n\ndata GastData = GastData\n  { commandName :: Text\n  , commandMid :: Int\n  , commandType :: Type\n  , commandDocs :: CmdDocSet\n  , commandExpr :: NexusExpr\n  , commandReturnSchema :: Text\n  , commandArgSchemas :: [Text]\n  }\n\ndata NexusExpr\n  = AppX Text NexusExpr [NexusExpr]\n  | LamX [Text] NexusExpr\n  | BndX Text Text\n  | PatX Text Pattern\n  | LstX Text [NexusExpr]\n  | TupX Text [NexusExpr]\n  | NamX Text [(Text, NexusExpr)]\n  | StrX Text Text\n  | LitX LitType Text\n  | ShowX Text NexusExpr  -- schema (return type = Str), child expression\n  | ReadX Text NexusExpr  -- schema (return type = ?a), child expression\n  | HashX Text NexusExpr  -- schema + child -> Str (xxhash hex)\n  | SaveX Text Text NexusExpr NexusExpr  -- format + schema + value + path -> ()\n  | LoadX Text NexusExpr  -- schema + path -> ?a\n\ndata LitType = F32X | F64X | I8X | I16X | I32X | I64X | U8X | U16X | U32X | U64X | BoolX | NullX\n\n-- ======================================================================\n-- Data extraction\n-- ======================================================================\n\nmakeFData ::\n  (AnnoS (Indexed Type) One (Indexed Lang), CmdDocSet) ->\n  MorlocMonad (Type, Int, Lang, CmdDocSet, [Socket])\nmakeFData (e@(AnnoS (Idx i t) (Idx _ lang) _), d) = do\n  sockets <- findSockets e\n  return (t, i, lang, d, sockets)\n\nfindSockets :: AnnoS e One (Indexed Lang) -> MorlocMonad [Socket]\nfindSockets rAST = do\n  config <- MM.ask\n  registry <- MM.gets stateLangRegistry\n  return . map (MC.setupServerAndSocket config registry) . unique $ findAllLangsSAnno rAST\n\nfindAllLangsSAnno :: AnnoS e One (Indexed Lang) -> [Lang]\nfindAllLangsSAnno (AnnoS _ (Idx _ lang) e) = lang : findAllLangsExpr e\n  where\n    findAllLangsExpr (VarS _ (One x)) = findAllLangsSAnno x\n    findAllLangsExpr (AppS x xs) = concatMap findAllLangsSAnno (x : xs)\n    findAllLangsExpr (LamS _ x) = findAllLangsSAnno x\n    findAllLangsExpr (LstS xs) = concatMap findAllLangsSAnno xs\n    findAllLangsExpr (TupS xs) = concatMap findAllLangsSAnno xs\n    findAllLangsExpr (NamS rs) = concatMap (findAllLangsSAnno . snd) rs\n    findAllLangsExpr (LetS _ e1 e2) = findAllLangsSAnno e1 ++ findAllLangsSAnno e2\n    findAllLangsExpr (IfS c t e') = concatMap findAllLangsSAnno [c, t, e']\n    findAllLangsExpr (DoBlockS x) = findAllLangsSAnno x\n    findAllLangsExpr (EvalS x) = findAllLangsSAnno x\n    findAllLangsExpr (CoerceS _ x) = findAllLangsSAnno x\n    findAllLangsExpr _ = []\n\ngetFData :: (Type, Int, Lang, CmdDocSet, [Socket]) -> MorlocMonad FData\ngetFData (t, i, lang, doc, sockets) = do\n  mayName <- MM.metaName i\n  (argSchemas, returnSchema) <- makeSchemas i lang t\n\n  case mayName of\n    (Just name') -> do\n      config <- MM.ask\n      registry <- MM.gets stateLangRegistry\n      let socket = MC.setupServerAndSocket config registry lang\n      return $\n        FData\n          { fdataSocket = socket\n          , fdataSubcommand = maybe (unEVar name') id (cmdDocName doc)\n          , fdataMid = i\n          , fdataType = t\n          , fdataSubSockets = sockets\n          , fdataArgSchemas = map render argSchemas\n          , fdataReturnSchema = render returnSchema\n          , fdataCmdDocSet = doc\n          }\n    Nothing -> MM.throwSourcedError i $ \"No name in FData\"\n\n-- ======================================================================\n-- Schema building\n-- ======================================================================\n\nmakeSchemas :: Int -> Lang -> Type -> MorlocMonad ([MDoc], MDoc)\nmakeSchemas mid lang (FunT ts t) = do\n  ss <- mapM (makeSchema mid lang) ts\n  s <- makeSchema mid lang t\n  return (ss, s)\nmakeSchemas mid lang t = do\n  s <- makeSchema mid lang t\n  return ([], s)\n\nmakeSchema :: Int -> Lang -> Type -> MorlocMonad MDoc\nmakeSchema mid lang t = do\n  ft <- Infer.inferConcreteTypeUniversal lang t\n  ast <- Serial.makeSerialAST mid lang ft\n  return $ Serial.serialAstToMsgpackSchema ast\n\nmakeGastSchemas :: Type -> MorlocMonad (MDoc, [MDoc])\nmakeGastSchemas (FunT ts t) = do\n  serialAsts <- mapM generalTypeToSerialAST (t : ts)\n  case map Serial.serialAstToMsgpackSchema serialAsts of\n    (s : ss) -> return (s, ss)\n    [] -> error \"makeGastSchemas: FunT produced empty serial AST list\"\nmakeGastSchemas t = do\n  s <- Serial.serialAstToMsgpackSchema <$> generalTypeToSerialAST t\n  return (s, [])\n\ngeneralTypeToSerialAST :: Type -> MorlocMonad SerialAST\ngeneralTypeToSerialAST (VarT v)\n  | v == MBT.real = return $ SerialReal (FV v (CV \"\"))\n  | v == MBT.f32 = return $ SerialReal (FV v (CV \"\"))\n  | v == MBT.f64 = return $ SerialReal (FV v (CV \"\"))\n  | v == MBT.int = return $ SerialInt (FV v (CV \"\"))\n  | v == MBT.i8 = return $ SerialInt8 (FV v (CV \"\"))\n  | v == MBT.i16 = return $ SerialInt16 (FV v (CV \"\"))\n  | v == MBT.i32 = return $ SerialInt32 (FV v (CV \"\"))\n  | v == MBT.i64 = return $ SerialInt64 (FV v (CV \"\"))\n  | v == MBT.u8 = return $ SerialUInt8 (FV v (CV \"\"))\n  | v == MBT.u16 = return $ SerialUInt16 (FV v (CV \"\"))\n  | v == MBT.u32 = return $ SerialUInt32 (FV v (CV \"\"))\n  | v == MBT.u64 = return $ SerialUInt64 (FV v (CV \"\"))\n  | v == MBT.bool = return $ SerialBool (FV v (CV \"\"))\n  | v == MBT.str = return $ SerialString (FV v (CV \"\"))\n  | v == MBT.unit = return $ SerialNull (FV v (CV \"\"))\n  | otherwise = do\n      scope <- MM.gets stateUniversalGeneralTypedefs\n      case Map.lookup v scope of\n        (Just [(_, _, _, True)]) -> error \"Cannot handle terminal types\"\n        (Just [([], t', _, False)]) -> generalTypeToSerialAST (typeOf t')\n        (Just [_]) -> error $ \"Cannot currently handle parameterized pure morloc types\"\n        Nothing -> error $ \"Failed to interpret type variable: \" <> show (unTVar v)\n        x -> error $ \"Unexpected scope: \" <> show x\ngeneralTypeToSerialAST (AppT (VarT v) [t])\n  | v == MBT.list = SerialList (FV v (CV \"\")) <$> generalTypeToSerialAST t\n  | otherwise = resolveAliasApp v [t]\ngeneralTypeToSerialAST (AppT (VarT v) ts)\n  | v == (MBT.tuple (length ts)) = SerialTuple (FV v (CV \"\")) <$> mapM generalTypeToSerialAST ts\n  | otherwise = resolveAliasApp v ts\ngeneralTypeToSerialAST (EffectT _ t) = generalTypeToSerialAST t\ngeneralTypeToSerialAST (OptionalT t) = do\n  inner <- generalTypeToSerialAST t\n  return $ SerialOptional (FV (TV \"Optional\") (CV \"\")) inner\ngeneralTypeToSerialAST (NamT o v [] rs) =\n  SerialObject o (FV v (CV \"\")) []\n    <$> mapM (secondM generalTypeToSerialAST) rs\ngeneralTypeToSerialAST t = error $ \"cannot serialize this type: \" <> show t\n\nresolveAliasApp :: TVar -> [Type] -> MorlocMonad SerialAST\nresolveAliasApp v ts = do\n  scope <- MM.gets stateUniversalGeneralTypedefs\n  case Map.lookup v scope of\n    (Just [(params, body, _, False)]) ->\n      let tvars = [tv | Left (tv, _) <- params]\n          resolved = foldl (\\acc (tv, arg) -> substituteTVar tv arg acc) (typeOf body) (zip tvars ts)\n      in generalTypeToSerialAST resolved\n    _ -> error $ \"Cannot serialize type: \" <> show (AppT (VarT v) ts)\n\n-- ======================================================================\n-- Pure expression extraction\n-- ======================================================================\n\nannotateGasts :: (AnnoS (Indexed Type) One (), CmdDocSet) -> MorlocMonad GastData\nannotateGasts (x0@(AnnoS (Idx i gtype) _ _), docs) = do\n  mayName <- MM.metaName i\n  gname <- case mayName of\n    Nothing -> MM.throwSourcedError i $ \"No name found for call-free function\"\n    (Just n') -> return n'\n\n  (retSchemaDoc, argSchemaDocs) <- makeGastSchemas gtype\n  expr <- toNexusExpr x0\n\n  return $\n    GastData\n      { commandName = maybe (unEVar gname) id (cmdDocName docs)\n      , commandMid = i\n      , commandType = gtype\n      , commandDocs = docs\n      , commandExpr = expr\n      , commandReturnSchema = render retSchemaDoc\n      , commandArgSchemas = map render argSchemaDocs\n      }\n  where\n    type2schema :: Type -> MorlocMonad Text\n    type2schema t = (render . Serial.serialAstToMsgpackSchema) <$> generalTypeToSerialAST t\n\n    toNexusExpr :: AnnoS (Indexed Type) One () -> MorlocMonad NexusExpr\n    toNexusExpr (AnnoS (Idx _ t) _ (AppS e es)) = AppX <$> type2schema t <*> toNexusExpr e <*> mapM toNexusExpr es\n    toNexusExpr (AnnoS _ _ (LamS vs e)) = LamX (map (render . pretty) vs) <$> toNexusExpr e\n    toNexusExpr (AnnoS (Idx _ (FunT _ t)) _ (ExeS (PatCall p))) = PatX <$> type2schema t <*> pure p\n    toNexusExpr (AnnoS (Idx _ t) _ (BndS v)) = BndX <$> type2schema t <*> pure (render (pretty v))\n    toNexusExpr (AnnoS (Idx _ t) _ (LstS es)) = LstX <$> type2schema t <*> mapM toNexusExpr es\n    toNexusExpr (AnnoS (Idx _ t) _ (TupS es)) = TupX <$> type2schema t <*> mapM toNexusExpr es\n    toNexusExpr (AnnoS (Idx _ t) _ (NamS rs)) =\n      NamX <$> type2schema t <*> mapM (\\(k, e) -> (,) (unKey k) <$> toNexusExpr e) rs\n    toNexusExpr (AnnoS (Idx _ t) _ (StrS v)) = StrX <$> type2schema t <*> pure v\n    toNexusExpr (AnnoS (Idx _ t) _ (RealS v)) = do\n      s <- generalTypeToSerialAST t\n      return $ case s of\n        (SerialFloat32 _) -> LitX F32X (MT.pack (show v))\n        _ -> LitX F64X (MT.pack (show v))\n    toNexusExpr (AnnoS (Idx _ t) _ (IntS v)) = do\n      s <- generalTypeToSerialAST t\n      return $ case s of\n        (SerialInt8 _) -> LitX I8X (MT.pack (show v))\n        (SerialInt16 _) -> LitX I16X (MT.pack (show v))\n        (SerialInt _) -> LitX I32X (MT.pack (show v))\n        (SerialInt32 _) -> LitX I32X (MT.pack (show v))\n        (SerialInt64 _) -> LitX I64X (MT.pack (show v))\n        (SerialUInt8 _) -> LitX U8X (MT.pack (show v))\n        (SerialUInt16 _) -> LitX U16X (MT.pack (show v))\n        (SerialUInt _) -> LitX U32X (MT.pack (show v))\n        (SerialUInt32 _) -> LitX U32X (MT.pack (show v))\n        (SerialUInt64 _) -> LitX U64X (MT.pack (show v))\n        _ -> LitX I64X (MT.pack (show v))\n    toNexusExpr (AnnoS _ _ (LogS True)) = return $ LitX BoolX \"1\"\n    toNexusExpr (AnnoS _ _ (LogS False)) = return $ LitX BoolX \"0\"\n    toNexusExpr (AnnoS _ _ UniS) = return $ LitX NullX \"0\"\n    toNexusExpr (AnnoS _ _ NullS) = return $ LitX NullX \"0\"\n    toNexusExpr (AnnoS (Idx _ t) _ (LetBndS v)) = BndX <$> type2schema t <*> pure (render (pretty v))\n    -- Desugar let to lambda application: let x = e1 in e2 -> (\\x -> e2) e1\n    toNexusExpr (AnnoS (Idx _ t) _ (LetS v e1 body)) = do\n      schema <- type2schema t\n      bodyX <- toNexusExpr body\n      e1X <- toNexusExpr e1\n      return $ AppX schema (LamX [render (pretty v)] bodyX) [e1X]\n    toNexusExpr (AnnoS _ _ (IfS _ t _)) = toNexusExpr t\n    toNexusExpr (AnnoS _ _ (DoBlockS e)) = toNexusExpr e\n    toNexusExpr (AnnoS _ _ (EvalS e)) = toNexusExpr e\n    toNexusExpr (AnnoS _ _ (CoerceS _ e)) = toNexusExpr e\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrShow [arg])) =\n      ShowX <$> type2schema t <*> toNexusExpr arg\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrRead [arg])) =\n      ReadX <$> type2schema t <*> toNexusExpr arg\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrHash [arg])) =\n      HashX <$> type2schema t <*> toNexusExpr arg\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrSave [valExpr, path])) =\n      SaveX \"voidstar\" <$> type2schema t <*> toNexusExpr valExpr <*> toNexusExpr path\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrSaveM [valExpr, path])) =\n      SaveX \"msgpack\" <$> type2schema t <*> toNexusExpr valExpr <*> toNexusExpr path\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrSaveJ [valExpr, path])) =\n      SaveX \"json\" <$> type2schema t <*> toNexusExpr valExpr <*> toNexusExpr path\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS IntrLoad [path])) =\n      LoadX <$> type2schema t <*> toNexusExpr path\n    toNexusExpr (AnnoS (Idx _ t) _ (IntrinsicS intr _)) = do\n      v <- resolveCompileTimeIntrinsic intr\n      StrX <$> type2schema t <*> pure v\n    toNexusExpr (AnnoS (Idx _ t) _ (CallS v)) = BndX <$> type2schema t <*> pure (render (pretty v))\n    toNexusExpr _ = error $ \"Unreachable value of type reached\"\n\nresolveCompileTimeIntrinsic :: Intrinsic -> MorlocMonad Text\nresolveCompileTimeIntrinsic IntrVersion = return $ MT.pack Morloc.Version.versionStr\nresolveCompileTimeIntrinsic IntrCompiled = do\n  now <- liftIO Data.Time.Clock.getCurrentTime\n  return . MT.pack $ Data.Time.Format.formatTime Data.Time.Format.defaultTimeLocale \"%Y-%m-%dT%H:%M:%SZ\" now\nresolveCompileTimeIntrinsic intr =\n  MM.throwSystemError $ \"@\" <> pretty (intrinsicName intr) <> \" cannot be used in a language-independent context\"\n\n-- ======================================================================\n-- CLI argument serialization\n-- ======================================================================\n\n-- | Serialize a 'CmdArg' to its JSON manifest form. The optional\n-- 'Maybe Text' is the pre-rendered serialization schema for typed\n-- args (pos/opt/grp); flags pass 'Nothing' since they have no schema.\n-- Group entries also pass 'Nothing' because the group's top-level\n-- schema covers the whole record; entries never dispatch individually.\nargToJson :: Maybe Text -> CmdArg -> Text\nargToJson mschema (CmdArgPos r) =\n  jsonObj $\n    [ (\"kind\", jsonStr \"pos\") ]\n    ++ schemaField mschema\n    ++ [ (\"type\", jsonStr (typeDescStr (argPosDocType r) (argPosDocLiteral r)))\n       , (\"metavar\", jsonMaybeStr (argPosDocMetavar r))\n       , (\"quoted\", jsonBool (argPosDocLiteral r == Just True && isStrType (argPosDocType r)))\n       , (\"desc\", jsonStrArr (argPosDocDesc r))\n       , (\"constraints\", constraintsJsonFor (argPosDocType r))\n       , (\"metadata\", metadataEmpty)\n       ]\nargToJson mschema (CmdArgOpt r) =\n  jsonObj $\n    [ (\"kind\", jsonStr \"opt\") ]\n    ++ schemaField mschema\n    ++ [ (\"type\", jsonStr (typeDescStr (argOptDocType r) (argOptDocLiteral r)))\n       , (\"metavar\", jsonStr (argOptDocMetavar r))\n       , (\"quoted\", jsonBool (argOptDocLiteral r == Just True && isStrType (argOptDocType r)))\n       , (\"short\", cliOptShortJson (argOptDocArg r))\n       , (\"long\", cliOptLongJson (argOptDocArg r))\n       , (\"default\", jsonStr (argOptDocDefault r))\n       , (\"desc\", jsonStrArr (argOptDocDesc r))\n       , (\"constraints\", constraintsJsonFor (argOptDocType r))\n       , (\"metadata\", metadataEmpty)\n       ]\nargToJson _ (CmdArgFlag r) =\n  jsonObj\n    [ (\"kind\", jsonStr \"flag\")\n    , (\"short\", cliOptShortJson (argFlagDocOpt r))\n    , (\"long\", cliOptLongJson (argFlagDocOpt r))\n    , (\"long_rev\", flagRevJson (argFlagDocOptRev r))\n    , (\"default\", jsonStr (argFlagDocDefault r))\n    , (\"desc\", jsonStrArr (argFlagDocDesc r))\n    , (\"metadata\", metadataEmpty)\n    ]\nargToJson mschema (CmdArgGrp r) =\n  jsonObj $\n    [ (\"kind\", jsonStr \"grp\") ]\n    ++ schemaField mschema\n    ++ [ (\"type\", jsonStr (render (pretty (recDocType r))))\n       , (\"metavar\", jsonStr (recDocMetavar r))\n       , (\"desc\", jsonStrArr (recDocDesc r))\n       , (\"group_opt\", grpOptJson (recDocOpt r))\n       , (\"entries\", jsonArr [grpEntryJson k v | (k, v) <- recDocEntries r])\n       , (\"constraints\", constraintsJsonFor (recDocType r))\n       , (\"metadata\", metadataEmpty)\n       ]\n  where\n    grpOptJson Nothing = jsonNull\n    grpOptJson (Just opt) =\n      jsonObj\n        [ (\"short\", cliOptShortJson opt)\n        , (\"long\", cliOptLongJson opt)\n        ]\n\n    -- Group entries never carry their own schema; the group's top-level\n    -- schema is used for dispatch. Pass 'Nothing' to the recursive call.\n    grpEntryJson key entry =\n      jsonObj\n        [ (\"key\", jsonStr (unKey key))\n        , (\"arg\", argToJson Nothing (either CmdArgFlag CmdArgOpt entry))\n        ]\n\n-- | Prefixed @schema@ field when a schema is present, otherwise empty.\n-- Used by 'argToJson' to splice the field into the per-variant field\n-- list in a consistent position.\nschemaField :: Maybe Text -> [(Text, Text)]\nschemaField Nothing  = []\nschemaField (Just s) = [(\"schema\", jsonStr s)]\n\n-- Check if a type is Str or ?Str (for literal string handling)\nisStrType :: Type -> Bool\nisStrType (VarT v) = v == MBT.str\nisStrType (OptionalT t) = isStrType t\nisStrType _ = False\n\ntypeDescStr :: Type -> Maybe Bool -> Text\ntypeDescStr t isLiteral\n  | isStrType t && isLiteral /= Just True = \"Str    (a filename or quoted JSON string)\"\n  | otherwise = render (pretty t)\n\n-- | Strip outer wrappers that don't change a type's \"name kind\" identity\n-- (Optional and Effect wrappers are transparent for record/object/table\n-- classification). Used by 'surfaceNamKind'.\nstripSurface :: Type -> Type\nstripSurface (OptionalT t) = stripSurface t\nstripSurface (EffectT _ t) = stripSurface t\nstripSurface t             = t\n\n-- | If a type's surface form is a named type, return its 'NamType' tag.\n-- Otherwise Nothing. Single source of the @kind@ constraint.\nsurfaceNamKind :: Type -> Maybe NamType\nsurfaceNamKind t = case stripSurface t of\n  NamT o _ _ _ -> Just o\n  _            -> Nothing\n\n-- | Lowercase label for a 'NamType' constructor, used as the value of\n-- the @kind@ constraint in the manifest.\nnamTagLabel :: NamType -> Text\nnamTagLabel NamRecord = \"record\"\nnamTagLabel NamObject = \"object\"\nnamTagLabel NamTable  = \"table\"\n\n-- | Build the JSON @constraints@ array for a surface type. Only the\n-- @kind@ constraint is populated today; future constraints (min, max,\n-- regex, length, ...) will append to this list.\nconstraintsJsonFor :: Type -> Text\nconstraintsJsonFor t = jsonArr $ catMaybes\n  [ (\\nt -> jsonObj\n      [ (\"type\", jsonStr \"kind\")\n      , (\"value\", jsonStr (namTagLabel nt))\n      ]) <$> surfaceNamKind t\n  ]\n\n-- | An empty @metadata@ slot. Always emitted so consumers never have to\n-- check presence.\nmetadataEmpty :: Text\nmetadataEmpty = jsonObj []\n\ncliOptShortJson :: CliOpt -> Text\ncliOptShortJson (CliOptShort c) = jsonStr (MT.singleton c)\ncliOptShortJson (CliOptBoth c _) = jsonStr (MT.singleton c)\ncliOptShortJson _ = jsonNull\n\ncliOptLongJson :: CliOpt -> Text\ncliOptLongJson (CliOptLong l) = jsonStr l\ncliOptLongJson (CliOptBoth _ l) = jsonStr l\ncliOptLongJson _ = jsonNull\n\nflagRevJson :: Maybe CliOpt -> Text\nflagRevJson Nothing = jsonNull\nflagRevJson (Just (CliOptLong l)) = jsonStr l\nflagRevJson (Just (CliOptBoth _ l)) = jsonStr l\nflagRevJson _ = jsonNull\n\n-- ======================================================================\n-- Expression tree serialization\n-- ======================================================================\n\nexprToJson :: NexusExpr -> Text\nexprToJson (LitX lt v) =\n  jsonObj\n    [ (\"tag\", jsonStr \"lit\")\n    , (\"schema\", jsonStr (litSchemaStr lt))\n    , (\"lit_type\", jsonStr (litSchemaStr lt))\n    , (\"value\", jsonStr v)\n    ]\nexprToJson (StrX schema v) =\n  jsonObj\n    [ (\"tag\", jsonStr \"str\")\n    , (\"schema\", jsonStr schema)\n    , (\"value\", jsonStr v)\n    ]\nexprToJson (LstX schema es) =\n  jsonObj\n    [ (\"tag\", jsonStr \"container\")\n    , (\"schema\", jsonStr schema)\n    , (\"elements\", jsonArr (map exprToJson es))\n    ]\nexprToJson (TupX schema es) =\n  jsonObj\n    [ (\"tag\", jsonStr \"container\")\n    , (\"schema\", jsonStr schema)\n    , (\"elements\", jsonArr (map exprToJson es))\n    ]\nexprToJson (NamX schema entries) =\n  jsonObj\n    [ (\"tag\", jsonStr \"container\")\n    , (\"schema\", jsonStr schema)\n    , (\"elements\", jsonArr (map (exprToJson . snd) entries))\n    ]\nexprToJson (AppX schema func args) =\n  jsonObj\n    [ (\"tag\", jsonStr \"app\")\n    , (\"schema\", jsonStr schema)\n    , (\"func\", exprToJson func)\n    , (\"args\", jsonArr (map exprToJson args))\n    ]\nexprToJson (LamX vars body) =\n  jsonObj\n    [ (\"tag\", jsonStr \"lambda\")\n    , (\"vars\", jsonStrArr vars)\n    , (\"body\", exprToJson body)\n    ]\nexprToJson (BndX schema var) =\n  jsonObj\n    [ (\"tag\", jsonStr \"bound\")\n    , (\"schema\", jsonStr schema)\n    , (\"var\", jsonStr var)\n    ]\nexprToJson (ShowX schema child) =\n  jsonObj\n    [ (\"tag\", jsonStr \"show\")\n    , (\"schema\", jsonStr schema)\n    , (\"child\", exprToJson child)\n    ]\nexprToJson (ReadX schema child) =\n  jsonObj\n    [ (\"tag\", jsonStr \"read\")\n    , (\"schema\", jsonStr schema)\n    , (\"child\", exprToJson child)\n    ]\nexprToJson (HashX schema child) =\n  jsonObj\n    [ (\"tag\", jsonStr \"hash\")\n    , (\"schema\", jsonStr schema)\n    , (\"child\", exprToJson child)\n    ]\nexprToJson (SaveX fmt schema value path) =\n  jsonObj\n    [ (\"tag\", jsonStr \"save\")\n    , (\"format\", jsonStr fmt)\n    , (\"schema\", jsonStr schema)\n    , (\"value\", exprToJson value)\n    , (\"path\", exprToJson path)\n    ]\nexprToJson (LoadX schema child) =\n  jsonObj\n    [ (\"tag\", jsonStr \"load\")\n    , (\"schema\", jsonStr schema)\n    , (\"child\", exprToJson child)\n    ]\nexprToJson (PatX schema (PatternText p ps)) =\n  jsonObj\n    [ (\"tag\", jsonStr \"interpolation\")\n    , (\"schema\", jsonStr schema)\n    , (\"strings\", jsonStrArr (p : ps))\n    ]\nexprToJson (PatX schema (PatternStruct sel)) =\n  jsonObj\n    [ (\"tag\", jsonStr \"pattern\")\n    , (\"schema\", jsonStr schema)\n    , (\"pattern\", selectorToJson sel)\n    ]\n\nselectorToJson :: Selector -> Text\nselectorToJson SelectorEnd = jsonObj [(\"type\", jsonStr \"end\")]\nselectorToJson (SelectorIdx t ts) =\n  jsonObj\n    [ (\"type\", jsonStr \"idx\")\n    , (\"selectors\", jsonArr [idxSel i s | (i, s) <- t : ts])\n    ]\n  where\n    idxSel i sub =\n      jsonObj\n        [ (\"index\", jsonInt i)\n        , (\"sub\", selectorToJson sub)\n        ]\nselectorToJson (SelectorKey t ts) =\n  jsonObj\n    [ (\"type\", jsonStr \"key\")\n    , (\"selectors\", jsonArr [keySel k s | (k, s) <- t : ts])\n    ]\n  where\n    keySel k sub =\n      jsonObj\n        [ (\"key\", jsonStr k)\n        , (\"sub\", selectorToJson sub)\n        ]\n\nlitSchemaStr :: LitType -> Text\nlitSchemaStr F32X = \"f4\"\nlitSchemaStr F64X = \"f8\"\nlitSchemaStr I8X = \"i1\"\nlitSchemaStr I16X = \"i2\"\nlitSchemaStr I32X = \"i4\"\nlitSchemaStr I64X = \"i8\"\nlitSchemaStr U8X = \"u1\"\nlitSchemaStr U16X = \"u2\"\nlitSchemaStr U32X = \"u4\"\nlitSchemaStr U64X = \"u8\"\nlitSchemaStr BoolX = \"b\"\nlitSchemaStr NullX = \"z\"\n\n-- ======================================================================\n-- Manifest builder\n-- ======================================================================\n\nbuildManifest ::\n  Config ->\n  LangRegistry ->\n  String ->\n  String ->\n  Int ->\n  [(Lang, Socket)] ->\n  [FData] ->\n  [GastData] ->\n  (Lang -> Int) ->\n  Map.Map Int Text ->\n  Map.Map Text [Text] ->\n  [Text] ->\n  [[Text]] ->\n  Text\nbuildManifest config registry programName buildDir buildTime daemonSets fdata gasts langToPool indexToGroup groupDescs moduleDoc moduleEpilogues =\n  jsonObj\n    [ (\"name\", jsonStr (MT.pack programName))\n    , (\"build\", buildJson)\n    , (\"pools\", jsonArr (map poolJson daemonSets))\n    , (\"commands\", jsonArr (map remoteCmdJson fdata ++ map pureCmdJson gasts))\n    , (\"groups\", jsonArr (map groupJson (Map.toList groupDescs)))\n    , (\"desc\", jsonStrArr moduleDoc)\n    , (\"epilogues\", jsonArr (map jsonStrArr moduleEpilogues))\n    , (\"metadata\", metadataEmpty)\n    ]\n  where\n    -- Compiler-sourced build metadata. Distinct from the top-level\n    -- user-sourced @metadata@ slot. Future additions (hash, host, user,\n    -- system, source_hash, ...) go directly in this object.\n    buildJson :: Text\n    buildJson =\n      jsonObj\n        [ (\"path\", jsonStr (MT.pack buildDir))\n        , (\"time\", jsonInt buildTime)\n        , (\"morloc_version\", jsonStr (MT.pack Morloc.Version.versionStr))\n        ]\n\n    poolJson :: (Lang, Socket) -> Text\n    poolJson (lang, _) =\n      jsonObj\n        [ (\"lang\", jsonStr (ML.showLangName lang))\n        , (\"exec\", jsonStrArr (map MT.pack (makeExecArgs lang)))\n        , (\"socket\", jsonStr (\"pipe-\" <> ML.showLangName lang))\n        , (\"metadata\", metadataEmpty)\n        ]\n\n    makeExecArgs :: Lang -> [String]\n    makeExecArgs lang =\n      let name = ML.langName lang\n          isCompiled = LR.registryIsCompiled registry name\n          runCmd = case Map.lookup name (MC.configLangOverrides config) of\n            Just cmd -> map MT.unpack cmd\n            Nothing -> map MT.unpack (LR.registryRunCommand registry name)\n          poolExe = buildDir </> \"pools\" </> programName </> ML.makeExecutablePoolName lang\n       in if isCompiled\n            then [poolExe]\n            else\n              if null runCmd\n                then [MT.unpack name, poolExe]\n                else runCmd ++ [poolExe]\n\n    groupJson :: (Text, [Text]) -> Text\n    groupJson (gname, desc) =\n      jsonObj\n        [ (\"name\", jsonStr gname)\n        , (\"desc\", jsonStrArr desc)\n        , (\"metadata\", metadataEmpty)\n        ]\n\n    -- Emit a real JSON null when the command has no group, not the\n    -- literal string \"null\". Consumers (notably Rust serde) treat the\n    -- two differently: a real null deserializes to None, while a\n    -- string \"null\" used to require a custom deserializer that has\n    -- since been dropped.\n    -- Look up by manifold ID rather than subcommand name, since the\n    -- subcommand may be renamed via --' name: docstrings.\n    cmdGroupField :: Int -> (Text, Text)\n    cmdGroupField mid = case Map.lookup mid indexToGroup of\n      Just gname -> (\"group\", jsonStr gname)\n      Nothing -> (\"group\", jsonNull)\n\n    remoteCmdJson :: FData -> Text\n    remoteCmdJson fd =\n      jsonObj\n        [ (\"name\", jsonStr (fdataSubcommand fd))\n        , (\"type\", jsonStr \"remote\")\n        , (\"mid\", jsonInt (fdataMid fd))\n        , (\"pool\", jsonInt (langToPool (socketLang (fdataSocket fd))))\n        , (\"needed_pools\", jsonArr (map (jsonInt . langToPool . socketLang) (fdataSubSockets fd)))\n        , (\"desc\", jsonStrArr (cmdDocDesc (fdataCmdDocSet fd)))\n        , (\"args\", argsJson (cmdDocArgs (fdataCmdDocSet fd)) (fdataArgSchemas fd))\n        , (\"return\", returnJson (fdataReturnSchema fd) (fdataType fd) (snd (cmdDocRet (fdataCmdDocSet fd))))\n        , (\"constraints\", jsonArr [])\n        , (\"metadata\", metadataEmpty)\n        , cmdGroupField (fdataMid fd)\n        ]\n\n    pureCmdJson :: GastData -> Text\n    pureCmdJson g =\n      jsonObj\n        [ (\"name\", jsonStr (commandName g))\n        , (\"type\", jsonStr \"pure\")\n        , (\"desc\", jsonStrArr (cmdDocDesc (commandDocs g)))\n        , (\"args\", argsJson (cmdDocArgs (commandDocs g)) (commandArgSchemas g))\n        , (\"return\", returnJson (commandReturnSchema g) (commandType g) (snd (cmdDocRet (commandDocs g))))\n        , (\"expr\", exprToJson (commandExpr g))\n        , (\"constraints\", jsonArr [])\n        , (\"metadata\", metadataEmpty)\n        , cmdGroupField (commandMid g)\n        ]\n\n    -- Render the @args@ JSON array. 'makeSchemas' produces one schema\n    -- per arg position in the original function signature, INCLUDING\n    -- flags. So 'fdataArgSchemas' is index-aligned 1:1 with 'docArgs'.\n    -- For each arg we attach the corresponding schema; flags drop\n    -- their schema in the JSON output (it's never used at dispatch\n    -- time for boolean flags) but we still consume the schema slot to\n    -- keep the index alignment intact for subsequent args.\n    argsJson :: [CmdArg] -> [Text] -> Text\n    argsJson docArgs schemas =\n      jsonArr (pairArgsWithSchemas docArgs schemas)\n      where\n        pairArgsWithSchemas :: [CmdArg] -> [Text] -> [Text]\n        pairArgsWithSchemas [] _ = []\n        -- Flags consume a schema slot but emit no `schema` field.\n        pairArgsWithSchemas (a@(CmdArgFlag _) : rest) (_ : ss) =\n          argToJson Nothing a : pairArgsWithSchemas rest ss\n        pairArgsWithSchemas (a : rest) (s : ss) =\n          argToJson (Just s) a : pairArgsWithSchemas rest ss\n        pairArgsWithSchemas (a : rest) [] =\n          -- Defensive: more args than schemas. Emit with no schema\n          -- so we fail cleanly downstream rather than silently\n          -- misaligning.\n          argToJson Nothing a : pairArgsWithSchemas rest []\n\n    -- Nested @return@ object replacing v1's flat @return_schema@ /\n    -- @return_type@ / @return_desc@. Also carries @constraints@ and\n    -- @metadata@ for symmetry with args.\n    returnJson :: Text -> Type -> [Text] -> Text\n    returnJson schema t desc =\n      let retT = stripThunks (returnTypeOnly t)\n      in jsonObj\n        [ (\"schema\", jsonStr schema)\n        , (\"type\", jsonStr (render (pretty retT)))\n        , (\"desc\", jsonStrArr desc)\n        , (\"constraints\", constraintsJsonFor retT)\n        , (\"metadata\", metadataEmpty)\n        ]\n\n    -- Extract the return type from a function type; pass other types\n    -- through unchanged.\n    returnTypeOnly :: Type -> Type\n    returnTypeOnly (FunT _ t) = t\n    returnTypeOnly t          = t\n\n    stripThunks :: Type -> Type\n    stripThunks (EffectT _ t') = stripThunks t'\n    stripThunks t' = t'\n\n-- ======================================================================\n-- Main entry point\n-- ======================================================================\n\ngenerate ::\n  [(AnnoS (Indexed Type) One (), CmdDocSet)] ->\n  [(AnnoS (Indexed Type) One (Indexed Lang), CmdDocSet)] ->\n  MorlocMonad Script\ngenerate cs rASTs = do\n  config <- MM.ask\n  st <- CMS.get\n\n  -- Extract data for remote commands\n  xs <- mapM makeFData rASTs\n  fdata <- CM.mapM getFData xs\n\n  -- Extract data for pure commands\n  gasts <- mapM annotateGasts cs\n\n  -- Get build time and compute build directory\n  buildTime <- liftIO $ floor <$> Time.getPOSIXTime\n  programName <- MM.getModuleName\n  buildDir <-\n    if stateInstall st\n      then do\n        let installDir = configHome config </> \"exe\" </> programName\n        CMS.modify (\\s -> s {stateInstallDir = Just installDir})\n        return installDir\n      else liftIO Dir.getCurrentDirectory\n\n  -- Build pool list (deduplicated by language)\n  let allSockets = concatMap (\\x -> fdataSocket x : fdataSubSockets x) fdata\n      daemonSets = uniqueFst [(socketLang s, s) | s <- allSockets]\n\n      langToPoolIndex :: Lang -> Int\n      langToPoolIndex lang =\n        case findIndex ((== lang) . fst) daemonSets of\n          Just idx -> idx\n          Nothing -> error $ \"Pool not found for language: \" <> show lang\n\n  -- Build manifest JSON with relative pool paths\n  outfileName <- MM.getOutfileName\n  registry <- MM.gets stateLangRegistry\n\n  -- Build group info for manifest\n  exportGroups <- MM.gets stateExportGroups\n  let indexToGroup =\n        Map.fromList\n          [ (idx, gname)\n          | (gname, (_, indices)) <- Map.toList exportGroups\n          , idx <- indices\n          ]\n      groupDescs =\n        Map.fromList\n          [ (gname, desc)\n          | (gname, (desc, _)) <- Map.toList exportGroups\n          ]\n\n  moduleDoc <- MM.gets stateModuleDoc\n  moduleEpilogues <- MM.gets stateModuleEpilogues\n\n  let manifestJson =\n        buildManifest\n          config\n          registry\n          programName\n          buildDir\n          buildTime\n          daemonSets\n          fdata\n          gasts\n          langToPoolIndex\n          indexToGroup\n          groupDescs\n          moduleDoc\n          moduleEpilogues\n      wrapperScript = makeWrapperScript manifestJson\n\n  return $\n    Script\n      { scriptBase = outfileName\n      , scriptLang = cLang\n      , scriptCode = \".\" :/ File outfileName (Code wrapperScript)\n      , scriptMake = [SysExe outfileName]\n      }\n\n-- Build a self-contained wrapper script with embedded manifest\nmakeWrapperScript :: Text -> Text\nmakeWrapperScript manifestJson =\n  \"#!/bin/sh\\nexec morloc-nexus \\\"$0\\\" \\\"$@\\\"\\n### MANIFEST ###\\n\" <> manifestJson\n\n-- ======================================================================\n-- Utilities\n-- ======================================================================\n\nuniqueFst :: (Eq a) => [(a, b)] -> [(a, b)]\nuniqueFst = f []\n  where\n    f _ [] = []\n    f seen (x@(a, _) : xs)\n      | a `elem` seen = f seen xs\n      | otherwise = x : f (a : seen) xs\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Parameterize.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Parameterize\nDescription : Propagate function arguments down through the AnnoS tree\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nThreads the top-level function parameters through the expression tree\nso that each manifold node knows which arguments it needs. This is\nnecessary because the tree may contain multiple alternative implementations\nthat share the same parameter interface.\n-}\nmodule Morloc.CodeGenerator.Parameterize\n  ( parameterize\n  ) where\n\nimport Data.Text (Text)\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Monad as MM\n\n{- | Add arguments that are required for each term. Unneeded arguments are\nremoved at each step.\n-}\nparameterize ::\n  AnnoS (Indexed Type) One (Indexed Lang) ->\n  MorlocMonad (AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]))\nparameterize (AnnoS m@(Idx _ (FunT inputs _)) c (LamS vs x)) = do\n  MM.sayVVV \"Entering parameterize LamS\"\n  ids <- MM.takeFromCounter (length inputs)\n  let args0 = fromJust $ safeZipWith Arg ids vs\n  x' <- parameterize' args0 x\n  return $ AnnoS m (c, args0) (LamS vs x')\nparameterize (AnnoS m@(Idx _ (FunT inputs _)) c@(Idx _ lang) (BndS v)) = do\n  MM.sayVVV $ \"Entering parameterize VarS function - \" <> pretty v <> \"@\" <> pretty lang\n  ids <- MM.takeFromCounter (length inputs)\n  let vs = map EV (freshVarsAZ [])\n      args0 = fromJust $ safeZipWith Arg ids vs\n  return $ AnnoS m (c, args0) (BndS v)\nparameterize x = do\n  MM.sayVVV \"Entering parameterize Other\"\n  parameterize' [] x\n\nparameterize' ::\n  [Arg EVar] -> -- arguments in parental scope (child needn't retain them)\n  AnnoS (Indexed Type) One (Indexed Lang) ->\n  MorlocMonad (AnnoS (Indexed Type) One (Indexed Lang, [Arg EVar]))\n-- primitives, no arguments are required for a primitive, so empty lists\nparameterize' _ (AnnoS g c UniS) = return $ AnnoS g (c, []) UniS\nparameterize' _ (AnnoS g c NullS) = return $ AnnoS g (c, []) NullS\nparameterize' _ (AnnoS g c (RealS x)) = return (AnnoS g (c, []) (RealS x))\nparameterize' _ (AnnoS g c (IntS x)) = return (AnnoS g (c, []) (IntS x))\nparameterize' _ (AnnoS g c (LogS x)) = return (AnnoS g (c, []) (LogS x))\nparameterize' _ (AnnoS g c (StrS x)) = return (AnnoS g (c, []) (StrS x))\nparameterize' args (AnnoS g c (BndS v)) = do\n  let args' = [r | r@(Arg _ v') <- args, v' == v]\n  return $ AnnoS g (c, args') (BndS v)\nparameterize' _ (AnnoS m c (ExeS (SrcCall src))) =\n  return $ AnnoS m (c, []) (ExeS (SrcCall src))\nparameterize' _ (AnnoS g c (ExeS (PatCall x))) =\n  return (AnnoS g (c, []) (ExeS (PatCall x)))\nparameterize' args (AnnoS g c (LstS xs)) = do\n  xs' <- mapM (parameterize' args) xs\n  let args' = pruneArgs args xs'\n  return $ AnnoS g (c, args') (LstS xs')\nparameterize' args (AnnoS g c (TupS xs)) = do\n  xs' <- mapM (parameterize' args) xs\n  let args' = pruneArgs args xs'\n  return $ AnnoS g (c, args') (TupS xs')\nparameterize' args (AnnoS g c (NamS entries)) = do\n  xs' <- mapM (parameterize' args . snd) entries\n  let args' = pruneArgs args xs'\n  return $ AnnoS g (c, args') (NamS (zip (map fst entries) xs'))\nparameterize' args (AnnoS g@(Idx _ (FunT inputs _)) c (LamS vs x)) = do\n  ids <- MM.takeFromCounter (length inputs)\n  let contextArgs = [r | r@(Arg _ v) <- args, v `notElem` vs] -- remove shadowed arguments\n      boundArgs = fromJust $ safeZipWith Arg ids vs\n  x' <- parameterize' (contextArgs <> boundArgs) x\n  let contextArgs' = pruneArgs contextArgs [x']\n  return $ AnnoS g (c, contextArgs' <> boundArgs) (LamS vs x')\n-- LamS MUST have a functional type, deviations would have been caught by the typechecker\nparameterize' _ (AnnoS _ _ (LamS _ _)) = error \"impossible\"\nparameterize' args (AnnoS g c (AppS x xs)) = do\n  x' <- parameterize' args x\n  xs' <- mapM (parameterize' args) xs\n  let args' = pruneArgs args (x' : xs')\n  return $ AnnoS g (c, args') (AppS x' xs')\nparameterize' args (AnnoS g c (LetBndS v)) = do\n  let args' = [r | r@(Arg _ v') <- args, v' == v]\n  return $ AnnoS g (c, args') (LetBndS v)\nparameterize' args (AnnoS g c (LetS v e1 e2)) = do\n  e1' <- parameterize' args e1\n  idx <- MM.getCounter\n  let letArg = Arg idx v\n      bodyArgs = letArg : [r | r@(Arg _ v') <- args, v' /= v]\n  e2' <- parameterize' bodyArgs e2\n  let args' = pruneArgs args [e1', e2']\n  return $ AnnoS g (c, args') (LetS v e1' e2')\nparameterize' args (AnnoS g c (IfS cond thenE elseE)) = do\n  cond' <- parameterize' args cond\n  thenE' <- parameterize' args thenE\n  elseE' <- parameterize' args elseE\n  let args' = pruneArgs args [cond', thenE', elseE']\n  return $ AnnoS g (c, args') (IfS cond' thenE' elseE')\nparameterize' args (AnnoS g c (DoBlockS e)) = do\n  e' <- parameterize' args e\n  let args' = pruneArgs args [e']\n  return $ AnnoS g (c, args') (DoBlockS e')\nparameterize' args (AnnoS g c (EvalS e)) = do\n  e' <- parameterize' args e\n  let args' = pruneArgs args [e']\n  return $ AnnoS g (c, args') (EvalS e')\nparameterize' args (AnnoS g c (CoerceS co e)) = do\n  e' <- parameterize' args e\n  let args' = pruneArgs args [e']\n  return $ AnnoS g (c, args') (CoerceS co e')\nparameterize' args (AnnoS g c (IntrinsicS intr es)) = do\n  es' <- mapM (parameterize' args) es\n  let args' = pruneArgs args es'\n  return $ AnnoS g (c, args') (IntrinsicS intr es')\nparameterize' _ (AnnoS g c (CallS v)) = do\n  return $ AnnoS g (c, []) (CallS v)\nparameterize' _ (AnnoS _ _ (VarS _ _)) = undefined\n\npruneArgs :: [Arg a] -> [AnnoS c One (g, [Arg a])] -> [Arg a]\npruneArgs args xs =\n  let usedArgs = unique $ concatMap (map ann . sannoSnd) xs\n   in [r | r@(Arg i _) <- args, i `elem` usedArgs]\n\nsannoSnd :: AnnoS g One (a, b) -> b\nsannoSnd (AnnoS _ (_, x) _) = x\n\n-- generate infinite list of fresh variables of form\n-- ['a','b',...,'z','aa','ab',...,'zz',...]\nfreshVarsAZ ::\n  [Text] -> -- variables to exclude\n  [Text]\nfreshVarsAZ exclude =\n  filter\n    (`notElem` exclude)\n    ([1 ..] >>= flip replicateM ['a' .. 'z'] |>> MT.pack)\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Realize.hs",
    "content": "{-# LANGUAGE CPP #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Realize\nDescription : Select concrete implementations for each polymorphic call site\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nWhen a term has multiple candidate implementations (different languages,\ndifferent source files), this pass selects the best one at each call site\nbased on language affinity and minimizing cross-language transitions. The\nresult is a fully-realized tree where every node has exactly one\nimplementation.\n-}\nmodule Morloc.CodeGenerator.Realize\n  ( realityCheck\n  , removeVarS\n  ) where\n\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.CodeGenerator.SystemConfig as MCS\nimport Morloc.Data.Doc\nimport Morloc.Data.Map (Map)\nimport qualified Morloc.Data.Map as Map\nimport qualified Data.Set as Set\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.TypeEval as TE\n\nrealityCheck ::\n  -- | one AST forest for each command exported from main\n  [AnnoS (Indexed Type) Many Int] ->\n  MorlocMonad\n    ( [AnnoS (Indexed Type) One ()]\n    , [AnnoS (Indexed Type) One (Indexed Lang)]\n    )\nrealityCheck es = do\n  -- translate modules into bitrees\n  (gASTs0, rASTs0) <-\n    -- select a single instance at each node in the tree\n    mapM realize es\n      -- separate unrealized (general) ASTs (uASTs) from realized ASTs (rASTs)\n      |>> partitionEithers\n\n  -- Extract non-exported recursive helpers into their own rASTs.\n  -- This must happen before removeVarS so we can find the VarS wrappers.\n  rASTs1 <- extractRecursiveHelpers rASTs0\n\n  -- Now dissolve remaining (non-recursive) VarS wrappers\n  let gASTs = map removeVarS gASTs0\n      rASTs = map removeVarS rASTs1\n\n  -- check and configure the system\n  -- in the future, the results of this step may be used to winnow the build\n  MCS.configure rASTs\n\n  return (gASTs, rASTs)\n\n-- State for the realize scoring algorithm\ndata RState = RState\n  { rLangs :: [Lang]\n  , rApplied :: [AnnoS (Indexed Type) Many Int]\n  , rBndVars :: Map EVar (AnnoS (Indexed Type) Many Int)\n  }\n\nemptyRState =\n  RState\n    { rLangs = []\n    , rApplied = []\n    , rBndVars = Map.empty\n    }\n\n{- | Choose a single concrete implementation. In the future, this component\nmay be one of the more complex components of the morloc compiler. It will\nprobably need to be implemented using an optimizing SMT solver. It will\nalso need benchmarking data from all the implementations and possibly\nstatistical info describing inputs.\n-}\nrealize ::\n  AnnoS (Indexed Type) Many Int ->\n  MorlocMonad\n    ( Either\n        (AnnoS (Indexed Type) One ())\n        (AnnoS (Indexed Type) One (Indexed Lang))\n    )\nrealize s0 = do\n  registry <- MM.gets stateLangRegistry\n  realizeWithRegistry registry s0\n\nrealizeWithRegistry ::\n  LangRegistry ->\n  AnnoS (Indexed Type) Many Int ->\n  MorlocMonad\n    ( Either\n        (AnnoS (Indexed Type) One ())\n        (AnnoS (Indexed Type) One (Indexed Lang))\n    )\nrealizeWithRegistry registry s0 = do\n  e@(AnnoS _ li _) <- scoreAnnoS emptyRState s0 >>= collapseAnnoS Nothing\n  case li of\n    (Idx _ Nothing) -> makeGAST e |>> Left\n    (Idx _ _) -> propagateDown e |>> Right\n  where\n    pairwiseCost :: Lang -> Lang -> Int\n    pairwiseCost l1 l2\n      | l1 == l2 = case Map.lookup (langName l2) (lrSameLangCosts registry) of\n          Nothing -> lrDefaultSameCost registry\n          (Just score) -> score\n      | otherwise = case Map.lookup (langName l1, langName l2) (lrOptimizedPairs registry) of\n          Nothing -> case Map.lookup (langName l2) (lrCrossLangCosts registry) of\n            Nothing -> lrDefaultCrossCost registry\n            (Just score) -> score\n          (Just score) -> score\n\n    languageCost :: Lang -> Int\n    languageCost lang = pairwiseCost lang lang\n\n    -- \\| Depth first pass calculating scores for each language. Alternates with\n    -- scoresSExpr.\n    scoreAnnoS ::\n      RState ->\n      AnnoS (Indexed Type) Many Int ->\n      MorlocMonad (AnnoS (Indexed Type) Many (Indexed [(Lang, Int)]))\n    scoreAnnoS rstat (AnnoS gi ci e) = do\n      (e', ci') <- scoreExpr rstat (e, ci)\n      return $ AnnoS gi ci' e'\n\n    -- \\| Alternates with scoresAnnoS, finds the best score for each language at\n    -- application nodes.\n    scoreExpr ::\n      RState ->\n      (ExprS (Indexed Type) Many Int, Int) ->\n      MorlocMonad (ExprS (Indexed Type) Many (Indexed [(Lang, Int)]), Indexed [(Lang, Int)])\n    scoreExpr rstat (LstS xs, i) = do\n      (xs', best) <- scoreMany rstat xs\n      return (LstS xs', Idx i best)\n    scoreExpr rstat (TupS xs, i) = do\n      (xs', best) <- scoreMany rstat xs\n      return (TupS xs', Idx i best)\n    scoreExpr rstat (NamS rs, i) = do\n      (xs, best) <- scoreMany rstat (map snd rs)\n      return (NamS (zip (map fst rs) xs), Idx i best)\n    scoreExpr rstat (LamS vs x, i) = do\n      x' <- scoreAnnoS (updateRState vs rstat) x\n      return (LamS vs x', Idx i (scoresOf x'))\n    scoreExpr rstat (AppS f xs, i) = do\n      -- store all applied arguments\n      -- these may be bound to lambdas within f\n      -- they are required for resolving the application language\n      let rstat' = rstat {rLangs = [], rApplied = xs}\n\n      f' <- scoreAnnoS rstat' f\n\n      -- best scores for each language for f\n      let scores = scoresOf f'\n          rstat'' = emptyRState {rLangs = unique $ map fst scores}\n\n      xs' <- mapM (scoreAnnoS rstat'') xs\n\n      -- [[(Lang, Int)]] : where Lang is unique within each list and Int is minimized\n      let pairss = [minPairs pairs | AnnoS _ (Idx _ pairs) _ <- xs']\n      let best = scoreApp scores pairss\n\n      return (AppS f' xs', Idx i best)\n    -- non-recursive expressions\n    scoreExpr rstat (UniS, i) = return (UniS, zipLang i rstat)\n    scoreExpr rstat (NullS, i) = return (NullS, zipLang i rstat)\n    scoreExpr rstat (VarS v (Many xs), i) = do\n      (xs', best) <- scoreMany rstat xs\n      return (VarS v (Many xs'), Idx i best)\n    scoreExpr rstat (BndS v, i) = do\n      case Map.lookup v (rBndVars rstat) of\n        (Just e@(AnnoS (Idx _ (FunT _ _)) _ _)) -> do\n          scores <- scoreAnnoS rstat e |>> scoresOf\n          return (BndS v, Idx i scores)\n        _ -> return (BndS v, zipLang i rstat)\n    scoreExpr _ (ExeS x@(SrcCall src), i) = return (ExeS x, Idx i [(srcLang src, callCost src)])\n    scoreExpr rstat (ExeS x@(PatCall _), i) = return (ExeS x, zipLang i rstat)\n    scoreExpr rstat (RealS x, i) = return (RealS x, zipLang i rstat)\n    scoreExpr rstat (IntS x, i) = return (IntS x, zipLang i rstat)\n    scoreExpr rstat (LogS x, i) = return (LogS x, zipLang i rstat)\n    scoreExpr rstat (StrS x, i) = return (StrS x, zipLang i rstat)\n    scoreExpr rstat (LetS v e1 e2, i) = do\n      e1' <- scoreAnnoS rstat e1\n      e2' <- scoreAnnoS rstat e2\n      -- include RHS scores so unused let bindings (e.g. from do-block bare\n      -- statements) still propagate their language requirement\n      let best = minPairs (scoresOf e1' ++ scoresOf e2')\n      return (LetS v e1' e2', Idx i best)\n    scoreExpr rstat (LetBndS v, i) = return (LetBndS v, zipLang i rstat)\n    scoreExpr rstat (CallS v, i) = return (CallS v, zipLang i rstat)\n    scoreExpr rstat (IfS c t e, i) = do\n      c' <- scoreAnnoS rstat c\n      t' <- scoreAnnoS rstat t\n      e' <- scoreAnnoS rstat e\n      let best = minPairs (scoresOf c' ++ scoresOf t' ++ scoresOf e')\n      return (IfS c' t' e', Idx i best)\n    scoreExpr rstat (DoBlockS x, i) = do\n      x' <- scoreAnnoS rstat x\n      return (DoBlockS x', Idx i (scoresOf x'))\n    scoreExpr rstat (EvalS x, i) = do\n      x' <- scoreAnnoS rstat x\n      return (EvalS x', Idx i (scoresOf x'))\n    scoreExpr rstat (CoerceS c x, i) = do\n      x' <- scoreAnnoS rstat x\n      return (CoerceS c x', Idx i (scoresOf x'))\n    scoreExpr rstat (IntrinsicS intr xs, i) = do\n      xs' <- mapM (scoreAnnoS rstat) xs\n      let Idx _ langScores = zipLang i rstat\n          best = case xs' of\n            [] -> langScores\n            _ -> minPairs (concatMap scoresOf xs')\n      return (IntrinsicS intr xs', Idx i best)\n\n    -- calculate the score for an application based on the score of the function\n    -- and the scores of the arguments\n    scoreApp ::\n      [ ( Lang -- the language of the ith calling function implementation\n        , Int -- the score of the ith implementation\n        )\n      ] ->\n      [ [ ( Lang -- the language of the jth implementation of the kth argument\n          , Int -- the score of the jth implementation of the kth argument\n          )\n        ]\n      ] ->\n      [(Lang, Int)]\n    -- if nothing is known, nothing is returned\n    scoreApp [] (concat -> []) = []\n    -- if none of the arguments are language-specific, the scores are based only\n    -- on the functions\n    scoreApp scores (concat -> []) = scores\n    -- if the function is not language-specific, calculate the cost of calling\n    -- all arguments from each possible language context\n    scoreApp [] pairss =\n      let score = [(lang, 0) | lang <- unique $ map fst (concat pairss)]\n       in scoreApp score pairss\n    -- if arguments and function have implementations, calculate cost relative to\n    -- each function implementation\n    scoreApp scores pairss =\n      [ ( l1\n        , s1\n            + sum\n              [ minimumDef 999999999 [s2 + pairwiseCost l1 l2 | (l2, s2) <- pairs]\n              | pairs <- pairss\n              ]\n        )\n      | (l1, s1) <- scores\n      ]\n\n    updateRState :: [EVar] -> RState -> RState\n    updateRState [] rstat = rstat\n    updateRState _ rstat@(RState _ [] _) = rstat\n    updateRState (v : vs) rstat@(RState _ (p : ps) bound) =\n      updateRState vs $\n        rstat {rApplied = ps, rBndVars = Map.insert v p bound}\n\n    zipLang :: Int -> RState -> Indexed [(Lang, Int)]\n    zipLang i (rLangs -> langs) = Idx i (zip langs (repeat 0))\n\n    scoresOf :: AnnoS a Many (Indexed [(Lang, Int)]) -> [(Lang, Int)]\n    scoresOf (AnnoS _ (Idx _ xs) _) = minPairs xs\n\n    -- find the scores of all implementations from all possible language contexts\n    scoreMany ::\n      RState ->\n      [AnnoS (Indexed Type) Many Int] ->\n      MorlocMonad ([AnnoS (Indexed Type) Many (Indexed [(Lang, Int)])], [(Lang, Int)])\n    scoreMany rstat xs0 = do\n      xs1 <- mapM (scoreAnnoS rstat) xs0\n      return (xs1, scoreMany' xs1)\n      where\n        scoreMany' :: [AnnoS (Indexed Type) Many (Indexed [(Lang, Int)])] -> [(Lang, Int)]\n        scoreMany' xs =\n          let pairss = [(minPairs . concat) [xs' | (AnnoS _ (Idx _ xs') _) <- xs]]\n              langs' = unique (rLangs rstat <> concatMap (map fst) pairss)\n           in -- Got 10 billion nodes in your AST? I didn't think so, so don't say my sentinal's ugly.\n              [ ( l1\n                , sum\n                    [ minimumDef\n                      999999999\n                      [ score + pairwiseCost l1 l2\n                      | (l2, score) <- pairs\n                      ]\n                    | pairs <- pairss\n                    ]\n                )\n              | l1 <- langs'\n              ]\n\n    collapseAnnoS ::\n      Maybe Lang ->\n      AnnoS (Indexed Type) Many (Indexed [(Lang, Int)]) ->\n      MorlocMonad (AnnoS (Indexed Type) One (Indexed (Maybe Lang)))\n    collapseAnnoS l1 (AnnoS gi@(Idx _ gt) ci e) = do\n      (e', ci') <- collapseExpr gt l1 (e, ci)\n      return (AnnoS gi ci' e')\n\n    -- The biased cost adds a slight penalty to changing language.\n    -- This penalty is unrelated to the often large penalty of foreign calls.\n    -- Rather, the purpose is just to distinguish VarS terms. It is totally\n    -- kludgy, a better recursion scheme is needed here.\n    biasedCost :: Maybe Lang -> (Lang, Int) -> Int\n    biasedCost l1 (l2, s)\n      | l1 == Just l2 = cost l1 l2 s\n      | otherwise = 1 + cost l1 l2 s\n\n    cost ::\n      Maybe Lang -> -- parent language (if given)\n      Lang -> -- child lang (should always be given if we are working from scored pairs)\n      Int -> -- score\n      Int\n    cost (Just l1) l2 score = score + pairwiseCost l1 l2\n    cost _ _ score = score\n\n    -- FIXME: in the future, this function should be replaced by an estimate of\n    -- the function runtime, for now I will just base it off languages.\n    callCost :: Source -> Int\n    callCost src = languageCost (srcLang src)\n\n    collapseExpr ::\n      Type ->\n      Maybe Lang -> -- the language of the parent expression (if Nothing, then this is a GAST)\n      (ExprS (Indexed Type) Many (Indexed [(Lang, Int)]), Indexed [(Lang, Int)]) ->\n      MorlocMonad (ExprS (Indexed Type) One (Indexed (Maybe Lang)), Indexed (Maybe Lang))\n\n    collapseExpr _ _ (VarS v (Many []), Idx i _) =\n      MM.throwSourcedError i $ \"No implementation found for\" <+> squotes (pretty v)\n    -- Select one implementation for the given term\n    collapseExpr gt l1 (VarS v (Many xs), Idx i _) = do\n      let minXs = minsBy (\\(AnnoS _ (Idx _ ss) _) -> minimumMay [cost l1 l2 s | (l2, s) <- ss]) xs\n      (x, lang) <- case minXs of\n        [] -> MM.throwSourcedError i $ \"No implementation found for\" <+> squotes (pretty v)\n        [x] -> handleOne x\n        choices -> mapM handleOne choices >>= handleMany gt\n      return (VarS v (One x), Idx i lang)\n      where\n        handleOne ::\n          AnnoS (Indexed Type) Many (Indexed [(Lang, Int)]) ->\n          MorlocMonad (AnnoS (Indexed Type) One (Indexed (Maybe Lang)), Maybe Lang)\n        handleOne x@(AnnoS _ (Idx _ ss) e) = do\n          let newLang =\n                if isFunctionalData e\n                  then l1\n                  else fmap fst (minBy (biasedCost l1) ss)\n          x' <- collapseAnnoS newLang x\n          return (x', newLang)\n\n        handleMany ::\n          Type ->\n          [(AnnoS (Indexed Type) One (Indexed (Maybe Lang)), Maybe Lang)] ->\n          MorlocMonad (AnnoS (Indexed Type) One (Indexed (Maybe Lang)), Maybe Lang)\n        handleMany gt' xs' =\n          -- Match candidates by head type constructor, then fall back to\n          -- alias reduction. This handles type aliases (e.g. Deque = List)\n          -- by first looking for an exact head match, then reducing the\n          -- expected type one level and searching again.\n          case [x | x@(AnnoS (Idx _ t) _ _, _) <- xs', sameTypeHead gt' t] of\n            [] -> do\n              gscope <- MM.getGeneralScope i\n              case TE.reduceType gscope (type2typeu gt') of\n                (Just gt'') -> handleMany (typeOf gt'') xs'\n                Nothing ->\n                  case xs' of\n                    -- All candidates have the same head: they're duplicates from\n                    -- different imports (e.g., mempty = [] from both root-py and root-cpp).\n                    (x'@(AnnoS (Idx _ t0) _ _, _) : rest)\n                      | all (\\(AnnoS (Idx _ t) _ _, _) -> sameTypeHead t0 t) rest -> return x'\n                    _ ->\n                      MM.throwSourcedError i $\n                        \"I couldn't find implementation for\" <+> squotes (pretty v) <+> \"gt' = \" <+> pretty gt'\n            [x'] -> return x'\n            (x' : _) -> return x'\n\n        -- Compare types by their head constructor, ignoring parameters.\n        -- This handles candidates with unresolved type variables (UnkT).\n        sameTypeHead :: Type -> Type -> Bool\n        sameTypeHead (AppT (VarT v1) _) (AppT (VarT v2) _) = v1 == v2\n        sameTypeHead (VarT v1) (VarT v2) = v1 == v2\n        sameTypeHead (FunT _ r1) (FunT _ r2) = sameTypeHead r1 r2\n        sameTypeHead t1 t2 = t1 == t2\n\n    ----- NOTE: Some cases are inseperable, the code above does not\n    ----- account for this, which may allow incorrect code to be\n    ----- generated.\n    -- xs' ->  MM.throwSystemError\n    --   $ \"no rule to separate the following sourced functions of type\" <+> parens (pretty gt)\":\\n\"\n    --   <> indent 2 (vsep [ \"*\" <+> pretty t <+> \":\" <+> pretty y | y@(AnnoS (Idx _ t) _ _, _)  <- xs'])\n\n    -- Propagate downwards\n    collapseExpr _ l1 (LamS vs x, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      x' <- collapseAnnoS lang x\n      return (LamS vs x', Idx i lang)\n    collapseExpr _ l1 (AppS f xs, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      f' <- collapseAnnoS lang f\n      xs' <- mapM (collapseAnnoS lang) xs\n      return (AppS f' xs', Idx i lang)\n    -- Propagate data\n    collapseExpr _ l1 (e@(LstS xs), Idx i ss) = do\n      lang <- if isFunctionalData e then return l1 else chooseLanguage l1 ss\n      xs' <- mapM (collapseAnnoS lang) xs\n      return (LstS xs', Idx i lang)\n    collapseExpr _ l1 (e@(TupS xs), Idx i ss) = do\n      lang <- if isFunctionalData e then return l1 else chooseLanguage l1 ss\n      xs' <- mapM (collapseAnnoS lang) xs\n      return (TupS xs', Idx i lang)\n    collapseExpr _ l1 (e@(NamS rs), Idx i ss) = do\n      lang <- if isFunctionalData e then return l1 else chooseLanguage l1 ss\n      xs' <- mapM (collapseAnnoS lang . snd) rs\n      return (NamS (zip (map fst rs) xs'), Idx i lang)\n    -- collapse leaf expressions\n    collapseExpr _ _ (ExeS x@(SrcCall src), Idx i _) = return (ExeS x, Idx i (Just (srcLang src)))\n    collapseExpr _ lang (ExeS x@(PatCall _), Idx i _) = return (ExeS x, Idx i lang)\n    collapseExpr _ lang (BndS v, Idx i _) = return (BndS v, Idx i lang)\n    collapseExpr _ lang (UniS, Idx i _) = return (UniS, Idx i lang)\n    collapseExpr _ lang (NullS, Idx i _) = return (NullS, Idx i lang)\n    collapseExpr _ lang (RealS x, Idx i _) = return (RealS x, Idx i lang)\n    collapseExpr _ lang (IntS x, Idx i _) = return (IntS x, Idx i lang)\n    collapseExpr _ lang (LogS x, Idx i _) = return (LogS x, Idx i lang)\n    collapseExpr _ lang (StrS x, Idx i _) = return (StrS x, Idx i lang)\n    collapseExpr _ l1 (LetS v e1 e2, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      e1' <- collapseAnnoS lang e1\n      e2' <- collapseAnnoS lang e2\n      return (LetS v e1' e2', Idx i lang)\n    collapseExpr _ lang (LetBndS v, Idx i _) = return (LetBndS v, Idx i lang)\n    collapseExpr _ lang (CallS v, Idx i _) = return (CallS v, Idx i lang)\n    collapseExpr _ l1 (IfS c t e, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      c' <- collapseAnnoS lang c\n      t' <- collapseAnnoS lang t\n      e' <- collapseAnnoS lang e\n      return (IfS c' t' e', Idx i lang)\n    collapseExpr _ l1 (DoBlockS x, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      x' <- collapseAnnoS lang x\n      return (DoBlockS x', Idx i lang)\n    collapseExpr _ l1 (EvalS x, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      x' <- collapseAnnoS lang x\n      return (EvalS x', Idx i lang)\n    collapseExpr _ l1 (CoerceS c x, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      x' <- collapseAnnoS lang x\n      return (CoerceS c x', Idx i lang)\n    collapseExpr _ l1 (IntrinsicS intr xs, Idx i ss) = do\n      lang <- chooseLanguage l1 ss\n      xs' <- mapM (collapseAnnoS lang) xs\n      return (IntrinsicS intr xs', Idx i lang)\n\n    chooseLanguage :: Maybe Lang -> [(Lang, Int)] -> MorlocMonad (Maybe Lang)\n    chooseLanguage l1 ss = do\n      case minBy snd [(l2, cost l1 l2 s2) | (l2, s2) <- ss] of\n        Nothing -> return Nothing\n        (Just (l3, _)) -> return (Just l3)\n\n    minBy :: (Ord b) => (a -> b) -> [a] -> Maybe a\n    minBy _ [] = Nothing\n    minBy _ [x] = Just x\n    minBy f (x1 : rs) = case minBy f rs of\n      Nothing -> Just x1\n      (Just x2) -> if f x1 <= f x2 then Just x1 else Just x2\n\n    minsBy :: (Ord b) => (a -> b) -> [a] -> [a]\n    minsBy _ [] = []\n    minsBy f (x : xs) = snd $ minsBy' (f x, [x]) xs\n      where\n        minsBy' (best, grp) [] = (best, grp)\n        minsBy' (best, grp) (y : ys) = minsBy' (newSet (f y)) ys\n          where\n            newSet newScore\n              | newScore == best = (best, y : grp)\n              | newScore < best = (newScore, [y])\n              | otherwise = (best, grp)\n\n    -- find the lowest cost function for each key\n    -- the groupSort function will never yield an empty value for vs, so `minimum` is safe\n    minPairs :: (Ord a, Ord b) => [(a, b)] -> [(a, b)]\n    minPairs = map (second minimum) . groupSort\n\n    propagateDown ::\n      AnnoS (Indexed Type) One (Indexed (Maybe Lang)) ->\n      MorlocMonad (AnnoS (Indexed Type) One (Indexed Lang))\n    propagateDown (AnnoS _ (Idx i Nothing) _) =\n      MM.throwSourcedError i $ \"Compiler bug: (__FILE__:__LINE__) - Unexpected Nothing\"\n    propagateDown e@(AnnoS _ (Idx _ (Just lang0)) _) = f lang0 e\n      where\n        f ::\n          Lang ->\n          AnnoS (Indexed Type) One (Indexed (Maybe Lang)) ->\n          MorlocMonad (AnnoS (Indexed Type) One (Indexed Lang))\n        f lang (AnnoS g (Idx i Nothing) e') = f lang (AnnoS g (Idx i (Just lang)) e')\n        f _ (AnnoS g (Idx i (Just lang)) e') = do\n          e'' <- case e' of\n            (AppS x xs) -> AppS <$> f lang x <*> mapM (f lang) xs\n            (LamS vs x) -> LamS vs <$> f lang x\n            (LstS xs) -> LstS <$> mapM (f lang) xs\n            (TupS xs) -> TupS <$> mapM (f lang) xs\n            (NamS rs) -> NamS <$> (zip (map fst rs) <$> mapM (f lang . snd) rs)\n            UniS -> return UniS\n            NullS -> return NullS\n            (VarS v (One x)) -> VarS v . One <$> f lang x\n            (BndS v) -> return (BndS v)\n            (RealS x) -> return (RealS x)\n            (IntS x) -> return (IntS x)\n            (LogS x) -> return (LogS x)\n            (StrS x) -> return (StrS x)\n            (ExeS x) -> return (ExeS x)\n            (LetS v e1 e2) -> LetS v <$> f lang e1 <*> f lang e2\n            (LetBndS v) -> return (LetBndS v)\n            (CallS v) -> return (CallS v)\n            (IfS c t elseE) -> IfS <$> f lang c <*> f lang t <*> f lang elseE\n            (DoBlockS x) -> DoBlockS <$> f lang x\n            (EvalS x) -> EvalS <$> f lang x\n            (CoerceS c x) -> CoerceS c <$> f lang x\n            (IntrinsicS intr xs) -> IntrinsicS intr <$> mapM (f lang) xs\n          return (AnnoS g (Idx i lang) e'')\n\n{- | This function is called on trees that contain no language-specific\ncomponents.  \"GAST\" refers to General Abstract Syntax Tree. The most common\nGAST case, and the only one that is currently supported, is a expression\nthat merely rearranges data structures without calling any functions. Here\nare a few examples:\n\n Constant values and containters (currently supported):\n f1 = 5\n f2 = [1,2,3]\n\n Variable values and containers (coming soon):\n f3 x = x\n\n f4 x = [1,2,x]\n\n Combinations of transformations on containers (possible, but not coming soon):\n f5 :: forall a b . (a, b) -> (b, a)\n f6 (x,y) = (y,x)\n\nThe idea could be elaborated into a full-fledged language.\n-}\nmakeGAST ::\n  AnnoS (Indexed Type) One (Indexed (Maybe Lang)) -> MorlocMonad (AnnoS (Indexed Type) One ())\nmakeGAST = mapAnnoSCM (\\(Idx _ _) -> return ())\n\nremoveVarS :: AnnoS g One c -> AnnoS g One c\nremoveVarS (AnnoS g1 _ (VarS _ (One (AnnoS _ c2 x)))) = removeVarS (AnnoS g1 c2 x)\nremoveVarS (AnnoS g c (AppS x xs)) = AnnoS g c (AppS (removeVarS x) (map removeVarS xs))\nremoveVarS (AnnoS g c (LamS vs x)) = AnnoS g c (LamS vs (removeVarS x))\nremoveVarS (AnnoS g c (LstS xs)) = AnnoS g c (LstS (map removeVarS xs))\nremoveVarS (AnnoS g c (TupS xs)) = AnnoS g c (TupS (map removeVarS xs))\nremoveVarS (AnnoS g c (NamS rs)) = AnnoS g c (NamS (map (second removeVarS) rs))\nremoveVarS (AnnoS g c (LetS v e1 e2)) = AnnoS g c (LetS v (removeVarS e1) (removeVarS e2))\nremoveVarS (AnnoS g c (IfS cond thenE elseE)) = AnnoS g c (IfS (removeVarS cond) (removeVarS thenE) (removeVarS elseE))\nremoveVarS (AnnoS g c (DoBlockS e)) = AnnoS g c (DoBlockS (removeVarS e))\nremoveVarS (AnnoS g c (EvalS e)) = AnnoS g c (EvalS (removeVarS e))\nremoveVarS (AnnoS g c (CoerceS co e)) = AnnoS g c (CoerceS co (removeVarS e))\nremoveVarS (AnnoS g c (IntrinsicS intr es)) = AnnoS g c (IntrinsicS intr (map removeVarS es))\nremoveVarS x = x\n\n-- | Extract non-exported recursive helpers from rASTs into their own top-level\n-- rASTs. A recursive helper is a VarS node whose body contains a CallS\n-- back-edge to its own name. These must become separate manifolds so the\n-- generated code can call them recursively. This runs before removeVarS.\nextractRecursiveHelpers ::\n  [AnnoS (Indexed Type) One (Indexed Lang)] ->\n  MorlocMonad [AnnoS (Indexed Type) One (Indexed Lang)]\nextractRecursiveHelpers rASTs = do\n  exports <- MM.gets stateExports\n  let exportSet = Set.fromList exports\n  results <- mapM (extractFromTree exportSet) rASTs\n  let (modified, helperLists) = unzip results\n      helpers = concat helperLists\n  -- Register extracted helpers in stateName if not already present (they\n  -- should be from the Link phase, but ensure it for safety).\n  nameMap <- MM.gets stateName\n  mapM_ (\\(AnnoS (Idx midx _) _ _) ->\n    case Map.lookup midx nameMap of\n      Just _ -> return ()\n      Nothing -> MM.sayVVV $ \"Warning: recursive helper manifold\" <+> pretty midx <+> \"not in stateName\"\n    ) helpers\n  return (modified ++ helpers)\n\n-- | Walk an rAST and extract recursive VarS nodes into separate rASTs.\n-- Returns the modified tree and the list of extracted helper rASTs.\nextractFromTree ::\n  Set.Set Int ->\n  AnnoS (Indexed Type) One (Indexed Lang) ->\n  MorlocMonad (AnnoS (Indexed Type) One (Indexed Lang), [AnnoS (Indexed Type) One (Indexed Lang)])\nextractFromTree exports (AnnoS g c e) = do\n  (e', helpers) <- extractExpr exports e\n  return (AnnoS g c e', helpers)\n\nextractExpr ::\n  Set.Set Int ->\n  ExprS (Indexed Type) One (Indexed Lang) ->\n  MorlocMonad (ExprS (Indexed Type) One (Indexed Lang), [AnnoS (Indexed Type) One (Indexed Lang)])\nextractExpr exports (VarS v (One child@(AnnoS (Idx midx _) _ _)))\n  -- Only extract if the function is recursive AND not already an export\n  -- (exports already have their own manifolds)\n  | not (Set.member midx exports) && containsCallS v child = do\n      -- Recursively extract from the child's body first\n      (child', innerHelpers) <- extractFromTree exports child\n      return (CallS v, child' : innerHelpers)\nextractExpr exports (VarS v (One child)) = do\n  (child', helpers) <- extractFromTree exports child\n  return (VarS v (One child'), helpers)\nextractExpr exports (AppS f xs) = do\n  (f', fHelpers) <- extractFromTree exports f\n  results <- mapM (extractFromTree exports) xs\n  let (xs', xHelperLists) = unzip results\n  return (AppS f' xs', fHelpers ++ concat xHelperLists)\nextractExpr exports (LamS vs e) = do\n  (e', helpers) <- extractFromTree exports e\n  return (LamS vs e', helpers)\nextractExpr exports (LstS xs) = do\n  results <- mapM (extractFromTree exports) xs\n  let (xs', helperLists) = unzip results\n  return (LstS xs', concat helperLists)\nextractExpr exports (TupS xs) = do\n  results <- mapM (extractFromTree exports) xs\n  let (xs', helperLists) = unzip results\n  return (TupS xs', concat helperLists)\nextractExpr exports (NamS rs) = do\n  results <- mapM (extractFromTree exports . snd) rs\n  let (vals', helperLists) = unzip results\n  return (NamS (zip (map fst rs) vals'), concat helperLists)\nextractExpr exports (LetS v e1 e2) = do\n  (e1', h1) <- extractFromTree exports e1\n  (e2', h2) <- extractFromTree exports e2\n  return (LetS v e1' e2', h1 ++ h2)\nextractExpr exports (IfS c t e) = do\n  (c', h1) <- extractFromTree exports c\n  (t', h2) <- extractFromTree exports t\n  (e', h3) <- extractFromTree exports e\n  return (IfS c' t' e', h1 ++ h2 ++ h3)\nextractExpr exports (DoBlockS e) = do\n  (e', helpers) <- extractFromTree exports e\n  return (DoBlockS e', helpers)\nextractExpr exports (EvalS e) = do\n  (e', helpers) <- extractFromTree exports e\n  return (EvalS e', helpers)\nextractExpr exports (CoerceS c e) = do\n  (e', helpers) <- extractFromTree exports e\n  return (CoerceS c e', helpers)\nextractExpr exports (IntrinsicS intr es) = do\n  results <- mapM (extractFromTree exports) es\n  let (es', helperLists) = unzip results\n  return (IntrinsicS intr es', concat helperLists)\nextractExpr _ e = return (e, [])\n\n-- | Check if an AnnoS tree contains a CallS node targeting the given name\ncontainsCallS :: EVar -> AnnoS g One c -> Bool\ncontainsCallS target (AnnoS _ _ e) = go e\n  where\n    go (CallS v) = v == target\n    go (AppS f xs) = containsCallS target f || any (containsCallS target) xs\n    go (LamS _ x) = containsCallS target x\n    go (LstS xs) = any (containsCallS target) xs\n    go (TupS xs) = any (containsCallS target) xs\n    go (NamS rs) = any (containsCallS target . snd) rs\n    go (VarS _ (One x)) = containsCallS target x\n    go (LetS _ e1 e2) = containsCallS target e1 || containsCallS target e2\n    go (LetBndS _) = False\n    go (IfS c t e') = containsCallS target c || containsCallS target t || containsCallS target e'\n    go (DoBlockS x) = containsCallS target x\n    go (EvalS x) = containsCallS target x\n    go (CoerceS _ x) = containsCallS target x\n    go (IntrinsicS _ xs) = any (containsCallS target) xs\n    go _ = False\n\n-- Check if this expression is a data structure that contains\n-- a function. If so, then the data structure is must be in the\n-- same language as the parent (since functions can't be serialized)\nisFunctionalData :: ExprS (Indexed Type) f a -> Bool\nisFunctionalData (LstS xs) = any isFunctionalDataAnnoS xs\nisFunctionalData (TupS xs) = any isFunctionalDataAnnoS xs\nisFunctionalData (NamS (map snd -> xs)) = any isFunctionalDataAnnoS xs\nisFunctionalData _ = False\n\nisFunctionalDataAnnoS :: AnnoS (Indexed Type) f a -> Bool\nisFunctionalDataAnnoS (AnnoS (Idx _ t) _ e) = handleType t || isFunctionalData e\n  where\n    handleType :: Type -> Bool\n    handleType (FunT _ _) = True\n    handleType _ = False\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Reduce.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Reduce\nDescription : Compile-time reduction of intrinsics\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nWalks the SerialManifold tree after serialization and replaces compile-time\nintrinsics (@version, @compiled, @lang, @schema, @typeof, @datafile) with\nstring literals. Runtime intrinsics (@save, @load, @hash) pass through\nunchanged to code generation.\n-}\nmodule Morloc.CodeGenerator.Reduce (reduce) where\n\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport Data.Time.Clock (getCurrentTime)\nimport Data.Time.Format (formatTime, defaultTimeLocale)\nimport Control.Monad.State (gets)\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.Version as V\n\nreduce :: SerialManifold -> MorlocMonad SerialManifold\nreduce sm = do\n  timestamp <- liftIO $ do\n    now <- getCurrentTime\n    return . T.pack $ formatTime defaultTimeLocale \"%Y-%m-%dT%H:%M:%SZ\" now\n  let ver = T.pack V.versionStr\n  reduceManifold ver timestamp sm\n\nreduceManifold :: Text -> Text -> SerialManifold -> MorlocMonad SerialManifold\nreduceManifold ver ts (SerialManifold m lang form hf se) =\n  SerialManifold m lang form hf <$> reduceSerialExpr ver ts lang se\n\nreduceSerialExpr :: Text -> Text -> Lang -> SerialExpr -> MorlocMonad SerialExpr\nreduceSerialExpr ver ts _ (ManS sm) = ManS <$> reduceManifold ver ts sm\nreduceSerialExpr ver ts lang (AppPoolS t pc args) =\n  AppPoolS t pc <$> mapM (reduceSerialArg ver ts lang) args\nreduceSerialExpr ver ts lang (AppRecS t i args) =\n  AppRecS t i <$> mapM (reduceSerialExpr ver ts lang) args\nreduceSerialExpr ver ts lang (AppForeignRecS t i sock args) =\n  AppForeignRecS t i sock <$> mapM (reduceSerialExpr ver ts lang) args\nreduceSerialExpr ver ts lang (ReturnS se) = ReturnS <$> reduceSerialExpr ver ts lang se\nreduceSerialExpr ver ts lang (SerialLetS i e1 e2) =\n  SerialLetS i <$> reduceSerialExpr ver ts lang e1 <*> reduceSerialExpr ver ts lang e2\nreduceSerialExpr ver ts lang (NativeLetS i ne se) =\n  NativeLetS i <$> reduceNativeExpr ver ts lang ne <*> reduceSerialExpr ver ts lang se\nreduceSerialExpr ver ts lang (SerializeS ast ne) =\n  SerializeS ast <$> reduceNativeExpr ver ts lang ne\nreduceSerialExpr _ _ _ e = return e\n\nreduceSerialArg :: Text -> Text -> Lang -> SerialArg -> MorlocMonad SerialArg\nreduceSerialArg ver ts _ (SerialArgManifold sm) = SerialArgManifold <$> reduceManifold ver ts sm\nreduceSerialArg ver ts lang (SerialArgExpr se) = SerialArgExpr <$> reduceSerialExpr ver ts lang se\n\nreduceNativeManifold :: Text -> Text -> NativeManifold -> MorlocMonad NativeManifold\nreduceNativeManifold ver ts (NativeManifold m lang form ne) =\n  NativeManifold m lang form <$> reduceNativeExpr ver ts lang ne\n\nreduceNativeExpr :: Text -> Text -> Lang -> NativeExpr -> MorlocMonad NativeExpr\n-- compile-time intrinsics: replace with string literals\nreduceNativeExpr ver _ _ (IntrinsicN t IntrVersion _ []) = return $ makeStr t ver\nreduceNativeExpr _ ts _ (IntrinsicN t IntrCompiled _ []) = return $ makeStr t ts\nreduceNativeExpr _ _ lang (IntrinsicN t IntrLang _ []) = return $ makeStr t (langName lang)\n-- @datafile: resolve relative path to installed data file location\nreduceNativeExpr ver ts lang (IntrinsicN t IntrDatafile _ [pathArg]) = do\n  pathArg' <- reduceNativeExpr ver ts lang pathArg\n  case extractStr pathArg' of\n    Just relPath -> do\n      mInstallDir <- gets stateInstallDir\n      let resolved = case mInstallDir of\n            Just dir -> T.pack (dir </> T.unpack relPath)\n            Nothing -> relPath\n      return $ makeStr t resolved\n    Nothing ->\n      return $ makeStr t \"<datafile: could not resolve path>\"\n-- runtime intrinsics: recurse into children but keep the intrinsic node\nreduceNativeExpr ver ts lang (IntrinsicN t intr msch es) =\n  IntrinsicN t intr msch <$> mapM (reduceNativeExpr ver ts lang) es\n-- recursive cases\nreduceNativeExpr ver ts _ (ManN nm) = ManN <$> reduceNativeManifold ver ts nm\nreduceNativeExpr ver ts lang (AppExeN t exe args) =\n  AppExeN t exe <$> mapM (reduceNativeArg ver ts lang) args\nreduceNativeExpr ver ts lang (ReturnN ne) = ReturnN <$> reduceNativeExpr ver ts lang ne\nreduceNativeExpr ver ts lang (SerialLetN i se ne) =\n  SerialLetN i <$> reduceSerialExpr ver ts lang se <*> reduceNativeExpr ver ts lang ne\nreduceNativeExpr ver ts lang (NativeLetN i ne1 ne2) =\n  NativeLetN i <$> reduceNativeExpr ver ts lang ne1 <*> reduceNativeExpr ver ts lang ne2\nreduceNativeExpr ver ts lang (DeserializeN t ast se) =\n  DeserializeN t ast <$> reduceSerialExpr ver ts lang se\nreduceNativeExpr ver ts lang (ListN fv t es) =\n  ListN fv t <$> mapM (reduceNativeExpr ver ts lang) es\nreduceNativeExpr ver ts lang (TupleN fv es) =\n  TupleN fv <$> mapM (reduceNativeExpr ver ts lang) es\nreduceNativeExpr ver ts lang (RecordN o fv tps rs) =\n  RecordN o fv tps <$> mapM (\\(k, ne) -> (,) k <$> reduceNativeExpr ver ts lang ne) rs\nreduceNativeExpr ver ts lang (DoBlockN t ne) = DoBlockN t <$> reduceNativeExpr ver ts lang ne\nreduceNativeExpr ver ts lang (EvalN t ne) = EvalN t <$> reduceNativeExpr ver ts lang ne\nreduceNativeExpr ver ts lang (CoerceN c t ne) = CoerceN c t <$> reduceNativeExpr ver ts lang ne\nreduceNativeExpr ver ts lang (IfN t c th el) =\n  IfN t <$> reduceNativeExpr ver ts lang c <*> reduceNativeExpr ver ts lang th <*> reduceNativeExpr ver ts lang el\n-- leaf nodes\nreduceNativeExpr _ _ _ e = return e\n\nreduceNativeArg :: Text -> Text -> Lang -> NativeArg -> MorlocMonad NativeArg\nreduceNativeArg ver ts _ (NativeArgManifold nm) = NativeArgManifold <$> reduceNativeManifold ver ts nm\nreduceNativeArg ver ts lang (NativeArgExpr ne) = NativeArgExpr <$> reduceNativeExpr ver ts lang ne\n\nmakeStr :: TypeF -> Text -> NativeExpr\nmakeStr (VarF fv) x = StrN fv x\nmakeStr _ x = StrN (FV (TV \"Str\") (CV \"str\")) x\n\n-- | Extract the string value from a StrN node\nextractStr :: NativeExpr -> Maybe Text\nextractStr (StrN _ x) = Just x\nextractStr _ = Nothing\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Segment.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Segment\nDescription : Break polymorphic manifold trees at language boundaries\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.CodeGenerator.Segment\n  ( segment\n  ) where\n\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.Config as MC\nimport Morloc.Data.Doc\nimport qualified Morloc.Monad as MM\n\nsegment :: PolyHead -> MorlocMonad [MonoHead]\nsegment (PolyHead lang m0 args0 e0) = do\n  (heads, (_, topExpr)) <- segmentExpr m0 (map ann args0) e0\n\n  MM.sayVVV $\n    \"segmentation complete\"\n      <> \"\\n  topExpr language:\" <+> pretty lang\n      <> \"\\n  topExpr: \" <+> pretty topExpr\n      <> \"\\n  heads:\" <+> list (map pretty heads)\n\n  return (MonoHead lang m0 args0 HeadManifoldFormLocalRoot topExpr : heads)\n\nsegmentExpr ::\n  Int -> -- manifold index\n  [Int] -> -- argument indices\n  PolyExpr ->\n  MorlocMonad ([MonoHead], (Maybe Lang, MonoExpr))\nsegmentExpr\n  _\n  args\n  ( PolyRemoteInterface\n      lang\n      callingType\n      cargs\n      remoteCall\n      e@(PolyManifold _ m (ManifoldFull foreignArgs) _)\n    ) = do\n    MM.sayVVV $\n      \"segmentExpr PolyRemoteInterface PolyManifold m\"\n        <> pretty m\n        <> \"\\n  forced ManifoldFull\" <+> pretty foreignArgs\n        <> \"\\n  lang\" <+> pretty lang\n        <> \"\\n  args\" <+> pretty args\n        <> \"\\n  cargs\" <+> pretty cargs\n        <> \"\\n  foreignArgs\" <+> pretty (map ann foreignArgs)\n    (ms, (_, e')) <- segmentExpr m (map ann foreignArgs) e\n    headForm <- case remoteCall of\n      ForeignCall -> return HeadManifoldFormLocalForeign\n      (RemoteCall _) -> return HeadManifoldFormRemoteWorker\n    let foreignHead = MonoHead lang m foreignArgs headForm e'\n    config <- MM.ask\n    reg <- MM.gets stateLangRegistry\n    let socket = MC.setupServerAndSocket config reg lang\n    return (foreignHead : ms, (Nothing, MonoPoolCall callingType m socket remoteCall foreignArgs))\nsegmentExpr m _ (PolyRemoteInterface lang callingType args remoteCall e) = do\n  MM.sayVVV $\n    \"segmentExpr PolyRemoteInterface m\"\n      <> pretty m\n      <> \"\\n  args\" <+> pretty args\n      <> \"\\n  lang\" <+> pretty lang\n  (ms, (_, e')) <- segmentExpr m args e\n  headForm <- case remoteCall of\n    ForeignCall -> return HeadManifoldFormLocalForeign\n    (RemoteCall _) -> return HeadManifoldFormRemoteWorker\n  let foreignHead = MonoHead lang m [Arg i None | i <- args] headForm (MonoReturn e')\n      es' = map (MonoBndVar (A None)) args\n\n  config <- MM.ask\n  reg <- MM.gets stateLangRegistry\n  let socket = MC.setupServerAndSocket config reg lang\n      localFun = MonoApp (MonoPoolCall callingType m socket remoteCall [Arg i None | i <- args]) es'\n\n  return (foreignHead : ms, (Nothing, localFun))\nsegmentExpr _ _ (PolyManifold lang m form e) = do\n  (ms, (_, e')) <- segmentExpr m (abilist const const form) e\n  return (ms, (Just lang, MonoManifold m form e'))\nsegmentExpr m args (PolyApp e es) = do\n  (ms, (lang, e')) <- segmentExpr m args e\n  (mss, es') <- mapM (segmentExpr m args) es |>> unzip\n  return (ms ++ concat mss, (lang, MonoApp e' (map snd es')))\nsegmentExpr m args (PolyLet i e1 e2) = do\n  MM.sayVVV \"segmentExpr PolyLet\"\n  (ms1, (_, e1')) <- segmentExpr m args e1\n  (ms2, (lang2, e2')) <- segmentExpr m args e2\n  return (ms1 ++ ms2, (lang2, MonoLet i e1' e2'))\nsegmentExpr m args (PolyList v t es) = do\n  (mss, es') <- mapM (segmentExpr m args) es |>> unzip\n  return (concat mss, (Nothing, MonoList v t (map snd es')))\nsegmentExpr m args (PolyTuple v es) = do\n  (mss, es') <- mapM (segmentExpr m args . snd) es |>> unzip\n  return (concat mss, (Nothing, MonoTuple v (zip (map fst es) (map snd es'))))\nsegmentExpr m args (PolyRecord o v ps entries) = do\n  let entryTypes = map (fst . snd) entries\n  (mss, es') <- mapM (segmentExpr m args . snd . snd) entries |>> unzip\n  let keys = map fst entries\n  return (concat mss, (Nothing, MonoRecord o v ps (zip keys (zip entryTypes (map snd es')))))\nsegmentExpr m args (PolyReturn e) = do\n  (ms, (lang, e')) <- segmentExpr m args e\n  return (ms, (lang, MonoReturn e'))\nsegmentExpr _ _ (PolyLetVar t x) = return ([], (Nothing, MonoLetVar t x))\nsegmentExpr _ _ (PolyBndVar (A lang) i) = return ([], (Just lang, MonoBndVar (A None) i))\nsegmentExpr _ _ (PolyBndVar (B t) i) = return ([], (Nothing, MonoBndVar (B t) i))\nsegmentExpr _ _ (PolyBndVar (C t) i) = return ([], (Nothing, MonoBndVar (C t) i))\nsegmentExpr _ _ (PolyExe t exe) = return ([], (Nothing, MonoExe t exe))\nsegmentExpr _ _ (PolyLog v x) = return ([], (Nothing, MonoLog v x))\nsegmentExpr _ _ (PolyReal v x) = return ([], (Nothing, MonoReal v x))\nsegmentExpr _ _ (PolyInt v x) = return ([], (Nothing, MonoInt v x))\nsegmentExpr _ _ (PolyStr v x) = return ([], (Nothing, MonoStr v x))\nsegmentExpr _ _ (PolyNull v) = return ([], (Nothing, MonoNull v))\nsegmentExpr m args (PolyDoBlock t e) = do\n  (ms, (_, e')) <- segmentExpr m args e\n  return (ms, (Nothing, MonoDoBlock t e'))\nsegmentExpr m args (PolyIf cond thenE elseE) = do\n  (ms1, (_, cond')) <- segmentExpr m args cond\n  (ms2, (_, thenE')) <- segmentExpr m args thenE\n  (ms3, (_, elseE')) <- segmentExpr m args elseE\n  return (ms1 ++ ms2 ++ ms3, (Nothing, MonoIf cond' thenE' elseE'))\nsegmentExpr m args (PolyEval t e) = do\n  (ms, (_, e')) <- segmentExpr m args e\n  return (ms, (Nothing, MonoEval t e'))\nsegmentExpr m args (PolyCoerce c t e) = do\n  (ms, (_, e')) <- segmentExpr m args e\n  return (ms, (Nothing, MonoCoerce c t e'))\nsegmentExpr m args (PolyIntrinsic t intr es) = do\n  results <- mapM (segmentExpr m args) es\n  let (mss, pairs) = unzip results\n  return (concat mss, (Nothing, MonoIntrinsic t intr (map snd pairs)))\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Serial.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Serial\nDescription : Build serialization ASTs that describe how to pack\\/unpack types\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nConstructs 'SerialAST' trees that describe the serialization and\ndeserialization plan for each type. Handles format selection (JSON\nvs MessagePack), packer resolution via typeclass instances, and\nserializability checking. Distinct from 'Serialize' which inserts\npack\\/unpack calls into the manifold tree.\n-}\nmodule Morloc.CodeGenerator.Serial\n  ( makeSerialAST\n  , chooseSerializationCycle\n  , isSerializable\n  , hasArrowHint\n  , prettySerialOne\n  , serialAstToType\n  , shallowType\n  , serialAstToMsgpackSchema\n  , encode64\n  , decode64\n  ) where\n\nimport qualified Data.Char as C\nimport qualified Data.IntMap.Strict as IntMap\nimport qualified Data.Set as Set\nimport qualified Data.Text as DT\nimport qualified Morloc.BaseTypes as BT\nimport Morloc.CodeGenerator.Infer\nimport Morloc.CodeGenerator.Namespace\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.TypeEval as TE\nimport Morloc.Typecheck.Internal (apply, qualify, substitute, subtype, unqualify)\n\n-- | recurse all the way to a serializable type\nserialAstToType :: SerialAST -> TypeF\nserialAstToType (SerialPack _ (_, s)) = serialAstToType s\nserialAstToType (SerialList v s) = AppF (VarF v) [serialAstToType s]\nserialAstToType (SerialTensor v _ s) = AppF (VarF v) [serialAstToType s]\nserialAstToType (SerialTuple v ss) = AppF (VarF v) (map serialAstToType ss)\nserialAstToType (SerialObject o n ps rs) =\n  let ts = map (serialAstToType . snd) rs\n   in NamF o n ps (zip (map fst rs) ts)\nserialAstToType (SerialReal x) = VarF x\nserialAstToType (SerialFloat32 x) = VarF x\nserialAstToType (SerialFloat64 x) = VarF x\nserialAstToType (SerialInt x) = VarF x\nserialAstToType (SerialInt8 x) = VarF x\nserialAstToType (SerialInt16 x) = VarF x\nserialAstToType (SerialInt32 x) = VarF x\nserialAstToType (SerialInt64 x) = VarF x\nserialAstToType (SerialUInt x) = VarF x\nserialAstToType (SerialUInt8 x) = VarF x\nserialAstToType (SerialUInt16 x) = VarF x\nserialAstToType (SerialUInt32 x) = VarF x\nserialAstToType (SerialUInt64 x) = VarF x\nserialAstToType (SerialBool x) = VarF x\nserialAstToType (SerialString x) = VarF x\nserialAstToType (SerialNull x) = VarF x\nserialAstToType (SerialOptional _ s) = OptionalF (serialAstToType s)\n-- passthrough type, it cannot be deserialized or serialized, only passed in from a different language\nserialAstToType (SerialUnknown v) = UnkF v\n\nencode64 :: Int -> String\nencode64 i\n  | i < 0 = error \"Negative size - not in this universe my dear\"\n  | i < 10 = [C.chr (C.ord '0' + i)] -- 0-9\n  | i < 36 = [C.chr (C.ord 'a' + i - 10)]\n  | i < 62 = [C.chr (C.ord 'A' + i - 36)]\n  | i == 62 = \"+\"\n  | i == 63 = \"/\"\n  | otherwise = \"=\" <> (encode64 (mod i 64)) <> (encode64 (div i 64))\n\ndecode64 :: String -> Int\ndecode64 (x : xs)\n  | x >= '0' && x <= '9' = C.ord x - C.ord '0'\n  | x >= 'a' && x <= 'z' = C.ord x - C.ord 'a' + C.ord '0'\n  | x >= 'A' && x <= 'Z' = C.ord x - C.ord 'A' + C.ord 'a' + C.ord '0'\n  | x == '+' = 62\n  | x == '/' = 63\n  | x == '=' = decode64 [head xs] + 64 * decode64 (tail xs)\n  | otherwise = error \"illegal character\"\ndecode64 [] = 0\n\nencode64D :: Int -> MDoc\nencode64D i = pretty (encode64 i)\n\nserialAstToMsgpackSchema :: SerialAST -> MDoc\nserialAstToMsgpackSchema (SerialPack v (_, s)) = addHint v <> serialAstToMsgpackSchema s\nserialAstToMsgpackSchema (SerialList v s) = addHint v <> \"a\" <> serialAstToMsgpackSchema s\nserialAstToMsgpackSchema (SerialTensor v ndim s) = addHint v <> \"T\" <> encode64D ndim <> serialAstToMsgpackSchema s\nserialAstToMsgpackSchema (SerialTuple v ss) = addHint v <> \"t\" <> encode64D (length ss) <> foldl (<>) \"\" (map serialAstToMsgpackSchema ss)\nserialAstToMsgpackSchema (SerialObject _ v _ rs) = addHint v <> \"m\" <> encode64D (length rs) <> foldl (<>) \"\" (map keypair rs)\n  where\n    keypair :: (Key, SerialAST) -> MDoc\n    keypair (k, s) = (encode64D . DT.length . unKey $ k) <> pretty (unKey k) <> serialAstToMsgpackSchema s\nserialAstToMsgpackSchema (SerialReal v) = addHint v <> \"f8\" -- 64 bit float\nserialAstToMsgpackSchema (SerialFloat32 v) = addHint v <> \"f4\"\nserialAstToMsgpackSchema (SerialFloat64 v) = addHint v <> \"f8\"\nserialAstToMsgpackSchema (SerialInt v) = addHint v <> \"i4\" -- 32 bit integer, will need to extend this soon\nserialAstToMsgpackSchema (SerialInt8 v) = addHint v <> \"i1\"\nserialAstToMsgpackSchema (SerialInt16 v) = addHint v <> \"i2\"\nserialAstToMsgpackSchema (SerialInt32 v) = addHint v <> \"i4\"\nserialAstToMsgpackSchema (SerialInt64 v) = addHint v <> \"i8\"\nserialAstToMsgpackSchema (SerialUInt v) = addHint v <> \"u4\"\nserialAstToMsgpackSchema (SerialUInt8 v) = addHint v <> \"u1\"\nserialAstToMsgpackSchema (SerialUInt16 v) = addHint v <> \"u2\"\nserialAstToMsgpackSchema (SerialUInt32 v) = addHint v <> \"u4\"\nserialAstToMsgpackSchema (SerialUInt64 v) = addHint v <> \"u8\"\nserialAstToMsgpackSchema (SerialBool v) = addHint v <> \"b\"\nserialAstToMsgpackSchema (SerialString v) = addHint v <> \"s\"\nserialAstToMsgpackSchema (SerialNull v) = addHint v <> \"z\"\nserialAstToMsgpackSchema (SerialOptional v s) = addHint v <> \"?\" <> serialAstToMsgpackSchema s\nserialAstToMsgpackSchema (SerialUnknown v) = addHint v <> \"*\"\n\naddHint :: FVar -> MDoc\naddHint (FV _ (CV \"\")) = \"\" -- no hint if no concrete type is defined\n-- this is helpful in the nexus\naddHint (FV _ (CV v)) = \"<\" <> pretty v <> \">\"\n\n-- | get only the toplevel type\nshallowType :: SerialAST -> TypeF\nshallowType (SerialPack _ (p, _)) = typePackerPacked p\nshallowType (SerialList v s) = AppF (VarF v) [shallowType s]\nshallowType (SerialTuple v ss) = AppF (VarF v) $ map shallowType ss\nshallowType (SerialObject o n ps rs) =\n  let ts = map (shallowType . snd) rs\n   in NamF o n ps (zip (map fst rs) ts)\nshallowType (SerialReal x) = VarF x\nshallowType (SerialFloat32 x) = VarF x\nshallowType (SerialFloat64 x) = VarF x\nshallowType (SerialInt x) = VarF x\nshallowType (SerialInt8 x) = VarF x\nshallowType (SerialInt16 x) = VarF x\nshallowType (SerialInt32 x) = VarF x\nshallowType (SerialInt64 x) = VarF x\nshallowType (SerialUInt x) = VarF x\nshallowType (SerialUInt8 x) = VarF x\nshallowType (SerialUInt16 x) = VarF x\nshallowType (SerialUInt32 x) = VarF x\nshallowType (SerialUInt64 x) = VarF x\nshallowType (SerialBool x) = VarF x\nshallowType (SerialString x) = VarF x\nshallowType (SerialNull x) = VarF x\nshallowType (SerialOptional _ s) = OptionalF (shallowType s)\nshallowType (SerialTensor v _ s) = AppF (VarF v) [shallowType s]\nshallowType (SerialUnknown v) = UnkF v\n\nfindPackers ::\n  Lang ->\n  MorlocMonad\n    ( [(([TVar], TypeU), Source)]\n    , [(([TVar], TypeU), Source)]\n    )\nfindPackers lang = do\n  sigmap <- MM.gets stateTypeclasses\n\n  MM.sayVVV $\n    \"findPackers\"\n      <> \"\\n  sigmap:\" <+> viaShow sigmap\n\n  packers <- case Map.lookup (EV \"pack\") sigmap of\n    (Just (Instance _ _ _ ts)) -> return $ concatMap f ts\n    Nothing -> return []\n\n  unpackers <- case Map.lookup (EV \"unpack\") sigmap of\n    (Just (Instance _ _ _ ts)) -> return $ concatMap f ts\n    Nothing -> return []\n\n  return (packers, unpackers)\n  where\n    f :: TermTypes -> [(([TVar], TypeU), Source)]\n    f (TermTypes (Just et) (map (val . snd) -> srcs) _) =\n      let (vs, t) = unqualify $ etype et\n       in [((vs, t), src) | src <- srcs, srcLang src == lang]\n    f (TermTypes Nothing _ _) = []\n\n-- Takes a map of packers with concrete type names as keys. A single concrete\n-- type name may map to many single types. For example, the python type \"dict\"\n-- might represent a Map with homogenous keys and values or many things that\n-- might be objects in other languages. Similarly, the python \"tuple\" type maps\n-- to tuples of all sizes -- each of which is a different type in both the\n-- morloc general type system and many other languages. So the map contains a\n-- list of possible packers. Matching the concrete type name to the right packer\n-- will be done through subtyping.\nmakeSerialAST :: Int -> Lang -> TypeF -> MorlocMonad SerialAST\nmakeSerialAST m lang t0 = do\n  -- ([(([TVar], TypeU), Source)], ...)\n  (packs, unpacks) <- findPackers lang\n\n  MM.sayVVV $ \"packs:\" <+> viaShow packs\n  MM.sayVVV $ \"unpacks:\" <+> viaShow unpacks\n\n  (_, gscope) <- getScope m lang\n\n  -- Map TVar ((TypeU, Source), (TypeU, Source))\n  let typepackers =\n        Map.fromListWith\n          (<>)\n          [ (extractKey b1, [(length vs1, qualify vs1 a1, qualify vs1 b1, src1, src2)])\n          | ((vs1, FunU [a1] b1), src1) <- packs\n          , ((vs2, FunU [a2] _), src2) <- unpacks\n          , extractKey b1 == extractKey a2\n          , length vs1 == length vs2\n          ]\n\n  makeSerialAST' gscope typepackers t0\n  where\n    makeSerialAST' ::\n      Scope ->\n      Map.Map TVar [(Int, TypeU, TypeU, Source, Source)] ->\n      TypeF ->\n      MorlocMonad SerialAST\n    -- If the type is unknown in this language, then it must be a passthrough\n    -- type. So it will only be represented in the serialization form. As a\n    -- string, for now.\n    makeSerialAST' _ _ (UnkF (FV gv _)) = do\n      registry <- MM.gets stateLangRegistry |>> lrEntries\n      serialType <- case Map.lookup (langName lang) registry of\n        Nothing -> MM.throwSourcedError m \"Unsupported language\"\n        (Just langRegistry) -> return $ CV (lreSerialType langRegistry)\n      return $ SerialUnknown (FV gv serialType)\n    makeSerialAST' gscope typepackers ft@(VarF v@(FV gv cv))\n      | finalType == BT.unitU = return $ SerialNull v\n      | finalType == BT.boolU = return $ SerialBool v\n      | finalType == BT.strU = return $ SerialString v\n      | finalType == BT.realU = return $ SerialReal v\n      | finalType == BT.f32U = return $ SerialFloat32 v\n      | finalType == BT.f64U = return $ SerialFloat64 v\n      | finalType == BT.intU = return $ SerialInt v\n      | finalType == BT.i8U = return $ SerialInt8 v\n      | finalType == BT.i16U = return $ SerialInt16 v\n      | finalType == BT.i32U = return $ SerialInt32 v\n      | finalType == BT.i64U = return $ SerialInt64 v\n      | finalType == BT.uintU = return $ SerialUInt v\n      | finalType == BT.u8U = return $ SerialUInt8 v\n      | finalType == BT.u16U = return $ SerialUInt16 v\n      | finalType == BT.u32U = return $ SerialUInt32 v\n      | finalType == BT.u64U = return $ SerialUInt64 v\n      | otherwise = case Map.lookup gv typepackers of\n          (Just ps) -> do\n            packers <- mapM makeTypePacker ps\n            unpacked <- mapM (makeSerialAST' gscope typepackers . typePackerUnpacked) packers\n            selection <- selectPacker (zip packers unpacked)\n            return $ SerialPack v selection\n          Nothing ->\n            MM.throwSourcedError m $\n              \"Cannot find constructor in VarF\" <+> dquotes (pretty v) <+> \" finalType=\" <> pretty finalType\n      where\n        -- Evaluate type aliases step-by-step, stopping at known serialization\n        -- base types. This prevents aliases like Int64 = Int from collapsing\n        -- to Int, which would lose width information for serialization.\n        finalType =\n          let t = fst $ unweaveTypeF ft\n           in resolveToSerialBaseType gscope t\n\n        resolveToSerialBaseType scope t\n          | Set.member t serialBaseTypes = t\n          | otherwise = case TE.reduceType scope t of\n              Just t' -> resolveToSerialBaseType scope t'\n              Nothing -> t\n\n        serialBaseTypes = Set.fromList\n          [ BT.unitU, BT.boolU, BT.strU, BT.realU\n          , BT.f32U, BT.f64U\n          , BT.intU, BT.i8U, BT.i16U, BT.i32U, BT.i64U\n          , BT.uintU, BT.u8U, BT.u16U, BT.u32U, BT.u64U\n          ]\n\n        makeTypePacker :: (Int, TypeU, TypeU, Source, Source) -> MorlocMonad TypePacker\n        makeTypePacker (0, generalUnpackedType, generalPackedType, forwardSource, reverseSource) = do\n          packedType <- inferConcreteType lang (Idx m (typeOf generalPackedType))\n          unpackedType <- inferConcreteType lang (Idx m (typeOf generalUnpackedType))\n          return $\n            TypePacker\n              { typePackerPacked = packedType\n              , typePackerUnpacked = unpackedType\n              , typePackerForward = forwardSource\n              , typePackerReverse = reverseSource\n              }\n        makeTypePacker (nparam, _, _, _, _) =\n          MM.throwSourcedError m $ \"Unexpected parameters for atomic variable:\" <+> pretty nparam\n\n        -- Select the first packer we happen across. This is a very key step and\n        -- eventually this function should be replaced with one more carefully\n        -- considered. But for now, I don't have any great criterion for\n        -- choosing.\n        selectPacker :: [(TypePacker, SerialAST)] -> MorlocMonad (TypePacker, SerialAST)\n        selectPacker [] = MM.throwSourcedError m $ \"Cannot find constructor for\" <+> pretty cv <+> \"in selectPacker\"\n        selectPacker [x] = return x\n        selectPacker _ = MM.throwSourcedError m \"Two you say, oh, get out of here\"\n    makeSerialAST' _ _ t@(FunF _ _) =\n      MM.throwSourcedError m $ \"Cannot serialize functions at\" <+> pretty m <> \":\" <+> pretty t\n    makeSerialAST' gscope typepackers ft@(AppF (VarF fv@(FV generalTypeName _)) ts0)\n      | null runtimeTs = MM.throwSourcedError m $ \"No runtime type args for\" <+> pretty ft\n      -- When alias expansion changed the root type, re-infer the concrete\n      -- type for the expanded general form and recurse.\n      | Just fv' <- finalVar, fv' /= generalTypeName, Just expanded <- evaluatedType = do\n          expandedTf <- inferConcreteType lang (Idx m (typeOf expanded))\n          makeSerialAST' gscope typepackers expandedTf\n      | finalVar == Just BT.list = SerialList fv <$> makeSerialAST' gscope typepackers (head runtimeTs)\n      | finalVar == Just (BT.tuple (length runtimeTs)) =\n          SerialTuple fv <$> mapM (makeSerialAST' gscope typepackers) runtimeTs\n      | Just ndim <- tensorNDim finalVar =\n          SerialTensor fv ndim <$> makeSerialAST' gscope typepackers (last runtimeTs)\n      | otherwise = case Map.lookup generalTypeName typepackers of\n          (Just ps) -> do\n            packers <- catMaybes <$> mapM (resolvePacker lang m ft) ps\n            unpacked <- mapM (makeSerialAST' gscope typepackers . typePackerUnpacked) packers\n            selection <- selectPacker (zip packers unpacked)\n            return $ SerialPack fv selection\n          Nothing ->\n            MM.throwSourcedError m $\n              \"Cannot find\" <+> pretty generalTypeName <+> \"from\" <+> dquotes (pretty fv)\n                <> \"\\n  ft:\" <+> pretty ft\n                <> \"\\n  finalVar:\" <+> pretty finalVar\n                <> \"\\n  gscope:\" <+> viaShow gscope\n                <> \"\\n  general t:\" <+> (viaShow . fst $ unweaveTypeF ft)\n                <> \"\\n  concrete t:\" <+> (viaShow . snd $ unweaveTypeF ft)\n                <> \"\\n  typepackers:\" <+> viaShow typepackers\n      where\n        -- Filter out Nat-kinded type params (phantom, not serialized)\n        isNatTypeF :: TypeF -> Bool\n        isNatTypeF (NatLitF _) = True\n        isNatTypeF _ = False\n\n        runtimeTs = filter (not . isNatTypeF) ts0\n\n        basevar :: TypeU -> Maybe TVar\n        basevar (VarU v) = Just v\n        basevar (NatVarU _) = Nothing\n        basevar (ExistU _ _ _) = Nothing\n        basevar (ForallU _ _) = Nothing\n        basevar (FunU _ _) = Nothing\n        basevar (AppU t _) = basevar t\n        basevar (NamU _ v _ _) = Just v\n        basevar (EffectU _ _) = Nothing\n        basevar (OptionalU _) = Nothing\n        basevar (NatLitU _) = Nothing\n        basevar (NatAddU _ _) = Nothing\n        basevar (NatMulU _ _) = Nothing\n        basevar (NatSubU _ _) = Nothing\n        basevar (NatDivU _ _) = Nothing\n        basevar (LabeledU _ t) = basevar t\n\n        generalType = fst $ unweaveTypeF ft\n\n        evaluatedType =\n          case TE.evaluateType gscope generalType of\n            Right et | et /= generalType -> Just et\n            _ -> Nothing\n\n        finalVar = basevar $ maybe generalType id evaluatedType\n\n        tensorNDim :: Maybe TVar -> Maybe Int\n        tensorNDim (Just v) = lookup v [(BT.tensor k, k) | k <- [1..5]]\n        tensorNDim Nothing = Nothing\n\n        selectPacker :: [(TypePacker, SerialAST)] -> MorlocMonad (TypePacker, SerialAST)\n        selectPacker [] =\n          MM.throwSourcedError m $\n            \"Cannot find constructor in selectPacker for\" <+> pretty ft\n              <> \"\\n  ft:\" <+> pretty ft\n              <> \"\\n  generalTypeName (key):\" <+> pretty generalTypeName\n              <> \"\\n  typepackers:\" <+> viaShow typepackers\n              <> \"\\n  Map.lookup generalTypeName typepackers:\" <+> viaShow (Map.lookup generalTypeName typepackers)\n        selectPacker (x : _) = return x\n    makeSerialAST' gscope typepackers (NamF o n ps rs) = do\n      ts <- mapM (makeSerialAST' gscope typepackers . snd) rs\n      let entries = zip (map fst rs) ts\n      return $ SerialObject o n ps entries\n    makeSerialAST' gscope typepackers (EffectF _ t) = makeSerialAST' gscope typepackers t\n    makeSerialAST' gscope typepackers (OptionalF t) = do\n      inner <- makeSerialAST' gscope typepackers t\n      let v = case t of\n                VarF fv -> fv\n                AppF (VarF fv) _ -> fv\n                NamF _ fv _ _ -> fv\n                _ -> FV (TV \"Optional\") (CV \"optional\")\n      return $ SerialOptional v inner\n    makeSerialAST' _ _ t = MM.throwSourcedError m $ \"makeSerialAST' error on type:\" <+> pretty t\n\nresolvePacker ::\n  Lang ->\n  Int ->\n  TypeF ->\n  (Int, TypeU, TypeU, Source, Source) ->\n  MorlocMonad (Maybe TypePacker)\nresolvePacker lang m0 resolvedType@(AppF _ _) (_, unpackedGeneralType, packedGeneralType, srcPacked, srcUnpacked) = do\n  packedConcreteType <- inferConcreteTypeU lang (Idx m0 packedGeneralType)\n  unpackedConcreteType <- inferConcreteTypeU lang (Idx m0 unpackedGeneralType)\n  maybeUnpackedType <-\n    resolveP\n      resolvedType\n      packedConcreteType\n      unpackedConcreteType\n      (packedGeneralType, unpackedGeneralType)\n\n  case maybeUnpackedType of\n    (Just unpackedType) ->\n      return . Just $\n        TypePacker\n          { typePackerPacked = resolvedType\n          , typePackerUnpacked = unpackedType\n          , typePackerForward = srcPacked\n          , typePackerReverse = srcUnpacked\n          }\n    Nothing -> return Nothing\n  where\n    -- Both sides of the packer function are guaranteed to have the same\n    -- generic values, this is guaranteed by the implementation of\n    -- Restructure.hs. So it is sufficient to resolve the generics in the packed\n    -- type and map them to the unpacked type.\n    --\n    -- Example:\n    --\n    --  resolveP (\"dict\" \"str\" \"int\") (\"dict\" a b) (\"list\" (\"list\" a b) --> (\"list\" (\"list\" \"str\" \"int\"))\n    --                    x_r             x_u                y_u                       y_r\n    --\n    -- x_u is the unresolved packed type that is extracted before typechecking\n    -- x_r is equal to x_u after type inference\n    --\n    -- () |- x_u <: x_y -| g\n    -- y_r = apply g y_u\n    --\n    -- y_u is the unresolved unpacked type that is extracted with x_u\n    --\n    -- y_u and y_r are both processed by Restructure.hs and are both guaranteed\n    -- to share the same set of generics. We can find the identity of these\n    -- generics by subtyping x_u against x_y. The produced context contains\n    -- the types for each generic variable. The context can be applied to\n    -- y_u to get the final desired y_r.\n    resolveP ::\n      TypeF -> -- resolved packed type (e.g., \"dict\" \"str\" \"int\")\n      TypeU -> -- unresolved packed type (e.g., \"dict\" a b)\n      TypeU -> -- unresolved unpacked type (e.g., \"list\" (\"list\" a b))\n      (TypeU, TypeU) -> -- The general unresolved packed and unpacked types\n      MorlocMonad (Maybe TypeF) -- the resolved unpacked types\n    resolveP a b c generalTypes = do\n      let (ga, ca) = unweaveTypeF a\n      unpackedConcreteType <- case subtype Map.empty b ca (Gamma 0 0 IntMap.empty Map.empty Map.empty [] Map.empty Map.empty) of\n        (Left typeErr) ->\n          MM.throwSourcedError m0 $\n            \"There was an error raised in subtyping while resolving serialization\"\n              <> \"\\nThe packer involved maps the type:\"\n              <> \"\\n  \"\n              <> (pretty . fst) generalTypes\n              <> \"\\n\\nTo the serialized form:\"\n              <> \"\\n  \"\n              <> (pretty . snd) generalTypes\n              <> \"\\n\\nHere the unresolved concrete packed type:\"\n              <> \"\\n  b:\" <+> pretty b\n              <> \"\\n\\nShould be the subtype of the resolved packed type:\"\n              <> \"\\n  a:\" <+> pretty a\n              <> \"\\n\\nThe generic terms in b should be resolved through subtyping and used to resolve the unpacked type:\"\n              <> \"\\n  c:\" <+> pretty c\n              <> \"\\n\\nHowever, the b <: a step failed:\\n\"\n              <> typeErr\n              <> \"\\n\\nThe packer function may not be generic enough to pack the type you specify, if this is the case, you may need to simplify the datatype\"\n        (Right g) -> do\n          return (apply g (existential c))\n\n      maybeUnpackedGeneralType <- case generalTypes of\n        (u, gc) -> do\n          -- where u  is the unresolved general packed type that was stored in Desugar.hs\n          --       gc is the unresolved general unpacked type\n          case subtype Map.empty u ga (Gamma 0 0 IntMap.empty Map.empty Map.empty [] Map.empty Map.empty) of\n            (Left _) -> return Nothing\n            (Right g) -> do\n              return . Just $ apply g (existential gc)\n\n      return $ case maybeUnpackedGeneralType of\n        (Just resolvedUnpackedGeneralType) -> Just $ weaveTypeF resolvedUnpackedGeneralType unpackedConcreteType\n        Nothing -> Nothing\n\n    -- Replaces each generic term with an existential term of the same name\n    existential :: TypeU -> TypeU\n    existential (ForallU v t0) = substitute v (existential t0)\n    existential t0 = t0\nresolvePacker _ m0 _ _ = MM.throwSourcedError m0 $ \"No packer found for this type\"\n\ncv2tv :: CVar -> TVar\ncv2tv (CV x) = TV x\n\ntv2cv :: TVar -> CVar\ntv2cv (TV x) = CV x\n\nunweaveTypeF :: TypeF -> (TypeU, TypeU)\nunweaveTypeF (UnkF (FV gv cv)) = (VarU gv, VarU (cv2tv cv))\nunweaveTypeF (VarF (FV gv cv)) = (VarU gv, VarU (cv2tv cv))\nunweaveTypeF (FunF ts t) =\n  let (gt, ct) = unweaveTypeF t\n      (gts, cts) = unzip $ map unweaveTypeF ts\n   in (FunU gts gt, FunU cts ct)\nunweaveTypeF (AppF t ts) =\n  let (gt, ct) = unweaveTypeF t\n      (gts, cts) = unzip $ map unweaveTypeF ts\n   in (AppU gt gts, AppU ct cts)\nunweaveTypeF (NamF n (FV gv cv) ps rs) =\n  let (psg, psc) = unzip $ map unweaveTypeF ps\n      keys = map fst rs\n      (vsg, vsc) = unzip $ map (unweaveTypeF . snd) rs\n   in (NamU n gv psg (zip keys vsg), NamU n (cv2tv cv) psc (zip keys vsc))\nunweaveTypeF (EffectF effs t) =\n  let (gt, ct) = unweaveTypeF t\n   in (EffectU (EffectSet effs) gt, EffectU (EffectSet effs) ct)\nunweaveTypeF (OptionalF t) =\n  let (gt, ct) = unweaveTypeF t\n   in (OptionalU gt, OptionalU ct)\n\n-- Nat types have no concrete/general distinction; duplicate as-is\nunweaveTypeF (NatLitF n) = (NatLitU n, NatLitU n)\n\nweaveTypeF :: TypeU -> TypeU -> TypeF\nweaveTypeF (VarU gv) (VarU cv) = VarF (FV gv (tv2cv cv))\nweaveTypeF (FunU tsg tg) (FunU tsc tc) = FunF (zipWith weaveTypeF tsg tsc) (weaveTypeF tg tc)\nweaveTypeF (AppU tg tsg) (AppU tc tsc) = AppF (weaveTypeF tg tc) (zipWith weaveTypeF tsg tsc)\nweaveTypeF (NamU n gv psg rsg) (NamU _ cv psc rsc) =\n  NamF\n    n\n    (FV gv (tv2cv cv))\n    (zipWith weaveTypeF psg psc)\n    ( zip\n        (map fst rsg)\n        (zipWith weaveTypeF (map snd rsg) (map snd rsc))\n    )\nweaveTypeF (EffectU effs gt) (EffectU _ ct) = EffectF (resolveEffectSet effs) (weaveTypeF gt ct)\nweaveTypeF (OptionalU gt) (OptionalU ct) = OptionalF (weaveTypeF gt ct)\nweaveTypeF ((ExistU gv _ _)) (ExistU cv _ _) = UnkF (FV gv (tv2cv cv))\nweaveTypeF (NatLitU n) (NatLitU _) = NatLitF n\nweaveTypeF (NatLitU n) _ = NatLitF n  -- Nat params may be erased in concrete type\nweaveTypeF (NatVarU _) _ = NatLitF 0  -- Nat vars erased in concrete type\nweaveTypeF (LabeledU _ gt) ct = weaveTypeF gt ct\nweaveTypeF gt (LabeledU _ ct) = weaveTypeF gt ct\nweaveTypeF gt ct = error . show $ (gt, ct)\n\n-- | Check if a SerialAST's root has the \"arrow\" concrete type hint\nhasArrowHint :: SerialAST -> Bool\nhasArrowHint (SerialObject _ (FV _ (CV \"arrow\")) _ _) = True\nhasArrowHint _ = False\n\n{- | Given a list of possible ways to (de)serialize data between two languages,\nchoose one (or none if the list is empty). Currently I just take the first\nin the list, but different cycles may have very different performance, so\nthis will be an important optimization step later on.\n-}\nchooseSerializationCycle ::\n  [(SerialAST, SerialAST)] ->\n  Maybe (SerialAST, SerialAST)\nchooseSerializationCycle [] = Nothing\nchooseSerializationCycle (x : _) = Just x\n\n{- | Determine if a SerialAST can be directly translated to JSON, if not it\nwill need to be further reduced.\n-}\nisSerializable :: SerialAST -> Bool\nisSerializable (SerialPack _ _) = False\nisSerializable (SerialList _ x) = isSerializable x\nisSerializable (SerialTuple _ xs) = all isSerializable xs\nisSerializable (SerialObject _ _ _ rs) = all (isSerializable . snd) rs\nisSerializable (SerialReal _) = True\nisSerializable (SerialFloat32 _) = True\nisSerializable (SerialFloat64 _) = True\nisSerializable (SerialInt _) = True\nisSerializable (SerialInt8 _) = True\nisSerializable (SerialInt16 _) = True\nisSerializable (SerialInt32 _) = True\nisSerializable (SerialInt64 _) = True\nisSerializable (SerialUInt _) = True\nisSerializable (SerialUInt8 _) = True\nisSerializable (SerialUInt16 _) = True\nisSerializable (SerialUInt32 _) = True\nisSerializable (SerialUInt64 _) = True\nisSerializable (SerialBool _) = True\nisSerializable (SerialString _) = True\nisSerializable (SerialNull _) = True\nisSerializable (SerialOptional _ x) = isSerializable x\nisSerializable (SerialTensor _ _ x) = isSerializable x\nisSerializable (SerialUnknown _) = True -- are you feeling lucky?\n\nprettySerialOne :: SerialAST -> MDoc\nprettySerialOne (SerialPack _ _) = \"SerialPack\"\nprettySerialOne (SerialList v x) = \"SerialList\" <> angles (pretty v) <> parens (prettySerialOne x)\nprettySerialOne (SerialTuple v xs) = \"SerialTuple\" <> angles (pretty v) <> tupled (map prettySerialOne xs)\nprettySerialOne (SerialObject r _ _ rs) =\n  block 4 (\"SerialObject@\" <> viaShow r) $\n    vsep (map (\\(k, v) -> parens (viaShow k) <> \"=\" <> prettySerialOne v) rs)\nprettySerialOne (SerialReal _) = \"SerialReal\"\nprettySerialOne (SerialFloat32 _) = \"SerialFloat32\"\nprettySerialOne (SerialFloat64 _) = \"SerialFloat64\"\nprettySerialOne (SerialInt _) = \"SerialInt\"\nprettySerialOne (SerialInt8 _) = \"SerialInt8\"\nprettySerialOne (SerialInt16 _) = \"SerialInt16\"\nprettySerialOne (SerialInt32 _) = \"SerialInt32\"\nprettySerialOne (SerialInt64 _) = \"SerialInt64\"\nprettySerialOne (SerialUInt _) = \"SerialUInt\"\nprettySerialOne (SerialUInt8 _) = \"SerialUInt8\"\nprettySerialOne (SerialUInt16 _) = \"SerialUInt16\"\nprettySerialOne (SerialUInt32 _) = \"SerialUInt32\"\nprettySerialOne (SerialUInt64 _) = \"SerialUInt64\"\nprettySerialOne (SerialBool _) = \"SerialBool\"\nprettySerialOne (SerialString _) = \"SerialString\"\nprettySerialOne (SerialNull _) = \"SerialNull\"\nprettySerialOne (SerialOptional _ x) = \"SerialOptional\" <> parens (prettySerialOne x)\nprettySerialOne (SerialTensor _ ndim x) = \"SerialTensor\" <> pretty ndim <> parens (prettySerialOne x)\nprettySerialOne (SerialUnknown _) = \"SerialUnknown\"\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/Serialize.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.CodeGenerator.Serialize\nDescription : Insert pack\\/unpack operations at cross-language call boundaries\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nWalks the segmented manifold tree and inserts serialization\\/deserialization\ncalls wherever data crosses a language boundary (interprocess communication).\nUses 'Serial.makeSerialAST' to determine the packing strategy for each type.\nThe result is the 'SerialManifold' tree consumed by the translators.\n-}\nmodule Morloc.CodeGenerator.Serialize\n  ( serialize\n  ) where\n\nimport Data.Text (Text)\nimport Morloc.CodeGenerator.Infer\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.CodeGenerator.Serial as Serial\nimport qualified Morloc.Config as MC\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Monad as MM\n\n{- | This step is performed after segmentation, so all terms are in the same\nlanguage. Here we need to determine where inputs are (de)serialized and the\nserialization states of arguments and variables.\n-}\nserialize :: MonoHead -> MorlocMonad SerialManifold\nserialize (MonoHead lang m0 args0 headForm0 e0) = do\n  form0 <- ManifoldFull <$> mapM prepareArg args0\n\n  MM.sayVVV $\n    \"In serialize for\" <+> \"m\"\n      <> pretty m0 <+> pretty lang <+> \"segment\"\n      <> \"\\n  form0:\" <+> pretty form0\n      <> \"\\n  typemap:\" <+> viaShow typemap\n      <> \"\\n  This map we made from the expression:\\n  \"\n      <> pretty e0\n\n  se1 <- serialExpr m0 e0\n  let sm = SerialManifold m0 lang form0 headForm0 se1\n  wireSerial lang sm\n  where\n    inferType = inferConcreteType lang\n    inferTypeUniversal = inferConcreteTypeUniversal lang\n    inferVar = inferConcreteVar lang\n\n    typemap = makeTypemap m0 e0\n\n    prepareArg ::\n      Arg None ->\n      MorlocMonad (Arg (Or TypeS TypeF))\n    prepareArg (Arg i _) = case Map.lookup i typemap of\n      Nothing -> return $ Arg i (L PassthroughS)\n      (Just (Right t)) -> do\n        t' <- inferType t\n        return $ Arg i (L (typeSof t'))\n      (Just (Left t)) -> do\n        MM.sayVVV \"Warning: using universal inference at prepareArg\"\n        t' <- inferTypeUniversal t\n        return $ Arg i (L (typeSof t'))\n\n    contextArg ::\n      Int ->\n      MorlocMonad (Or TypeS TypeF)\n    contextArg i = case Map.lookup i typemap of\n      (Just (Right t)) -> do\n        t' <- inferType t\n        return $ LR (typeSof t') t'\n      Nothing -> return $ L PassthroughS\n      (Just (Left t)) -> do\n        MM.sayVVV \"Warning: using universal inference at contextArg\"\n        t' <- inferTypeUniversal t\n        return $ LR (typeSof t') t'\n\n    boundArg :: Int -> MorlocMonad TypeF\n    boundArg i = case Map.lookup i typemap of\n      (Just (Right t)) -> inferType t\n      Nothing -> error \"Untyped native arg\"\n      (Just (Left t)) -> do\n        MM.sayVVV \"Warning: using universal inference at boundArg\"\n        inferTypeUniversal t\n\n    serialExpr ::\n      Int ->\n      MonoExpr ->\n      MorlocMonad SerialExpr\n    serialExpr _ (MonoManifold m form e) = do\n      MM.sayVVV $ \"serialExpr MonoManifold m\" <> pretty m <> parens (pretty form)\n      serialExpr m e\n    serialExpr m (MonoLet i e1 e2) =\n      let (m1, e1') = unwrapLetDef m e1\n       in case inferState e1 of\n            Serialized -> SerialLetS i <$> serialExpr m1 e1' <*> serialExpr m e2\n            Unserialized -> do\n              ne1 <- nativeExpr m1 e1'\n              NativeLetS i ne1 <$> serialExpr m e2\n    serialExpr _ (MonoLetVar t i) = do\n      t' <- inferType t\n      return $ LetVarS (Just t') i\n    serialExpr m (MonoReturn e) = ReturnS <$> serialExpr m e\n    serialExpr _ (MonoApp (MonoPoolCall t m docs remoteCall contextArgs) es) = do\n      contextArgs' <- mapM (typeArg Serialized . ann) contextArgs\n      let poolCall' = PoolCall m docs remoteCall contextArgs'\n      es' <- mapM (serialArg m) es\n      t' <- inferType t\n      return $ AppPoolS t' poolCall' es'\n    serialExpr _ (MonoBndVar (A _) i) = return $ BndVarS Nothing i\n    serialExpr _ (MonoBndVar (B _) i) =\n      case Map.lookup i typemap of\n        (Just (Right t)) -> BndVarS <$> fmap Just (inferType t) <*> pure i\n        _ -> return $ BndVarS Nothing i\n    serialExpr _ (MonoBndVar (C t) i) = BndVarS <$> fmap Just (inferType t) <*> pure i\n    serialExpr m (MonoIf cond thenE elseE) = do\n      ne <- nativeExpr m (MonoIf cond thenE elseE)\n      serializeS \"serialE MonoIf\" m ne\n    -- Thunk-producing intrinsics: convert to native and serialize with the\n    -- inner type (strip EffectF) so the wire format matches the forced value.\n    serialExpr m (MonoDoBlock _ e) = serialExpr m e\n    serialExpr _ (MonoExe _ _) = error \"Can represent MonoSrc as SerialExpr\"\n    serialExpr _ MonoPoolCall {} = error \"MonoPoolCall does not map to a SerialExpr\"\n    serialExpr _ (MonoApp MonoManifold {} _) = error \"Illegal?\"\n    serialExpr m e = nativeExpr m e >>= serializeS \"serialE e\" m\n\n    serialArg ::\n      Int ->\n      MonoExpr ->\n      MorlocMonad SerialArg\n    serialArg _ e@(MonoManifold m form _) = do\n      MM.sayVVV $ \"serialArg MonoManifold m\" <> pretty m <> parens (pretty form)\n      se <- serialExpr m e\n      case se of\n        (ManS sm) -> return $ SerialArgManifold sm\n        _ -> error \"Unreachable?\"\n    serialArg _ MonoPoolCall {} = error \"This step should be unreachable\"\n    serialArg _ (MonoExe _ _) = error \"This step should be unreachable\"\n    serialArg _ (MonoReturn _) = error \"Return should not happen hear (really I should remove this term completely)\"\n    serialArg m e = SerialArgExpr <$> serialExpr m e\n\n    nativeArg ::\n      Int ->\n      MonoExpr ->\n      MorlocMonad NativeArg\n    nativeArg _ e@(MonoManifold m form _) = do\n      MM.sayVVV $ \"nativeArg MonoManifold m\" <> pretty m <> parens (pretty form)\n      ne <- nativeExpr m e\n      case ne of\n        (ManN nm) -> return $ NativeArgManifold nm\n        _ -> error \"Unreachable?\"\n    nativeArg _ MonoPoolCall {} = error \"This step should be unreachable\"\n    nativeArg _ (MonoExe _ _) = error \"This step should be unreachable\"\n    nativeArg _ (MonoReturn _) = error \"Return should not happen here (really I should remove this term completely)\"\n    nativeArg m e = NativeArgExpr <$> nativeExpr m e\n\n    nativeExpr ::\n      Int ->\n      MonoExpr ->\n      MorlocMonad NativeExpr\n    nativeExpr _ (MonoManifold m form e) = do\n      MM.sayVVV $ \"nativeExpr MonoManifold m\" <> pretty m <> parens (pretty form)\n      ne <- nativeExpr m e\n      form' <- abimapM (\\i _ -> contextArg i) (\\i _ -> boundArg i) form\n      return . ManN $ NativeManifold m lang form' ne\n    nativeExpr _ MonoPoolCall {} = error \"MonoPoolCall does not map to NativeExpr\"\n    nativeExpr m (MonoLet i e1 e2) =\n      let (m1, e1') = unwrapLetDef m e1\n       in case inferState e1 of\n            Serialized -> do\n              ne2 <- nativeExpr m e2\n              SerialLetN i <$> serialExpr m1 e1' <*> pure ne2\n            Unserialized -> do\n              ne1 <- nativeExpr m1 e1'\n              ne2 <- nativeExpr m e2\n              return $ NativeLetN i ne1 ne2\n    nativeExpr _ (MonoLetVar t i) = LetVarN <$> inferType t <*> pure i\n    nativeExpr m (MonoReturn e) = ReturnN <$> nativeExpr m e\n    -- Cross-language recursive call: serialize args, call via socket, deserialize result\n    nativeExpr m (MonoApp (MonoExe (Idx idx t0) (RecCallP mid (Just targetLang))) es) = do\n      let (_, outputType) = case t0 of\n            FunT its ot -> (its, ot)\n            _ -> ([], t0)\n      nativeArgs <- mapM (nativeExpr m) es\n      serializedArgs <- mapM (serializeS \"foreignRecArg\" m) nativeArgs\n      resultType <- inferType (Idx idx outputType)\n      config <- MM.ask\n      reg <- MM.gets stateLangRegistry\n      let socket = MC.setupServerAndSocket config reg targetLang\n          serialCall = AppForeignRecS resultType mid socket serializedArgs\n      naturalizeN \"foreignRecCall\" m lang resultType serialCall\n    -- Same-language recursive call: serialize args, call serial manifold, deserialize result\n    nativeExpr m (MonoApp (MonoExe (Idx idx t0) (RecCallP mid Nothing)) es) = do\n      let (_, outputType) = case t0 of\n            FunT its ot -> (its, ot)\n            _ -> ([], t0)\n      -- Build native args, then serialize each one\n      nativeArgs <- mapM (nativeExpr m) es\n      serializedArgs <- mapM (serializeS \"recArg\" m) nativeArgs\n      -- Return type of the serial manifold call\n      resultType <- inferType (Idx idx outputType)\n      -- Create serial expression: call the serial manifold with serialized args\n      let serialCall = AppRecS resultType mid serializedArgs\n      -- Deserialize the result back to native\n      naturalizeN \"recCall\" m lang resultType serialCall\n    nativeExpr m (MonoApp (MonoExe (Idx idx t0) exe) es) = do\n      args <- mapM (nativeArg m) es\n      let (inputTypes, outputType) = case t0 of\n            FunT its ot -> (its, ot)\n            _ -> ([], t0)\n      appType <- case drop (length es) inputTypes of\n        [] -> inferType (Idx idx outputType)\n        remaining -> inferType $ Idx idx (FunT remaining outputType)\n\n      return $ AppExeN appType exe args\n    nativeExpr m e@(MonoApp (MonoPoolCall t _ _ _ _) _) = do\n      e' <- serialExpr m e\n      t' <- inferType t\n      MM.sayVVV $ \"nativeExpr MonoApp:\" <+> pretty t'\n      naturalizeN \"nativeE MonoApp\" m lang t' e'\n    nativeExpr m (MonoApp (MonoLetVar (Idx idx (FunT inputTypes outputType)) i) es) = do\n      MM.sayVVV $ \"MonoLetVar case\"\n      args <- mapM (nativeArg m) es\n      appType <- case drop (length es) inputTypes of\n        [] -> inferType (Idx idx outputType)\n        remaining -> inferType $ Idx idx (FunT remaining outputType)\n      return $ AppExeN appType (LocalCallP i) args\n    nativeExpr _ (MonoApp e es) = do\n      MM.sayVVV \"nativeExprr MonoApp\"\n      MM.sayVVV $ \"e:\" <+> pretty e\n      MM.sayVVV $ \"es:\" <+> list (map pretty es)\n      error \"Illegal application\"\n    nativeExpr _ (MonoExe t exe) = ExeN <$> inferType t <*> pure exe\n    nativeExpr _ (MonoBndVar (A _) _) = error \"MonoBndVar must have a type if used in native context\"\n    nativeExpr _ (MonoBndVar (B _) i) =\n      case Map.lookup i typemap of\n        (Just (Right t)) -> BndVarN <$> inferType t <*> pure i\n        _ -> error \"No type found\"\n    nativeExpr _ (MonoBndVar (C t) i) = BndVarN <$> inferType t <*> pure i\n    nativeExpr m (MonoList v t es) =\n      ListN\n        <$> inferVar v\n        <*> inferType t\n        <*> mapM (nativeExpr m) es\n    nativeExpr m (MonoTuple v rs) =\n      TupleN\n        <$> inferVar v\n        <*> mapM (nativeExpr m . snd) rs\n    nativeExpr m (MonoRecord o v ps rs) =\n      RecordN o\n        <$> inferVar v\n        <*> mapM inferType ps\n        <*> mapM (secondM (nativeExpr m . snd)) rs\n    nativeExpr _ (MonoLog v x) = LogN <$> inferVar v <*> pure x\n    nativeExpr _ (MonoReal v x) = RealN <$> inferVar v <*> pure x\n    nativeExpr _ (MonoInt v x) = IntN <$> inferVar v <*> pure x\n    nativeExpr _ (MonoStr v x) = StrN <$> inferVar v <*> pure x\n    nativeExpr _ (MonoNull v) = NullN <$> inferVar v\n    nativeExpr m (MonoIf cond thenE elseE) = do\n      condNe <- nativeExpr m cond\n      thenNe <- nativeExpr m thenE\n      elseNe <- nativeExpr m elseE\n      let ifType = case (thenNe, elseNe) of\n            (NullN _, _) -> typeFof elseNe\n            (_, NullN _) -> typeFof thenNe\n            _ -> typeFof thenNe\n      return $ IfN ifType condNe thenNe elseNe\n    nativeExpr m (MonoDoBlock t e) = DoBlockN <$> inferType t <*> nativeExpr m e\n    nativeExpr m (MonoEval t e) = EvalN <$> inferType t <*> nativeExpr m e\n    nativeExpr m (MonoCoerce c t e) = CoerceN c <$> inferType t <*> nativeExpr m e\n    -- Runtime intrinsics with thunk return types (save/load): the C functions\n    -- (mlc_save, mlc_load) are eager, so we wrap them in DoBlockN to produce a\n    -- proper thunk that EvalN can call.\n    nativeExpr m (MonoIntrinsic t intr es)\n      | intr `elem` [IntrSave, IntrSaveM, IntrSaveJ, IntrLoad] = do\n          tf <- inferType t\n          es' <- mapM (nativeExpr m) es\n          msch <- intrinsicSchema m intr tf es'\n          let innerTf = case tf of\n                EffectF _ inner -> inner\n                other -> other\n          return $ DoBlockN tf (IntrinsicN innerTf intr msch es')\n    nativeExpr m (MonoIntrinsic t intr es) = do\n      tf <- inferType t\n      es' <- mapM (nativeExpr m) es\n      msch <- intrinsicSchema m intr tf es'\n      return $ IntrinsicN tf intr msch es'\n\n    -- Compute the msgpack schema string for runtime intrinsics\n    intrinsicSchema :: Int -> Intrinsic -> TypeF -> [NativeExpr] -> MorlocMonad (Maybe Text)\n    intrinsicSchema m intr _ (dataArg:_)\n      | intr `elem` [IntrHash, IntrSave, IntrSaveM, IntrSaveJ, IntrShow, IntrSchema] = do\n          ast <- Serial.makeSerialAST m lang (typeFof dataArg)\n          return . Just . render $ Serial.serialAstToMsgpackSchema ast\n    intrinsicSchema _ IntrTypeof _ (dataArg:_) =\n      -- @typeof yields the user-facing type name as a compile-time constant\n      -- string. The string is stored in the Intrinsic node's schema slot and\n      -- emitted as a literal by the translator; the argument is erased.\n      return . Just $ renderTypeFName (typeFof dataArg)\n    intrinsicSchema m IntrLoad tf _ = do\n      -- For @load, the return type is {?a} or ?a; the schema is for a\n      let unwrap (EffectF _ inner) = unwrap inner\n          unwrap (OptionalF inner) = inner\n          unwrap other = other\n          dataType = unwrap tf\n      ast <- Serial.makeSerialAST m lang dataType\n      return . Just . render $ Serial.serialAstToMsgpackSchema ast\n    intrinsicSchema m IntrRead tf _ = do\n      -- For @read, the return type is ?a; the schema is for a\n      let unwrap (OptionalF inner) = inner\n          unwrap other = other\n          dataType = unwrap tf\n      ast <- Serial.makeSerialAST m lang dataType\n      return . Just . render $ Serial.serialAstToMsgpackSchema ast\n    intrinsicSchema _ _ _ _ = return Nothing\n\n    -- Render a TypeF as a user-facing Morloc type string (for @typeof).\n    -- Uses the general type variable name (not the language-concrete one),\n    -- matching what the user wrote in their source.\n    renderTypeFName :: TypeF -> Text\n    renderTypeFName = render . go\n      where\n        go (UnkF (FV t _)) = pretty t\n        go (VarF (FV t _)) = pretty t\n        go (NamF _ (FV t _) params _) =\n          case params of\n            [] -> pretty t\n            ps -> parens (pretty t <+> hsep (map go ps))\n        go (AppF con args) = parens (go con <+> hsep (map go args))\n        go (FunF args ret) =\n          parens (hsep (punctuate \" ->\" (map go args ++ [go ret])))\n        go (EffectF _ t) = go t\n        go (OptionalF t) = \"?\" <> go t\n        go (NatLitF n) = pretty n\n\n    typeArg ::\n      SerializationState ->\n      Int ->\n      MorlocMonad (Arg TypeM)\n    typeArg s i = case (s, Map.lookup i typemap) of\n      (Serialized, Just (Right t)) -> do\n        t' <- inferType t\n        return $ Arg i (Serial t')\n      (Serialized, Nothing) -> return $ Arg i Passthrough\n      (Serialized, Just (Left t)) -> do\n        MM.sayVVV $ \"typeArg universal inference of unindexed type \" <> pretty t\n        t' <- inferTypeUniversal t\n        return $ Arg i (Serial t')\n      (Unserialized, Just (Right t)) -> do\n        t' <- inferType t\n        return $ Arg i (Native t')\n      (Unserialized, Nothing) -> error \"Bug: untyped non-passthrough value\"\n      (Unserialized, Just (Left t)) -> do\n        MM.sayVVV $ \"typeArg universal inference of unindexed type \" <> pretty t\n        t' <- inferTypeUniversal t\n        return $ Arg i (Native t')\n\n    makeTypemap :: Int -> MonoExpr -> Map.Map Int (Either Type (Indexed Type))\n    makeTypemap _ (MonoLetVar t i) = Map.singleton i (Right t)\n    makeTypemap parentIndex (MonoBndVar (B t) i) = Map.singleton i (Right (Idx parentIndex t))\n    makeTypemap _ (MonoBndVar (C t) i) = Map.singleton i (Right t)\n    makeTypemap _ (MonoManifold midx (manifoldBound -> ys) e) =\n      Map.union (Map.fromList [(i, Left t) | (Arg i (Just t)) <- ys]) (makeTypemap midx e)\n    makeTypemap parentIdx (MonoLet _ e1 e2) = Map.union (makeTypemap parentIdx e1) (makeTypemap parentIdx e2)\n    makeTypemap parentIdx (MonoReturn e) = makeTypemap parentIdx e\n    makeTypemap parentIdx (MonoEval _ e) = makeTypemap parentIdx e\n    makeTypemap parentIdx (MonoDoBlock _ e) = makeTypemap parentIdx e\n    makeTypemap parentIdx (MonoCoerce _ _ e) = makeTypemap parentIdx e\n    makeTypemap parentIdx (MonoIntrinsic _ _ es) = Map.unionsWith mergeTypes (map (makeTypemap parentIdx) es)\n    makeTypemap parentIdx (MonoIf cond thenE elseE) =\n      Map.unionsWith mergeTypes [makeTypemap parentIdx cond, makeTypemap parentIdx thenE, makeTypemap parentIdx elseE]\n    makeTypemap _ (MonoApp (MonoExe (ann -> idx) _) es) = Map.unionsWith mergeTypes (map (makeTypemap idx) es)\n    makeTypemap parentIdx (MonoApp e es) = Map.unionsWith mergeTypes (map (makeTypemap parentIdx) (e : es))\n    makeTypemap _ (MonoList (ann -> idx) _ es) = Map.unionsWith mergeTypes (map (makeTypemap idx) es)\n    makeTypemap _ (MonoTuple (ann -> idx) (map snd -> es)) = Map.unionsWith mergeTypes (map (makeTypemap idx) es)\n    makeTypemap _ (MonoRecord _ (ann -> idx) _ (map (snd . snd) -> es)) = Map.unionsWith mergeTypes (map (makeTypemap idx) es)\n    makeTypemap _ _ = Map.empty\n\n    mergeTypes :: Either Type (Indexed Type) -> Either Type (Indexed Type) -> Either Type (Indexed Type)\n    mergeTypes (Right t) _ = Right t\n    mergeTypes _ (Right t) = Right t\n    mergeTypes x _ = x\n\n    serializeS :: MDoc -> Int -> NativeExpr -> MorlocMonad SerialExpr\n    serializeS msg m se = do\n      MM.sayVVV $ \"serializeS\" <+> pretty m <> \":\" <+> msg\n      SerializeS <$> Serial.makeSerialAST m lang (typeFof se) <*> pure se\n\n    inferState :: MonoExpr -> SerializationState\n    inferState (MonoApp MonoPoolCall {} _) = Serialized\n    inferState (MonoApp MonoExe {} _) = Unserialized\n    inferState (MonoApp (MonoManifold _ _ e) _) = inferState e\n    inferState (MonoLet _ _ e) = inferState e\n    inferState (MonoReturn e) = inferState e\n    inferState (MonoManifold _ _ e) = inferState e\n    inferState (MonoIf _ thenE _) = inferState thenE\n    inferState MonoPoolCall {} = Unserialized\n    inferState MonoBndVar {} = Unserialized\n    inferState _ = Unserialized\n\n{- | Unwrap structural MonoManifold/MonoReturn wrappers from a let definition.\nMonoManifold contributes its index (for type lookups); MonoReturn is the\nmanifold's return semantics, which is meaningless in a let-binding context.\n-}\nunwrapLetDef :: Int -> MonoExpr -> (Int, MonoExpr)\nunwrapLetDef _ (MonoManifold m _ (MonoReturn e)) = (m, e)\nunwrapLetDef _ (MonoManifold m _ e) = (m, e)\nunwrapLetDef m (MonoReturn e) = (m, e)\nunwrapLetDef m e = (m, e)\n\nnaturalizeN :: MDoc -> Int -> Lang -> TypeF -> SerialExpr -> MorlocMonad NativeExpr\nnaturalizeN msg m lang t se = do\n  MM.sayVVV $ \"naturalizeN at\" <+> msg\n  DeserializeN t <$> Serial.makeSerialAST m lang t <*> pure se\n\nclass IsSerializable a where\n  serialLet :: Int -> SerialExpr -> a -> a\n  nativeLet :: Int -> NativeExpr -> a -> a\n\ninstance IsSerializable SerialExpr where\n  serialLet = SerialLetS\n  nativeLet = NativeLetS\n\ninstance IsSerializable NativeExpr where\n  serialLet = SerialLetN\n  nativeLet = NativeLetN\n\ntype D a = (Map.Map Int Request, a)\n\nwireSerial :: Lang -> SerialManifold -> MorlocMonad SerialManifold\nwireSerial lang sm0@(SerialManifold m0 _ _ _ _) = foldSerialManifoldM fm sm0 |>> snd\n  where\n    defs = makeMonoidFoldDefault Map.empty (Map.unionWith (<>))\n\n    fm =\n      FoldManifoldM\n        { opSerialManifoldM = wireSerialManifold\n        , opNativeManifoldM = wireNativeManifold\n        , opSerialExprM = wireSerialExpr\n        , opNativeExprM = wireNativeExpr\n        , opSerialArgM = monoidSerialArg defs\n        , opNativeArgM = monoidNativeArg defs\n        }\n\n    wireSerialManifold :: SerialManifold_ (D SerialExpr) -> MorlocMonad (D SerialManifold)\n    wireSerialManifold (SerialManifold_ m _ form headForm (req, e)) = do\n      let form' = afirst (specialize req) form\n          req' = Map.map fst (manifoldToMap form')\n      e' <- letWrap m form' req e\n      return (req', SerialManifold m lang form' headForm e')\n\n    wireNativeManifold :: NativeManifold_ (D NativeExpr) -> MorlocMonad (D NativeManifold)\n    wireNativeManifold (NativeManifold_ m _ form (req, e)) = do\n      let form' = afirst (specialize req) form\n          req' = Map.map fst (manifoldToMap form')\n      e' <- letWrap m form' req e\n      return (req', NativeManifold m lang form' e')\n\n    wireSerialExpr (LetVarS_ t i) = return (Map.singleton i SerialContent, LetVarS t i)\n    wireSerialExpr (BndVarS_ t i) = return (Map.singleton i SerialContent, BndVarS t i)\n    wireSerialExpr (AppPoolS_ t p@(PoolCall _ _ _ pargs) args) = do\n      let req1 = Map.unionsWith (<>) (map fst args)\n          req2 = Map.fromList [(i, requestOf tm) | Arg i tm <- pargs]\n          req3 = Map.unionWith (<>) req1 req2\n      return (req3, AppPoolS t p (map snd args))\n    wireSerialExpr (AppRecS_ t mid args) = do\n      let req = Map.unionsWith (<>) (map fst args)\n      return (req, AppRecS t mid (map snd args))\n    wireSerialExpr (AppForeignRecS_ t mid socket args) = do\n      let req = Map.unionsWith (<>) (map fst args)\n      return (req, AppForeignRecS t mid socket (map snd args))\n    wireSerialExpr (SerialLetS_ i (req1, se1) (req2, se2)) = do\n      let req' = Map.unionWith (<>) req1 req2\n      e' <- case Map.lookup i req2 of\n        (Just NativeContent) -> case typeSof se1 of\n          (SerialS tf) -> NativeLetS i <$> naturalizeN \"a\" m0 lang tf se1 <*> pure se2\n          (FunctionS _ (SerialS tf)) -> NativeLetS i <$> naturalizeN \"a\" m0 lang tf se1 <*> pure se2\n          _ -> error \"Unuseable let definition\"\n        (Just NativeAndSerialContent) -> case typeSof se1 of\n          (SerialS tf) -> do\n            ne1 <- naturalizeN \"a\" m0 lang tf (LetVarS (Just tf) i)\n            return $ SerialLetS i se1 (NativeLetS i ne1 se2)\n          (FunctionS _ (SerialS tf)) -> do\n            ne1 <- naturalizeN \"a\" m0 lang tf (LetVarS (Just tf) i)\n            return $ SerialLetS i se1 (NativeLetS i ne1 se2)\n          _ -> error \"Unuseable let definition\"\n        _ -> return $ SerialLetS i se1 se2\n      return (req', e')\n    wireSerialExpr (NativeLetS_ i (req1, ne1) (req2, se2)) = do\n      let req' = Map.unionWith (<>) req1 req2\n      e' <- case Map.lookup i req2 of\n        (Just SerialContent) -> SerialLetS i <$> serializeS \"b\" m0 (typeFof ne1) ne1 <*> pure se2\n        (Just NativeAndSerialContent) -> do\n          let tf = typeFof ne1\n          sv <- serializeS \"b\" m0 tf (LetVarN tf i)\n          return $ NativeLetS i ne1 (SerialLetS i sv se2)\n        _ -> return $ NativeLetS i ne1 se2\n      return (req', e')\n    wireSerialExpr e = monoidSerialExpr defs e\n\n    wireNativeExpr ::\n      NativeExpr_ (D NativeManifold) (D SerialExpr) (D NativeExpr) (D SerialArg) (D NativeArg) ->\n      MorlocMonad (D NativeExpr)\n    wireNativeExpr (LetVarN_ t i) = return (Map.singleton i NativeContent, LetVarN t i)\n    wireNativeExpr (BndVarN_ t i) = return (Map.singleton i NativeContent, BndVarN t i)\n    wireNativeExpr (SerialLetN_ i (req1, se1) (req2, ne2)) = do\n      let req' = Map.unionWith (<>) req1 req2\n      e' <- case Map.lookup i req2 of\n        (Just NativeContent) -> case typeSof se1 of\n          (SerialS tf) -> NativeLetN i <$> naturalizeN \"a\" m0 lang tf se1 <*> pure ne2\n          (FunctionS _ (SerialS tf)) -> NativeLetN i <$> naturalizeN \"a\" m0 lang tf se1 <*> pure ne2\n          _ -> error \"Unuseable let definition\"\n        (Just NativeAndSerialContent) -> case typeSof se1 of\n          (SerialS tf) -> do\n            ne1 <- naturalizeN \"a\" m0 lang tf (LetVarS (Just tf) i)\n            return $ SerialLetN i se1 (NativeLetN i ne1 ne2)\n          (FunctionS _ (SerialS tf)) -> do\n            ne1 <- naturalizeN \"a\" m0 lang tf (LetVarS (Just tf) i)\n            return $ SerialLetN i se1 (NativeLetN i ne1 ne2)\n          _ -> error \"Unuseable let definition\"\n        _ -> return $ SerialLetN i se1 ne2\n      return (req', e')\n    wireNativeExpr (NativeLetN_ i (req1, ne1) (req2, ne2)) = do\n      let req' = Map.unionWith (<>) req1 req2\n      e' <- case Map.lookup i req2 of\n        (Just SerialContent) -> SerialLetN i <$> serializeS \"b\" m0 (typeFof ne1) ne1 <*> pure ne2\n        (Just NativeAndSerialContent) -> do\n          let tf = typeFof ne1\n          sv <- serializeS \"b\" m0 tf (LetVarN tf i)\n          return $ NativeLetN i ne1 (SerialLetN i sv ne2)\n        _ -> return $ NativeLetN i ne1 ne2\n      return (req', e')\n    wireNativeExpr e = monoidNativeExpr defs e\n\n    specialize :: Map.Map Int Request -> Int -> Or TypeS TypeF -> Or TypeS TypeF\n    specialize req i r = case (Map.lookup i req, r) of\n      (Nothing, _) -> L PassthroughS\n      (Just SerialContent, LR t _) -> L t\n      (Just NativeContent, LR _ t) -> R t\n      _ -> r\n\n    letWrap ::\n      (IsSerializable e, HasRequest t, MayHaveTypeF t) =>\n      Int ->\n      ManifoldForm (Or TypeS TypeF) t ->\n      Map.Map Int Request ->\n      e ->\n      MorlocMonad e\n    letWrap m form0 req0 e0 = do\n      foldlM wrapAsNeeded e0 (Map.toList req0)\n      where\n        formMap = manifoldToMap form0\n\n        wrapAsNeeded :: (IsSerializable e) => e -> (Int, Request) -> MorlocMonad e\n        wrapAsNeeded e (i, req) = case (req, Map.lookup i formMap) of\n          (SerialContent, Just (NativeContent, Just t)) -> serialLet i <$> serializeS \"wan 1\" m t (BndVarN t i) <*> pure e\n          (NativeAndSerialContent, Just (NativeContent, Just t)) -> serialLet i <$> serializeS \"wan 2\" m t (BndVarN t i) <*> pure e\n          (NativeContent, Just (SerialContent, Just t)) -> nativeLet i <$> naturalizeN \"wan 3\" m lang t (BndVarS (Just t) i) <*> pure e\n          (NativeAndSerialContent, Just (SerialContent, Just t)) -> nativeLet i <$> naturalizeN \"wan 4\" m lang t (BndVarS (Just t) i) <*> pure e\n          _ -> return e\n\n    manifoldToMap ::\n      (HasRequest t, MayHaveTypeF t) =>\n      ManifoldForm (Or TypeS TypeF) t ->\n      Map.Map Int (Request, Maybe TypeF)\n    manifoldToMap form = f form\n      where\n        mapRequestFromXs xs = Map.fromList [(i, (requestOf t, mayHaveTypeF t)) | (Arg i t) <- typeMofRs xs]\n        mapRequestFromYs ys = Map.fromList [(i, (requestOf t, mayHaveTypeF t)) | (Arg i t) <- ys]\n\n        f (ManifoldFull xs) = mapRequestFromXs xs\n        f (ManifoldPass ys) = mapRequestFromYs ys\n        f (ManifoldPart xs ys) = Map.union (mapRequestFromXs xs) (mapRequestFromYs ys)\n\n    serializeS :: MDoc -> Int -> TypeF -> NativeExpr -> MorlocMonad SerialExpr\n    serializeS msg m t se = do\n      MM.sayVVV $ \"serializeS\" <+> pretty m <> \":\" <+> msg\n      SerializeS <$> Serial.makeSerialAST m lang t <*> pure se\n\ndata Request = SerialContent | NativeContent | NativeAndSerialContent\n  deriving (Ord, Eq, Show)\n\nclass HasRequest a where\n  requestOf :: a -> Request\n\ninstance HasRequest TypeM where\n  requestOf Passthrough = SerialContent\n  requestOf (Serial _) = SerialContent\n  requestOf (Native _) = NativeContent\n  requestOf (Function _ _) = NativeContent\n\ninstance HasRequest SerialExpr where\n  requestOf _ = SerialContent\n\ninstance HasRequest NativeExpr where\n  requestOf _ = NativeContent\n\ninstance HasRequest SerialArg where\n  requestOf _ = SerialContent\n\ninstance HasRequest NativeArg where\n  requestOf _ = NativeContent\n\ninstance HasRequest TypeS where\n  requestOf _ = SerialContent\n\ninstance HasRequest TypeF where\n  requestOf _ = NativeContent\n\ninstance Semigroup Request where\n  SerialContent <> SerialContent = SerialContent\n  NativeContent <> NativeContent = NativeContent\n  _ <> _ = NativeAndSerialContent\n\ndata SerializationState = Serialized | Unserialized\n  deriving (Show, Eq, Ord)\n"
  },
  {
    "path": "library/Morloc/CodeGenerator/SystemConfig.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ScopedTypeVariables #-}\n\n{- |\nModule      : Morloc.CodeGenerator.SystemConfig\nDescription : Write runtime files and compile shared libraries during @morloc init@\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nHandles the @morloc init@ system setup: writing embedded C library sources,\ncompiling @libmorloc.so@, building the static nexus binary, and running\nper-language @init.sh@ scripts to compile language extensions.\n-}\nmodule Morloc.CodeGenerator.SystemConfig\n  ( configure\n  , configureAll\n  ) where\n\nimport Morloc.CodeGenerator.Namespace\nimport qualified Morloc.Completion as Completion\nimport qualified Morloc.DataFiles as DF\nimport Morloc.Module (OverwriteProtocol (..))\n\nimport qualified Data.Text.IO as TIO\n\nimport Control.Exception (SomeException, catch, displayException, fromException, try)\nimport System.IO.Error (ioeGetErrorString)\nimport System.Directory (createDirectoryIfMissing, createFileLink, doesDirectoryExist, doesFileExist, findExecutable, getHomeDirectory, listDirectory, pathIsSymbolicLink, removeDirectoryRecursive, removeFile)\nimport System.Environment (lookupEnv)\nimport System.FilePath (takeDirectory)\nimport System.IO (hIsTerminalDevice, hPutStrLn, stderr)\nimport System.Exit (ExitCode(..))\nimport System.Process (CreateProcess(..), StdStream(..), createProcess, proc, waitForProcess)\n\nconfigure :: [AnnoS (Indexed Type) One (Indexed Lang)] -> MorlocMonad ()\nconfigure _ = return ()\n\nconfigureAll :: Bool -> OverwriteProtocol -> Bool -> Bool -> Config -> IO Bool\nconfigureAll verbose force slurmSupport sanitize config = do\n  result <- try (configureAllSteps verbose force slurmSupport sanitize config) :: IO (Either SomeException ())\n  case result of\n    Left e -> do\n      -- Strip the \"user error (...)\" wrapper from IOError messages\n      let msg = case fromException e :: Maybe IOError of\n            Just ioe -> ioeGetErrorString ioe\n            Nothing -> displayException e\n      sayError $ \"Configuration failed: \" ++ msg\n      return False\n    Right _ -> return True\n\nconfigureAllSteps :: Bool -> OverwriteProtocol -> Bool -> Bool -> Config -> IO ()\nconfigureAllSteps verbose force slurmSupport sanitize config = do\n  let homeDir = configHome config\n      srcLibrary = configLibrary config\n      includeDir = homeDir </> \"include\"\n      tmpDir = configTmpDir config\n      optDir = homeDir </> \"opt\"\n      libDir = homeDir </> \"lib\"\n\n  -- When force is set, clean build output directories\n  when (force == ForceOverwrite) $ do\n    sayInfo verbose \"Force rebuild: cleaning stale artifacts\"\n    cleanDirectory libDir\n    cleanDirectoryExcept includeDir [\"mlccpptypes\"]\n    cleanDirectory optDir\n\n  ensureDirectory verbose \"morloc home directory\" homeDir\n  ensureDirectory verbose \"morloc lib directory\" libDir\n  ensureDirectory verbose \"morloc include directory\" includeDir\n  ensureDirectory verbose \"morloc tmp directory\" tmpDir\n  ensureDirectory verbose \"morloc opt directory\" optDir\n  ensureDirectory verbose \"morloc module directory\" srcLibrary\n\n  sayInfo verbose $ \"Slurm support ... \" <> show slurmSupport\n\n  sayInfo verbose $ \"Sanitize ... \" <> show sanitize\n\n  sayInfo verbose \"Writing build config file\"\n  let sanitizeLine = if sanitize then \"\\nsanitize: true\" else \"\\nsanitize: false\"\n  TIO.writeFile\n    (configBuildConfig config)\n    ((if slurmSupport then \"slurm-support: true\" else \"slurm-support: false\") <> sanitizeLine)\n\n  -- Clean and create build directory\n  let buildDir = tmpDir </> \"libmorloc-build\"\n  buildDirExists <- doesDirectoryExist buildDir\n  when buildDirExists $ removeDirectoryRecursive buildDir\n  createDirectoryIfMissing True buildDir\n\n  requireTool \"gcc\" \"gcc is required to compile language extensions (C++ pools, Python/R bindings)\"\n  -- Install morloc.h (the C ABI contract for language extensions and pool templates)\n  sayInfo verbose \"Installing morloc.h\"\n  TIO.writeFile (includeDir </> \"morloc.h\") DF.libmorlocHeader\n\n  -- Install libmorloc.so and morloc-nexus.\n  --\n  -- Strategy (in priority order):\n  --   1. MORLOC_RUST_BIN: directory with pre-built libmorloc.so + morloc-nexus\n  --      (used for release installs with portable musl-linked binaries)\n  --   2. MORLOC_RUST_DIR: Cargo workspace source — build from source via cargo\n  --      (used for development and container builds)\n  --   3. Auto-detect Cargo workspace relative to morloc binary\n  let soPath = libDir </> \"libmorloc.so\"\n  -- Primary install goes to $MORLOC_HOME/bin/\n  let nexusBinDir = homeDir </> \"bin\"\n      nexusBinPath = nexusBinDir </> \"morloc-nexus\"\n  createDirectoryIfMissing True nexusBinDir\n  -- Symlink to ~/.local/bin/ if that directory exists\n  userHome <- getHomeDirectory\n  let userBinDir = userHome </> \".local\" </> \"bin\"\n\n  rustBinEnv <- lookupEnv \"MORLOC_RUST_BIN\"\n  case rustBinEnv of\n    Just binDir -> do\n      -- Pre-built binaries (release path)\n      let prebuiltSo = binDir </> \"libmorloc.so\"\n          prebuiltNexus = binDir </> \"morloc-nexus\"\n          prebuiltManager = binDir </> \"morloc-manager\"\n          managerBinPath = nexusBinDir </> \"morloc-manager\"\n      sayInfo verbose $ \"Installing pre-built libmorloc.so from \" <> binDir\n      run verbose \"cp\" [prebuiltSo, soPath]\n      run verbose \"cp\" [prebuiltNexus, nexusBinPath]\n      run verbose \"chmod\" [\"+x\", soPath]\n      run verbose \"chmod\" [\"+x\", nexusBinPath]\n      -- Install morloc-manager if present in pre-built binaries\n      managerExists <- doesFileExist prebuiltManager\n      when managerExists $ do\n        sayInfo verbose \"Installing pre-built morloc-manager\"\n        run verbose \"cp\" [prebuiltManager, managerBinPath]\n        run verbose \"chmod\" [\"+x\", managerBinPath]\n    Nothing -> do\n      -- Try to build from source: need both cargo and the Rust workspace\n      rustDirEnv <- lookupEnv \"MORLOC_RUST_DIR\"\n      rustDir <- case rustDirEnv of\n        Just d -> return d\n        Nothing -> do\n          morlocBin <- findExecutable \"morloc\"\n          let searchDirs = case morlocBin of\n                Just binPath ->\n                  [ takeDirectory (takeDirectory binPath) </> \"share\" </> \"morloc\" </> \"rust\"\n                  , takeDirectory (takeDirectory binPath) </> \"data\" </> \"rust\"\n                  ]\n                Nothing -> []\n          findRustDir searchDirs\n\n      hasCargo <- findExecutable \"cargo\"\n\n      when (null rustDir || hasCargo == Nothing) $\n        ioError . userError $ unlines\n          [ \"morloc init requires pre-built libmorloc.so and morloc-nexus binaries.\"\n          , \"\"\n          , \"Download them from: https://github.com/morloc-project/morloc/releases\"\n          , \"\"\n          , \"Then set MORLOC_RUST_BIN to the directory containing them:\"\n          , \"  export MORLOC_RUST_BIN=/path/to/binaries\"\n          , \"  morloc init -f\"\n          , \"\"\n          , \"For development, you can build from source instead:\"\n          , \"  1. Install Rust: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh\"\n          , \"  2. Set MORLOC_RUST_DIR to the data/rust/ directory in the compiler repo\"\n          , \"  3. Run: morloc init -f\"\n          ]\n\n      sayInfo verbose \"Compiling libmorloc.so (Rust)\"\n      run verbose \"cargo\"\n        [ \"build\", \"--release\"\n        , \"--manifest-path\", rustDir </> \"Cargo.toml\"\n        , \"-p\", \"morloc-runtime\"\n        ]\n      -- Build the .so from the staticlib using gcc --whole-archive.\n      -- This exports ALL symbols, which is required because the Rust runtime's\n      -- internal state (SHM globals, allocator) must be visible to language\n      -- extensions (pymorloc, rmorloc, cppmorloc).\n      -- We cannot use the cdylib directly because Rust's cdylib only exports\n      -- #[no_mangle] pub extern \"C\" symbols, and adding a version script to\n      -- override this conflicts with Rust's own version script on ARM/aarch64.\n      let rustStaticLib = rustDir </> \"target\" </> \"release\" </> \"libmorloc_runtime.a\"\n      run verbose \"gcc\"\n        [ \"-shared\", \"-o\", soPath\n        , \"-Wl,--whole-archive\", rustStaticLib, \"-Wl,--no-whole-archive\"\n        , \"-lpthread\", \"-lrt\", \"-ldl\", \"-lm\"\n        ]\n      hasStrip <- findExecutable \"strip\"\n      case hasStrip of\n        Just stripPath -> run verbose stripPath [soPath]\n        Nothing -> return ()\n\n      sayInfo verbose \"Compiling morloc-nexus (Rust)\"\n      run verbose \"cargo\"\n        [ \"build\", \"--release\"\n        , \"--manifest-path\", rustDir </> \"Cargo.toml\"\n        , \"-p\", \"morloc-nexus\"\n        ]\n      let rustNexus = rustDir </> \"target\" </> \"release\" </> \"morloc-nexus\"\n      run verbose \"cp\" [rustNexus, nexusBinPath]\n      case hasStrip of\n        Just stripPath -> run verbose stripPath [nexusBinPath]\n        Nothing -> return ()\n\n      sayInfo verbose \"Compiling morloc-manager (Rust)\"\n      run verbose \"cargo\"\n        [ \"build\", \"--release\"\n        , \"--manifest-path\", rustDir </> \"Cargo.toml\"\n        , \"-p\", \"morloc-manager\"\n        ]\n      let rustManager = rustDir </> \"target\" </> \"release\" </> \"morloc-manager\"\n          managerBinPath = nexusBinDir </> \"morloc-manager\"\n      run verbose \"cp\" [rustManager, managerBinPath]\n      case hasStrip of\n        Just stripPath -> run verbose stripPath [managerBinPath]\n        Nothing -> return ()\n\n\n  -- Symlink binaries to ~/.local/bin/ if it exists\n  userBinExists <- doesDirectoryExist userBinDir\n  when userBinExists $ do\n    symlinkBinary nexusBinPath (userBinDir </> \"morloc-nexus\")\n    let managerSrc = nexusBinDir </> \"morloc-manager\"\n    managerExists <- doesFileExist managerSrc\n    when managerExists $\n      symlinkBinary managerSrc (userBinDir </> \"morloc-manager\")\n\n  -- Create exe/ and fdb/ directories\n  let exeDir = homeDir </> \"exe\"\n      fdbDir = homeDir </> \"fdb\"\n  ensureDirectory verbose \"morloc exe directory\" exeDir\n  ensureDirectory verbose \"morloc fdb directory\" fdbDir\n\n  -- Configure each language via its init.sh script\n  forM_ DF.langSetups $ \\ls -> do\n    missing <- checkTools (DF.lsRequiredTools ls)\n    if null missing\n      then do\n        hPutStrLn stderr $ \"Building \" <> DF.lsName ls <> \" extension ...\"\n        sayInfo verbose $ \"Configuring \" <> DF.lsName ls <> \" language support\"\n        -- Write data files to build dir\n        forM_ (DF.lsFiles ls) $ \\ef ->\n          TIO.writeFile (buildDir </> DF.embededFileName ef) (DF.embededFileText ef)\n        -- Write and run init script\n        let initPath = buildDir </> \"init.sh\"\n        TIO.writeFile initPath (DF.embededFileText (DF.lsInitScript ls))\n        let sanitizeFlagsStr = if sanitize then \"-fsanitize=alignment -fno-sanitize-recover=alignment\" else \"\"\n        result <- try (run verbose \"bash\" [initPath, homeDir, buildDir, sanitizeFlagsStr]) :: IO (Either SomeException ())\n        case result of\n          Left e -> sayWarning $ DF.lsName ls <> \" setup failed: \" <> displayException e\n          Right _ -> return ()\n        -- Clean up\n        removeFileSafe initPath\n        forM_ (DF.lsFiles ls) $ \\ef ->\n          removeFileSafe (buildDir </> DF.embededFileName ef)\n      else\n        sayWarning $ \"Skipping \" <> DF.lsName ls <> \" setup (missing: \" <> unwords missing <> \")\"\n\n  -- Generate shell completions\n  sayInfo verbose \"Generating shell completions\"\n  Completion.regenerateCompletions verbose homeDir\n\n-- | Search for a Rust workspace directory containing Cargo.toml\nfindRustDir :: [FilePath] -> IO FilePath\nfindRustDir [] = return \"\"\nfindRustDir (d:ds) = do\n  exists <- doesFileExist (d </> \"Cargo.toml\")\n  if exists then return d else findRustDir ds\n\n-- ANSI color wrapping, disabled when stderr is not a terminal\nwithColor :: String -> String -> IO String\nwithColor code msg = do\n  isTty <- hIsTerminalDevice stderr\n  return $ if isTty then code <> msg <> \"\\ESC[0m\" else msg\n\nsayInfo :: Bool -> String -> IO ()\nsayInfo verbose message = when verbose $ do\n  line <- withColor \"\\ESC[34m\" (\"[INFO] \" <> message)\n  hPutStrLn stderr line\n\nsayWarning :: String -> IO ()\nsayWarning message = do\n  line <- withColor \"\\ESC[33m\" (\"[WARNING] \" <> message)\n  hPutStrLn stderr line\n\nsayError :: String -> IO ()\nsayError message = do\n  line <- withColor \"\\ESC[31m\" (\"[ERROR] \" <> message)\n  hPutStrLn stderr line\n\nrun :: Bool -> String -> [String] -> IO ()\nrun verbose cmd args = do\n  when verbose $ do\n    line <- withColor \"\\ESC[2m\" (cmd <> \" \" <> unwords args)\n    hPutStrLn stderr line\n  let cp = (proc cmd args) { std_out = UseHandle stderr }\n  (_, _, _, ph) <- createProcess cp\n  exitCode <- waitForProcess ph\n  case exitCode of\n    ExitSuccess -> return ()\n    ExitFailure code -> ioError . userError $ cmd <> \" exited with code \" <> show code\n\nensureDirectory :: Bool -> String -> FilePath -> IO ()\nensureDirectory verbose description path = do\n  exists <- doesDirectoryExist path\n  if exists\n    then sayInfo verbose $ description ++ \" ... \" ++ path\n    else do\n      createDirectoryIfMissing True path\n      sayInfo verbose $ description ++ \" ... created \" ++ path\n\n-- | Remove a file, ignoring errors if it doesn't exist\nremoveFileSafe :: FilePath -> IO ()\nremoveFileSafe path = do\n  result <- try (removeFile path) :: IO (Either SomeException ())\n  case result of\n    Left _ -> return ()\n    Right _ -> return ()\n\n-- | Remove all contents of a directory (but keep the directory itself)\ncleanDirectory :: FilePath -> IO ()\ncleanDirectory dir = do\n  exists <- doesDirectoryExist dir\n  when exists $ do\n    removeDirectoryRecursive dir\n    createDirectoryIfMissing True dir\n\n-- | Remove all contents of a directory except entries in the keep list\ncleanDirectoryExcept :: FilePath -> [String] -> IO ()\ncleanDirectoryExcept dir keep = do\n  exists <- doesDirectoryExist dir\n  when exists $ do\n    entries <- listDirectory dir\n    forM_ entries $ \\entry -> do\n      unless (entry `elem` keep) $ do\n        let path = dir </> entry\n        isDir <- doesDirectoryExist path\n        if isDir\n          then removeDirectoryRecursive path\n          else removeFile path\n\n-- | Check that a tool exists on PATH, error if not\nrequireTool :: String -> String -> IO ()\nrequireTool tool msg = do\n  found <- findExecutable tool\n  case found of\n    Nothing -> ioError . userError $ tool <> \" not found on PATH. \" <> msg\n    Just _ -> return ()\n\n-- | Create a symlink at dst pointing to src, removing any existing file at dst.\nsymlinkBinary :: FilePath -> FilePath -> IO ()\nsymlinkBinary src dst = do\n  -- Remove existing file or symlink at destination\n  isLink <- pathIsSymbolicLink dst `catch` (\\(_ :: SomeException) -> return False)\n  when isLink $ removeFile dst\n  isFile <- doesFileExist dst\n  when isFile $ removeFile dst\n  createFileLink src dst\n\n-- | Check which tools from a list are missing. Returns list of missing tool names.\ncheckTools :: [String] -> IO [String]\ncheckTools tools = do\n  results <- forM tools $ \\tool -> do\n    found <- findExecutable tool\n    return (tool, found)\n  return [t | (t, Nothing) <- results]\n"
  },
  {
    "path": "library/Morloc/Completion.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# OPTIONS_GHC -Wno-unused-top-binds #-}\n\n{- |\nModule      : Morloc.Completion\nDescription : Generate shell completion scripts for morloc and installed programs\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.Completion\n  ( regenerateCompletions\n  ) where\n\nimport Control.Monad (when)\nimport Control.Exception (SomeException, try)\nimport Data.Aeson ((.!=), (.:), (.:?))\nimport qualified Data.Aeson as JSON\nimport Data.Aeson.Types (Parser)\nimport qualified Data.ByteString.Lazy as BL\nimport Data.Char (isAlphaNum)\nimport Data.List (intercalate, isSuffixOf, nub)\nimport Data.Maybe (catMaybes)\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport System.Directory (createDirectoryIfMissing, listDirectory)\nimport System.FilePath (dropExtension, takeFileName, (</>))\nimport System.IO (hIsTerminalDevice, hPutStrLn, stderr)\n\n-- Lightweight manifest types for completion generation\n\ndata ManifestInfo = ManifestInfo\n  { miName :: Text\n  , miCommands :: [CmdInfo]\n  , miGroups :: [GroupInfo]\n  }\n\ndata CmdInfo = CmdInfo\n  { ciName :: Text\n  , ciGroup :: Maybe Text\n  , ciArgs :: [ArgInfo]\n  }\n\ndata ArgInfo\n  = PosArg\n  | OptArg {oaShort :: Maybe Char, oaLong :: Maybe Text}\n  | FlagArg {faShort :: Maybe Char, faLong :: Maybe Text, faLongRev :: Maybe Text}\n  | GrpArg {gaGroupOpt :: Maybe (Maybe Char, Maybe Text), gaEntries :: [ArgInfo]}\n\ndata GroupInfo = GroupInfo\n  { giName :: Text\n  }\n\n-- JSON parsing instances\n\ninstance JSON.FromJSON ManifestInfo where\n  parseJSON = JSON.withObject \"ManifestInfo\" $ \\o ->\n    ManifestInfo\n      <$> (o .:? \"name\" .!= \"\")\n      <*> (o .:? \"commands\" .!= [])\n      <*> (o .:? \"groups\" .!= [])\n\ninstance JSON.FromJSON CmdInfo where\n  parseJSON = JSON.withObject \"CmdInfo\" $ \\o ->\n    CmdInfo\n      <$> o .: \"name\"\n      <*> o .:? \"group\"\n      <*> (o .:? \"args\" .!= [])\n\ninstance JSON.FromJSON ArgInfo where\n  parseJSON = JSON.withObject \"ArgInfo\" $ \\o -> do\n    kind <- o .: \"kind\" :: Parser Text\n    case kind of\n      \"pos\" -> return PosArg\n      \"opt\" -> do\n        s <- o .:? \"short\"\n        l <- o .:? \"long\"\n        return $ OptArg (fmap charFromText s) l\n      \"flag\" -> do\n        s <- o .:? \"short\"\n        l <- o .:? \"long\"\n        lr <- o .:? \"long_rev\"\n        return $ FlagArg (fmap charFromText s) l lr\n      \"grp\" -> do\n        gopt <- o .:? \"group_opt\"\n        entries <- o .:? \"entries\" .!= []\n        parsedOpt <- traverse parseGroupOpt gopt\n        parsedEntries <- mapM parseEntry entries\n        return $ GrpArg parsedOpt parsedEntries\n      _ -> return PosArg\n    where\n      charFromText :: Text -> Char\n      charFromText t = case T.unpack t of\n        [c] -> c\n        _ -> '?'\n\n      parseGroupOpt :: JSON.Value -> Parser (Maybe Char, Maybe Text)\n      parseGroupOpt = JSON.withObject \"GroupOpt\" $ \\o' -> do\n        s <- o' .:? \"short\"\n        l <- o' .:? \"long\"\n        return (fmap charFromText s, l)\n\n      parseEntry :: JSON.Value -> Parser ArgInfo\n      parseEntry = JSON.withObject \"Entry\" $ \\o -> do\n        arg <- o .: \"arg\"\n        JSON.parseJSON arg\n\ninstance JSON.FromJSON GroupInfo where\n  parseJSON = JSON.withObject \"GroupInfo\" $ \\o ->\n    GroupInfo <$> o .: \"name\"\n\n{- | Read all manifests and regenerate completion files.\nThe Bool parameter controls whether to print activation instructions.\n-}\nregenerateCompletions :: Bool -> String -> IO ()\nregenerateCompletions printInstructions configHome = do\n  let fdbDir = configHome </> \"fdb\"\n      compDir = configHome </> \"completions\"\n  createDirectoryIfMissing True compDir\n\n  manifests <- loadManifests fdbDir\n  let bashScript = generateBash manifests\n      zshScript = generateZsh manifests\n      bashPath = compDir </> \"morloc-completions.bash\"\n      zshPath = compDir </> \"_morloc_completions\"\n  writeFile bashPath bashScript\n  writeFile zshPath zshScript\n\n  when printInstructions $ do\n      isTty <- hIsTerminalDevice stderr\n      let info msg = if isTty then \"\\ESC[34m[INFO] \" ++ msg ++ \"\\ESC[0m\" else \"[INFO] \" ++ msg\n      hPutStrLn stderr $ info $ \"Shell completions written to \" ++ compDir ++ \"/\"\n      hPutStrLn stderr $ info $ \"  Bash: add to ~/.bashrc:  source \" ++ bashPath\n      hPutStrLn stderr $ info $ \"  Zsh:  add to ~/.zshrc:   source \" ++ zshPath\n\n-- | Load all .manifest files from the fdb directory\nloadManifests :: FilePath -> IO [ManifestInfo]\nloadManifests fdbDir = do\n  result <- try (listDirectory fdbDir) :: IO (Either SomeException [FilePath])\n  case result of\n    Left _ -> return []\n    Right entries -> do\n      let manifestFiles = filter (\".manifest\" `isSuffixOf`) entries\n      catMaybes <$> mapM loadOne manifestFiles\n  where\n    -- Derive program name from filename (e.g. \"pricer.manifest\" -> \"pricer\")\n    nameFromFile f = T.pack (dropExtension (takeFileName f))\n\n    loadOne f = do\n      r <- try (BL.readFile (fdbDir </> f)) :: IO (Either SomeException BL.ByteString)\n      case r of\n        Left _ -> return Nothing\n        Right bs -> case JSON.eitherDecode bs of\n          Right m ->\n            let m' = if T.null (miName m) then m {miName = nameFromFile f} else m\n             in return (Just m')\n          Left err -> do\n            hPutStrLn stderr $ \"Warning: failed to parse \" ++ f ++ \": \" ++ err\n            return Nothing\n\n-- | Collect all flag/opt strings for a command\nargCompletionWords :: [ArgInfo] -> [String]\nargCompletionWords = concatMap argWords\n  where\n    argWords PosArg = []\n    argWords (OptArg s l) = shortWord s ++ longWord l\n    argWords (FlagArg s l lr) = shortWord s ++ longWord l ++ longWord lr\n    argWords (GrpArg gopt entries) =\n      maybe [] (\\(s, l) -> shortWord s ++ longWord l) gopt\n        ++ concatMap argWords entries\n\n    shortWord (Just c) = ['-' : [c]]\n    shortWord Nothing = []\n    longWord (Just t) = [\"--\" ++ T.unpack t]\n    longWord Nothing = []\n\n-- | Get flags/opts that take a value (opts and grp opts, but not flags)\nvalueArgs :: [ArgInfo] -> [String]\nvalueArgs = concatMap go\n  where\n    go PosArg = []\n    go (OptArg s l) = shortWord s ++ longWord l\n    go FlagArg {} = []\n    go (GrpArg gopt entries) =\n      maybe [] (\\(gs, gl) -> shortWord gs ++ longWord gl) gopt\n        ++ concatMap go entries\n\n    shortWord (Just c) = ['-' : [c]]\n    shortWord Nothing = []\n    longWord (Just t) = [\"--\" ++ T.unpack t]\n    longWord Nothing = []\n\n-- | Sanitize a name for use as a bash function name\nsanitizeName :: Text -> String\nsanitizeName = map (\\c -> if isAlphaNum c then c else '_') . T.unpack\n\n-- Bash generation\n\ngenerateBash :: [ManifestInfo] -> String\ngenerateBash manifests =\n  unlines $\n    [ \"# Auto-generated by morloc -- do not edit manually\"\n    , \"\"\n    ]\n      ++ morlocBashCompletion\n      ++ concatMap programBashCompletion manifests\n\nmorlocBashCompletion :: [String]\nmorlocBashCompletion =\n  [ \"_morloc() {\"\n  , \"  local cur prev\"\n  , \"  COMPREPLY=()\"\n  , \"  cur=\\\"${COMP_WORDS[COMP_CWORD]}\\\"\"\n  , \"  prev=\\\"${COMP_WORDS[COMP_CWORD-1]}\\\"\"\n  , \"\"\n  , \"  if [[ $COMP_CWORD -eq 1 ]]; then\"\n  , \"    COMPREPLY=($(compgen -W \\\"make install typecheck dump init list uninstall\\\" -- \\\"$cur\\\"))\"\n  , \"    return\"\n  , \"  fi\"\n  , \"\"\n  , \"  case \\\"${COMP_WORDS[1]}\\\" in\"\n  , \"    make)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        -o|--outfile|--config|--include) return ;;\"\n  , \"      esac\"\n  , \"      if [[ \\\"$cur\\\" != -* ]]; then\"\n  , \"        COMPREPLY=($(compgen -f -X '!*.loc' -- \\\"$cur\\\") $(compgen -d -- \\\"$cur\\\"))\"\n  , \"        return\"\n  , \"      fi\"\n  , \"      COMPREPLY=($(compgen -W \\\"-e --expression -o --outfile --install -f --force --include --config -v --vanilla\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"    install)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        --config) return ;;\"\n  , \"      esac\"\n  , \"      COMPREPLY=($(compgen -W \\\"--config --vanilla -v -f --force --ssh --no-typecheck\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"    typecheck)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        --config) return ;;\"\n  , \"      esac\"\n  , \"      if [[ \\\"$cur\\\" != -* ]]; then\"\n  , \"        COMPREPLY=($(compgen -f -X '!*.loc' -- \\\"$cur\\\") $(compgen -d -- \\\"$cur\\\"))\"\n  , \"        return\"\n  , \"      fi\"\n  , \"      COMPREPLY=($(compgen -W \\\"--config --vanilla -t --type --raw -e --expression -v -r --realize\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"    dump)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        --config) return ;;\"\n  , \"      esac\"\n  , \"      if [[ \\\"$cur\\\" != -* ]]; then\"\n  , \"        COMPREPLY=($(compgen -f -X '!*.loc' -- \\\"$cur\\\") $(compgen -d -- \\\"$cur\\\"))\"\n  , \"        return\"\n  , \"      fi\"\n  , \"      COMPREPLY=($(compgen -W \\\"--config --vanilla -v -e --expression\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"    init)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        --config) return ;;\"\n  , \"      esac\"\n  , \"      COMPREPLY=($(compgen -W \\\"--config -q --quiet --vanilla -f --force --slurm\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"    list)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        --config) return ;;\"\n  , \"      esac\"\n  , \"      COMPREPLY=($(compgen -W \\\"--modules --programs --config --vanilla -v\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"    uninstall)\"\n  , \"      case \\\"$prev\\\" in\"\n  , \"        --config) return ;;\"\n  , \"      esac\"\n  , \"      COMPREPLY=($(compgen -W \\\"--module --program --dry-run --config --vanilla\\\" -- \\\"$cur\\\"))\"\n  , \"      ;;\"\n  , \"  esac\"\n  , \"}\"\n  , \"complete -F _morloc morloc\"\n  , \"\"\n  ]\n\nprogramBashCompletion :: ManifestInfo -> [String]\nprogramBashCompletion mi =\n  let name = miName mi\n      safeName = sanitizeName name\n      funcName = \"_morloc_prog_\" ++ safeName\n      groups = miGroups mi\n      groupNames = map (T.unpack . giName) groups\n      cmds = miCommands mi\n      ungroupedCmds = [c | c <- cmds, ciGroup c == Nothing]\n      ungroupedNames = map (T.unpack . ciName) ungroupedCmds\n      firstLevelWords = nub (ungroupedNames ++ groupNames)\n      groupedCmds grp = [c | c <- cmds, ciGroup c == Just grp]\n   in if T.null name\n        then []\n        else\n          [ \"# --- Installed program: \" ++ T.unpack name ++ \" ---\"\n          , funcName ++ \"() {\"\n          , \"  local cur prev\"\n          , \"  COMPREPLY=()\"\n          , \"  cur=\\\"${COMP_WORDS[COMP_CWORD]}\\\"\"\n          , \"  prev=\\\"${COMP_WORDS[COMP_CWORD-1]}\\\"\"\n          , \"\"\n          , \"  if [[ $COMP_CWORD -eq 1 ]]; then\"\n          , \"    COMPREPLY=($(compgen -W \\\"\" ++ unwords firstLevelWords ++ \"\\\" -- \\\"$cur\\\"))\"\n          , \"    return\"\n          , \"  fi\"\n          , \"\"\n          , \"  case \\\"${COMP_WORDS[1]}\\\" in\"\n          ]\n            -- group cases\n            ++ concatMap\n              ( \\grp ->\n                  let grpCmds = groupedCmds (giName grp)\n                      grpCmdNames = map (T.unpack . ciName) grpCmds\n                   in [ \"    \" ++ T.unpack (giName grp) ++ \")\"\n                      , \"      if [[ $COMP_CWORD -eq 2 ]]; then\"\n                      , \"        COMPREPLY=($(compgen -W \\\"\" ++ unwords grpCmdNames ++ \"\\\" -- \\\"$cur\\\"))\"\n                      , \"        return\"\n                      , \"      fi\"\n                      ]\n                        ++ bashCommandFlagCases 3 grpCmds\n                        ++ [\"      ;;\"]\n              )\n              groups\n            -- ungrouped command cases\n            ++ concatMap (bashSingleCommandCase 1) ungroupedCmds\n            ++ [ \"  esac\"\n               , \"}\"\n               , \"complete -F \" ++ funcName ++ \" \" ++ T.unpack name\n               , \"\"\n               ]\n\n-- | Generate flag completion for a direct command at the given COMP_CWORD depth\nbashSingleCommandCase :: Int -> CmdInfo -> [String]\nbashSingleCommandCase _depth cmd =\n  let words' = argCompletionWords (ciArgs cmd)\n      valArgs = valueArgs (ciArgs cmd)\n   in if null words'\n        then\n          [ \"    \" ++ T.unpack (ciName cmd) ++ \")\"\n          , \"      ;;\"\n          ]\n        else\n          [ \"    \" ++ T.unpack (ciName cmd) ++ \")\"\n          ]\n            ++ ( if null valArgs\n                  then []\n                  else\n                    [ \"      case \\\"$prev\\\" in\"\n                    , \"        \" ++ intercalate \"|\" valArgs ++ \") return ;;\"\n                    , \"      esac\"\n                    ]\n               )\n            ++ [ \"      COMPREPLY=($(compgen -W \\\"\" ++ unwords words' ++ \"\\\" -- \\\"$cur\\\"))\"\n               , \"      ;;\"\n               ]\n\n-- | Generate command dispatch within a group (at COMP_CWORD == depth for command name)\nbashCommandFlagCases :: Int -> [CmdInfo] -> [String]\nbashCommandFlagCases depth cmds =\n  if null cmds\n    then []\n    else\n      [ \"      case \\\"${COMP_WORDS[\" ++ show (depth - 1) ++ \"]}\\\" in\"\n      ]\n        ++ concatMap\n          ( \\cmd ->\n              let words' = argCompletionWords (ciArgs cmd)\n                  valArgs = valueArgs (ciArgs cmd)\n               in if null words'\n                    then []\n                    else\n                      [ \"        \" ++ T.unpack (ciName cmd) ++ \")\"\n                      ]\n                        ++ ( if null valArgs\n                              then []\n                              else\n                                [ \"          case \\\"$prev\\\" in\"\n                                , \"            \" ++ intercalate \"|\" valArgs ++ \") return ;;\"\n                                , \"          esac\"\n                                ]\n                           )\n                        ++ [ \"          COMPREPLY=($(compgen -W \\\"\" ++ unwords words' ++ \"\\\" -- \\\"$cur\\\"))\"\n                           , \"          ;;\"\n                           ]\n          )\n          cmds\n        ++ [ \"      esac\"\n           ]\n\n-- Zsh generation\n\ngenerateZsh :: [ManifestInfo] -> String\ngenerateZsh manifests =\n  unlines $\n    [ \"#compdef morloc\"\n    , \"# Auto-generated by morloc -- do not edit manually\"\n    , \"\"\n    ]\n      ++ morlocZshCompletion\n      ++ concatMap programZshCompletion manifests\n\nmorlocZshCompletion :: [String]\nmorlocZshCompletion =\n  [ \"_morloc() {\"\n  , \"  local -a subcmds\"\n  , \"  subcmds=(\"\n  , \"    'make:Build a morloc script'\"\n  , \"    'install:Install a morloc module'\"\n  , \"    'typecheck:Typecheck a morloc program'\"\n  , \"    'dump:Dump parsed code'\"\n  , \"    'init:Initialize morloc environment'\"\n  , \"    'list:List installed modules and programs'\"\n  , \"    'uninstall:Uninstall a module or program'\"\n  , \"  )\"\n  , \"\"\n  , \"  if (( CURRENT == 2 )); then\"\n  , \"    _describe 'subcommand' subcmds\"\n  , \"    return\"\n  , \"  fi\"\n  , \"\"\n  , \"  case \\\"$words[2]\\\" in\"\n  , \"    make)\"\n  , \"      _arguments \\\\\"\n  , \"        '-e[Read as expression]' \\\\\"\n  , \"        '--expression[Read as expression]' \\\\\"\n  , \"        '-o[Output file]:outfile:_files' \\\\\"\n  , \"        '--outfile[Output file]:outfile:_files' \\\\\"\n  , \"        '--install[Install to PATH]' \\\\\"\n  , \"        '-f[Force overwrite]' \\\\\"\n  , \"        '--force[Force overwrite]' \\\\\"\n  , \"        '--include[Include pattern]:pattern:' \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '-v[Verbose]' \\\\\"\n  , \"        '--vanilla[Ignore local config]' \\\\\"\n  , \"        '*:script:_files -g \\\"*.loc\\\"'\"\n  , \"      ;;\"\n  , \"    install)\"\n  , \"      _arguments \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '--vanilla[Ignore local config]' \\\\\"\n  , \"        '-v[Verbose]' \\\\\"\n  , \"        '-f[Force overwrite]' \\\\\"\n  , \"        '--force[Force overwrite]' \\\\\"\n  , \"        '--ssh[Use SSH protocol]' \\\\\"\n  , \"        '--no-typecheck[Skip typechecking]'\"\n  , \"      ;;\"\n  , \"    typecheck)\"\n  , \"      _arguments \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '--vanilla[Ignore local config]' \\\\\"\n  , \"        '-t[Parse type string]' \\\\\"\n  , \"        '--type[Parse type string]' \\\\\"\n  , \"        '--raw[Print raw objects]' \\\\\"\n  , \"        '-e[Read as expression]' \\\\\"\n  , \"        '--expression[Read as expression]' \\\\\"\n  , \"        '-v[Verbose]' \\\\\"\n  , \"        '-r[Typecheck realizations]' \\\\\"\n  , \"        '--realize[Typecheck realizations]' \\\\\"\n  , \"        '*:script:_files -g \\\"*.loc\\\"'\"\n  , \"      ;;\"\n  , \"    dump)\"\n  , \"      _arguments \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '--vanilla[Ignore local config]' \\\\\"\n  , \"        '-v[Verbose]' \\\\\"\n  , \"        '-e[Read as expression]' \\\\\"\n  , \"        '--expression[Read as expression]' \\\\\"\n  , \"        '*:script:_files -g \\\"*.loc\\\"'\"\n  , \"      ;;\"\n  , \"    init)\"\n  , \"      _arguments \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '-q[Minimal output]' \\\\\"\n  , \"        '--quiet[Minimal output]' \\\\\"\n  , \"        '--vanilla[Ignore local config]' \\\\\"\n  , \"        '-f[Force overwrite]' \\\\\"\n  , \"        '--force[Force overwrite]' \\\\\"\n  , \"        '--slurm[Enable SLURM support]'\"\n  , \"      ;;\"\n  , \"    list)\"\n  , \"      _arguments \\\\\"\n  , \"        '--modules[List only modules]' \\\\\"\n  , \"        '--programs[List only programs]' \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '--vanilla[Ignore local config]' \\\\\"\n  , \"        '-v[Verbose]'\"\n  , \"      ;;\"\n  , \"    uninstall)\"\n  , \"      _arguments \\\\\"\n  , \"        '--module[Uninstall only the module]' \\\\\"\n  , \"        '--program[Uninstall only the program]' \\\\\"\n  , \"        '--dry-run[Show what would be removed]' \\\\\"\n  , \"        '--config[Config file]:config:_files' \\\\\"\n  , \"        '--vanilla[Ignore local config]'\"\n  , \"      ;;\"\n  , \"  esac\"\n  , \"}\"\n  , \"compdef _morloc morloc\"\n  , \"\"\n  ]\n\nprogramZshCompletion :: ManifestInfo -> [String]\nprogramZshCompletion mi =\n  let name = miName mi\n      safeName = sanitizeName name\n      funcName = \"_morloc_prog_\" ++ safeName\n      groups = miGroups mi\n      cmds = miCommands mi\n      ungroupedCmds = [c | c <- cmds, ciGroup c == Nothing]\n      groupedCmds grp = [c | c <- cmds, ciGroup c == Just grp]\n      -- Build first-level descriptions\n      ungroupedDescs = map (\\c -> \"    '\" ++ T.unpack (ciName c) ++ \":\" ++ T.unpack (ciName c) ++ \"'\") ungroupedCmds\n      groupDescs = map (\\g -> \"    '\" ++ T.unpack (giName g) ++ \":\" ++ T.unpack (giName g) ++ \"'\") groups\n      allDescs = ungroupedDescs ++ groupDescs\n   in if T.null name\n        then []\n        else\n          [ \"# --- Installed program: \" ++ T.unpack name ++ \" ---\"\n          , funcName ++ \"() {\"\n          , \"  local -a cmds\"\n          , \"  cmds=(\"\n          ]\n            ++ allDescs\n            ++ [ \"  )\"\n               , \"\"\n               , \"  if (( CURRENT == 2 )); then\"\n               , \"    _describe 'command' cmds\"\n               , \"    return\"\n               , \"  fi\"\n               , \"\"\n               , \"  case \\\"$words[2]\\\" in\"\n               ]\n            -- group cases\n            ++ concatMap\n              ( \\grp ->\n                  let grpCmds = groupedCmds (giName grp)\n                      grpDescs = map (\\c -> \"        '\" ++ T.unpack (ciName c) ++ \":\" ++ T.unpack (ciName c) ++ \"'\") grpCmds\n                   in [ \"    \" ++ T.unpack (giName grp) ++ \")\"\n                      , \"      if (( CURRENT == 3 )); then\"\n                      , \"        local -a grpcmds=(\"\n                      ]\n                        ++ grpDescs\n                        ++ [ \"        )\"\n                           , \"        _describe 'command' grpcmds\"\n                           , \"        return\"\n                           , \"      fi\"\n                           ]\n                        ++ zshCommandFlagCases 3 grpCmds\n                        ++ [ \"      ;;\"\n                           ]\n              )\n              groups\n            -- ungrouped command cases\n            ++ concatMap zshSingleCommandCase ungroupedCmds\n            ++ [ \"  esac\"\n               , \"}\"\n               , \"compdef \" ++ funcName ++ \" \" ++ T.unpack name\n               , \"\"\n               ]\n\n-- | Generate zsh _arguments for a single command\nzshSingleCommandCase :: CmdInfo -> [String]\nzshSingleCommandCase cmd =\n  let argSpecs = zshArgSpecs (ciArgs cmd)\n   in if null argSpecs\n        then\n          [ \"    \" ++ T.unpack (ciName cmd) ++ \")\"\n          , \"      ;;\"\n          ]\n        else\n          [ \"    \" ++ T.unpack (ciName cmd) ++ \")\"\n          , \"      _arguments \\\\\"\n          ]\n            ++ formatZshArgs argSpecs\n            ++ [ \"      ;;\"\n               ]\n\n-- | Generate dispatch within a group for zsh\nzshCommandFlagCases :: Int -> [CmdInfo] -> [String]\nzshCommandFlagCases depth cmds =\n  if null cmds\n    then []\n    else\n      [ \"      case \\\"$words[\" ++ show depth ++ \"]\\\" in\"\n      ]\n        ++ concatMap\n          ( \\cmd ->\n              let argSpecs = zshArgSpecs (ciArgs cmd)\n               in if null argSpecs\n                    then []\n                    else\n                      [ \"        \" ++ T.unpack (ciName cmd) ++ \")\"\n                      , \"          _arguments \\\\\"\n                      ]\n                        ++ map (\"    \" ++) (formatZshArgs argSpecs)\n                        ++ [ \"          ;;\"\n                           ]\n          )\n          cmds\n        ++ [ \"      esac\"\n           ]\n\n-- | Convert args to zsh _arguments spec strings\nzshArgSpecs :: [ArgInfo] -> [String]\nzshArgSpecs = concatMap go\n  where\n    go PosArg = []\n    go (OptArg s l) =\n      maybe [] (\\c -> [\"'-\" ++ [c] ++ \"[Option]:value:'\"]) s\n        ++ maybe [] (\\t -> [\"'--\" ++ T.unpack t ++ \"[Option]:value:'\"]) l\n    go (FlagArg s l lr) =\n      maybe [] (\\c -> [\"'-\" ++ [c] ++ \"[Flag]'\"]) s\n        ++ maybe [] (\\t -> [\"'--\" ++ T.unpack t ++ \"[Flag]'\"]) l\n        ++ maybe [] (\\t -> [\"'--\" ++ T.unpack t ++ \"[Flag]'\"]) lr\n    go (GrpArg gopt entries) =\n      maybe\n        []\n        ( \\(s, l) ->\n            maybe [] (\\c -> [\"'-\" ++ [c] ++ \"[Group option]:value:'\"]) s\n              ++ maybe [] (\\t -> [\"'--\" ++ T.unpack t ++ \"[Group option]:value:'\"]) l\n        )\n        gopt\n        ++ concatMap go entries\n\n-- | Format zsh args with proper line continuation\nformatZshArgs :: [String] -> [String]\nformatZshArgs [] = []\nformatZshArgs [x] = [\"        \" ++ x]\nformatZshArgs (x : xs) = (\"        \" ++ x ++ \" \\\\\") : formatZshArgs xs\n"
  },
  {
    "path": "library/Morloc/Config.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Config\nDescription : Configuration loading and default paths\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nLoads the morloc configuration from @~\\/.local\\/share\\/morloc\\/config@ (YAML),\nmodule-level configs from @\\<module\\>.yaml@, and build configs. Also sets up\nper-language server sockets for IPC during pool execution.\n-}\nmodule Morloc.Config\n  ( Config (..)\n  , loadMorlocConfig\n  , loadModuleConfig\n  , loadDefaultMorlocConfig\n  , loadBuildConfig\n  , setupServerAndSocket\n  , getDefaultConfigFilepath\n  , getDefaultMorlocLibrary\n  ) where\n\nimport qualified Data.Aeson.KeyMap as K\nimport qualified Data.Map.Strict as Map\nimport Data.Text (Text)\nimport qualified Data.Yaml as Y\nimport qualified Data.Yaml.Config as YC\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.LangRegistry as LR\nimport qualified Morloc.Language as ML\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport qualified Morloc.System as MS\nimport System.Environment (lookupEnv)\n\ngetDefaultConfigFilepath :: IO Path\ngetDefaultConfigFilepath = MS.combine <$> getDefaultMorlocHome <*> pure \"config\"\n\n-- | Load the default Morloc configuration, ignoring any local configurations.\nloadDefaultMorlocConfig :: IO Config\nloadDefaultMorlocConfig = do\n  defaults <- defaultFields\n  return $\n    Config\n      (MT.unpack . fromJust $ defaults K.!? \"home\")\n      (MT.unpack . fromJust $ defaults K.!? \"source\")\n      (MT.unpack . fromJust $ defaults K.!? \"plane\")\n      (MT.unpack . fromJust $ defaults K.!? \"plane-core\")\n      (MT.unpack . fromJust $ defaults K.!? \"tmpdir\")\n      (MT.unpack . fromJust $ defaults K.!? \"build-config\")\n      Map.empty -- configLangOverrides\n      Nothing   -- configRegistry\n\n{- | Load a Morloc config file. If no file is given (i.e., Nothing), then the\ndefault configuration will be used.\n-}\nloadMorlocConfig :: Maybe Path -> IO Config\nloadMorlocConfig Nothing = do\n  defaults <- defaultFields\n  MS.loadYamlConfig\n    Nothing\n    (YC.useCustomEnv defaults)\n    loadDefaultMorlocConfig\nloadMorlocConfig (Just configFile) = do\n  configExists <- MS.doesFileExist configFile\n  defaults <- defaultFields\n  if configExists\n    then\n      MS.loadYamlConfig\n        (Just [configFile])\n        (YC.useCustomEnv defaults)\n        loadDefaultMorlocConfig\n    else\n      loadMorlocConfig Nothing\n\nloadModuleConfig :: Maybe Path -> MorlocMonad ModuleConfig\nloadModuleConfig Nothing = return defaultValue\nloadModuleConfig (Just configFile) = do\n  let moduleConfigFile = MS.dropExtension configFile <> \".yaml\"\n  configExists <- liftIO $ MS.doesFileExist moduleConfigFile\n  if configExists\n    then do\n      result <- liftIO $ Y.decodeFileEither moduleConfigFile\n      case result of\n        Left errMsg ->\n          MM.throwSystemError $\n            \"Failed to parse module config file '\"\n              <> pretty configFile\n              <> \"': \"\n              <> pretty (Y.prettyPrintParseException errMsg)\n        Right config -> return config\n    else\n      return defaultValue\n\nloadBuildConfig :: Config -> IO BuildConfig\nloadBuildConfig config = do\n  let configFile = configBuildConfig config\n  configExists <- MS.doesFileExist configFile\n  if configExists\n    then do\n      result <- Y.decodeFileEither configFile\n      case result of\n        Left errMsg ->\n          error $\n            \"Failed to parse build config file '\" <> configFile <> \"': \" <> Y.prettyPrintParseException errMsg\n        Right buildConfig -> return buildConfig\n    else\n      return defaultValue\n\nsetupServerAndSocket ::\n  Config ->\n  LangRegistry ->\n  Lang ->\n  Socket\nsetupServerAndSocket c reg lang = Socket lang args socket\n  where\n    name = ML.langName lang\n    -- Look up run command: config overrides take precedence over registry defaults\n    runCmd = case Map.lookup name (configLangOverrides c) of\n      Just cmd -> cmd\n      Nothing -> LR.registryRunCommand reg name\n    isCompiled = LR.registryIsCompiled reg name\n    poolExe = ML.makeExecutablePoolName lang\n\n    args\n      | isCompiled = [\"./\" <> pretty poolExe]\n      | null runCmd = [pretty name, pretty poolExe]\n      | otherwise = map pretty runCmd ++ [pretty poolExe]\n\n    socket = \"pipe-\" <> pretty (ML.showLangName lang)\n\n-- This is where the default file organization of morloc is set\ndefaultFields :: IO (K.KeyMap Text)\ndefaultFields = do\n  home <- MT.pack <$> getDefaultMorlocHome\n  lib <- MT.pack <$> getDefaultMorlocSource\n  tmp <- MT.pack <$> getDefaultMorlocTmpDir\n  buildConfig <- MT.pack <$> getDefaultMorlocBuildConfig\n  return $\n    K.fromList\n      [ (\"home\", home)\n      , (\"source\", lib)\n      , (\"plane\", \"default\")\n      , (\"plane-core\", \"morloclib\")\n      , (\"tmpdir\", tmp)\n      , (\"build-config\", buildConfig)\n      ]\n\n-- | Get the Morloc home directory (absolute path).\n-- Respects MORLOC_HOME env var, falling back to ~/.local/share/morloc.\ngetDefaultMorlocHome :: IO Path\ngetDefaultMorlocHome = do\n  envHome <- lookupEnv \"MORLOC_HOME\"\n  case envHome of\n    Just p | not (null p) -> return p\n    _ -> MS.combine <$> MS.getHomeDirectory <*> pure \".local/share/morloc\"\n\n{- | Get the Morloc source directory (absolute path). Usually this will be a\nfolder inside the home directory. This is the path to the source data (often\na get repo).\n-}\ngetDefaultMorlocSource :: IO Path\ngetDefaultMorlocSource = MS.combine <$> getDefaultMorlocHome <*> pure \"src/morloc/plane\"\n\n-- | Get the path to the morloc shared libraries folder\ngetDefaultMorlocLibrary :: IO Path\ngetDefaultMorlocLibrary = MS.combine <$> getDefaultMorlocHome <*> pure \"lib\"\n\n-- | Get the Morloc default temporary directory.\ngetDefaultMorlocTmpDir :: IO Path\ngetDefaultMorlocTmpDir = MS.combine <$> getDefaultMorlocHome <*> pure \"tmp\"\n\n{- | Get the Morloc default build config. This will store `morloc init` flags\nthat affect all builds\n-}\ngetDefaultMorlocBuildConfig :: IO Path\ngetDefaultMorlocBuildConfig = MS.combine <$> getDefaultMorlocHome <*> pure \".build-config.yaml\"\n"
  },
  {
    "path": "library/Morloc/Data/Annotated.hs",
    "content": "{-# LANGUAGE TypeFamilies #-}\n\n{- |\nModule      : Morloc.Data.Annotated\nDescription : Class of annotated entities\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nTypeclass for containers that pair an annotation with a value, providing\naccessors and mapping operations over both components.\n-}\nmodule Morloc.Data.Annotated (Annotated (..)) where\n\nclass Annotated f where\n  -- | Extract the annotation\n  ann :: f a b -> a\n\n  -- | Extract the value\n  val :: f a b -> b\n\n  -- | Pair an annotation with a value\n  annotate :: a -> b -> f a b\n\n  -- | Apply a function to both the annotation and the value\n  annapp :: (a -> b -> c) -> f a b -> c\n  annapp f x = f (ann x) (val x)\n\n  -- | Replace the value using a function of both annotation and value\n  reval :: (a -> b -> b') -> f a b -> f a b'\n  reval f x = annotate (ann x) $ f (ann x) (val x)\n\n  -- | Replace the annotation using a function of both annotation and value\n  reann :: (a -> b -> a') -> f a b -> f a' b\n  reann f x = annotate (f (ann x) (val x)) (val x)\n\n  -- | Monadic 'annapp'\n  annappM :: (Monad m) => (a -> b -> m c) -> f a b -> m c\n  annappM f x = f (ann x) (val x)\n\n  -- | Monadic 'reval'\n  revalM :: (Monad m) => (a -> b -> m b') -> f a b -> m (f a b')\n  revalM f x = annotate (ann x) <$> f (ann x) (val x)\n\n  -- | Monadic 'reann'\n  reannM :: (Monad m) => (a -> b -> m a') -> f a b -> m (f a' b)\n  reannM f x = annotate <$> f (ann x) (val x) <*> pure (val x)\n\ninstance Annotated (,) where\n  ann = fst\n  val = snd\n  annotate a b = (a, b)\n"
  },
  {
    "path": "library/Morloc/Data/Bifoldable.hs",
    "content": "{-# LANGUAGE TypeFamilies #-}\n\n{- |\nModule      : Morloc.Data.Bifoldable\nDescription : The Bifoldable typeclass with monadic instances\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nBifoldable operations for containers with two type parameters. Each operation\nhas a monadic variant (the primary interface) and a pure variant derived via\n'Identity'. The @ubi*@ family of functions specializes to cases where both\ntype parameters are equal (@a ~ b@).\n-}\nmodule Morloc.Data.Bifoldable (Bifoldable (..)) where\n\nimport Control.Monad.Identity (runIdentity)\nimport Data.Foldable (foldlM, foldrM)\nimport Data.Maybe (catMaybes)\nimport Morloc.Data.Bifunctor\n\n-- | Lift a binary function into a monadic return\nreturn2 :: (Monad m) => (a -> b -> c) -> (a -> b -> m c)\nreturn2 f x y = return $ f x y\n\nclass (Bifunctor f) => Bifoldable f where\n  -- | Collect both components into a monadic list\n  bilistM :: (Monad m) => (a -> m c) -> (b -> m c) -> f a b -> m [c]\n\n  -- | Collect only second components into a monadic list\n  bilistsndM :: (Monad m) => (b -> m c) -> f a b -> m [c]\n  bilistsndM f = fmap catMaybes . bilistM (return . const Nothing) (fmap Just . f)\n\n  -- | Collect only first components into a monadic list\n  bilistfstM :: (Monad m) => (a -> m c) -> f a b -> m [c]\n  bilistfstM f = fmap catMaybes . bilistM (fmap Just . f) (return . const Nothing)\n\n  -- | Monoidal append over both components (monadic)\n  biappendM :: (Monad m, Monoid c) => (a -> m c) -> (b -> m c) -> f a b -> m c\n  biappendM f g = fmap mconcat . bilistM f g\n\n  -- | Concatenate results from a foldable of bifunctors (monadic)\n  bicatM :: (Monad m, Foldable t) => (a -> m c) -> (b -> m c) -> t (f a b) -> m [c]\n  bicatM f g xs = foldrM (\\e' b -> (<>) b <$> bilistM f g e') [] xs\n\n  -- | Monoidal fold over a foldable of bifunctors (monadic)\n  bifoldMapM :: (Foldable t, Monoid c, Monad m) => (a -> m c) -> (b -> m c) -> t (f a b) -> m c\n  bifoldMapM f g = fmap mconcat . bicatM f g\n\n  -- | Right fold over a foldable of bifunctors (monadic)\n  bifoldrM ::\n    (Monoid c, Foldable t, Monad m) => (a -> c -> m c) -> (b -> c -> m c) -> c -> t (f a b) -> m c\n  bifoldrM f g c xs = foldrM (\\x c' -> biappendM (`f` c') (`g` c') x) c xs\n\n  -- | Left fold over a foldable of bifunctors (monadic)\n  bifoldlM ::\n    (Monoid c, Foldable t, Monad m) => (c -> a -> m c) -> (c -> b -> m c) -> c -> t (f a b) -> m c\n  bifoldlM f g c xs = foldlM (\\c' x -> biappendM (f c') (g c') x) c xs\n\n  -- | Right fold starting from 'mempty' (monadic)\n  bifoldr1M ::\n    (Monoid c, Foldable t, Monad m) => (a -> c -> m c) -> (b -> c -> m c) -> t (f a b) -> m c\n  bifoldr1M f g xs = foldrM (\\x c' -> biappendM (`f` c') (`g` c') x) mempty xs\n\n  -- | Left fold starting from 'mempty' (monadic)\n  bifoldl1M ::\n    (Monoid c, Foldable t, Monad m) => (c -> a -> m c) -> (c -> b -> m c) -> t (f a b) -> m c\n  bifoldl1M f g xs = foldlM (\\c' x -> biappendM (f c') (g c') x) mempty xs\n\n  -- | Map uniformly over both components when @a ~ b@ (monadic)\n  ubimapM :: (a ~ b, Monad m) => (a -> m c) -> f a b -> m (f c c)\n  ubimapM f = bimapM f f\n\n  -- | Uniform monoidal append when @a ~ b@ (monadic)\n  ubiappendM :: (Monoid c, a ~ b, Monad m) => (a -> m c) -> f a b -> m c\n  ubiappendM f = biappendM f f\n\n  -- | Uniform concatenation when @a ~ b@ (monadic)\n  ubicatM :: (Foldable t, a ~ b, Monad m) => (a -> m c) -> t (f a b) -> m [c]\n  ubicatM f = bicatM f f\n\n  -- | Uniform monoidal fold when @a ~ b@ (monadic)\n  ubifoldMapM :: (Foldable t, Monoid c, a ~ b, Monad m) => (a -> m c) -> t (f a b) -> m c\n  ubifoldMapM f = bifoldMapM f f\n\n  -- | Uniform list collection when @a ~ b@ (monadic)\n  ubilistM :: (a ~ b, Monad m) => (a -> m c) -> f a b -> m [c]\n  ubilistM f = bilistM f f\n\n  -- | Uniform right fold when @a ~ b@ (monadic)\n  ubifoldrM :: (Monoid c, Foldable t, a ~ b, Monad m) => (a -> c -> m c) -> c -> t (f a b) -> m c\n  ubifoldrM f = bifoldrM f f\n\n  -- | Uniform left fold when @a ~ b@ (monadic)\n  ubifoldlM :: (Monoid c, Foldable t, a ~ b, Monad m) => (c -> a -> m c) -> c -> t (f a b) -> m c\n  ubifoldlM f = bifoldlM f f\n\n  -- | Uniform right fold from 'mempty' when @a ~ b@ (monadic)\n  ubifoldr1M :: (Monoid c, Foldable t, a ~ b, Monad m) => (a -> c -> m c) -> t (f a b) -> m c\n  ubifoldr1M f = bifoldr1M f f\n\n  -- | Uniform left fold from 'mempty' when @a ~ b@ (monadic)\n  ubifoldl1M :: (Monoid c, Foldable t, a ~ b, Monad m) => (c -> a -> m c) -> t (f a b) -> m c\n  ubifoldl1M f = bifoldl1M f f\n\n  -- | Pure 'bilistM'\n  bilist :: (a -> c) -> (b -> c) -> f a b -> [c]\n  bilist f g = runIdentity . bilistM (return . f) (return . g)\n\n  -- | Pure 'bilistsndM'\n  bilistsnd :: (b -> c) -> f a b -> [c]\n  bilistsnd f = runIdentity . bilistsndM (return . f)\n\n  -- | Pure 'bilistfstM'\n  bilistfst :: (a -> c) -> f a b -> [c]\n  bilistfst f = runIdentity . bilistfstM (return . f)\n\n  -- | Pure 'biappendM'\n  biappend :: (Monoid c) => (a -> c) -> (b -> c) -> f a b -> c\n  biappend f g = runIdentity . biappendM (return . f) (return . g)\n\n  -- | Pure 'bicatM'\n  bicat :: (Foldable t) => (a -> c) -> (b -> c) -> t (f a b) -> [c]\n  bicat f g = runIdentity . bicatM (return . f) (return . g)\n\n  -- | Pure 'bifoldMapM'\n  bifoldMap :: (Foldable t, Monoid c) => (a -> c) -> (b -> c) -> t (f a b) -> c\n  bifoldMap f g = runIdentity . bifoldMapM (return . f) (return . g)\n\n  -- | Pure 'bifoldrM'\n  bifoldr :: (Monoid c, Foldable t) => (a -> c -> c) -> (b -> c -> c) -> c -> t (f a b) -> c\n  bifoldr f g c = runIdentity . bifoldrM (\\b x -> return $ f b x) (\\b x -> return $ g b x) c\n\n  -- | Pure 'bifoldlM'\n  bifoldl :: (Monoid c, Foldable t) => (c -> a -> c) -> (c -> b -> c) -> c -> t (f a b) -> c\n  bifoldl f g c = runIdentity . bifoldlM (\\x b -> return $ f x b) (\\x b -> return $ g x b) c\n\n  -- | Pure 'bifoldr1M'\n  bifoldr1 :: (Monoid c, Foldable t) => (a -> c -> c) -> (b -> c -> c) -> t (f a b) -> c\n  bifoldr1 f g = runIdentity . bifoldrM (\\b x -> return $ f b x) (\\b x -> return $ g b x) mempty\n\n  -- | Pure 'bifoldl1M'\n  bifoldl1 :: (Monoid c, Foldable t) => (c -> a -> c) -> (c -> b -> c) -> t (f a b) -> c\n  bifoldl1 f g = runIdentity . bifoldlM (\\x b -> return $ f x b) (\\x b -> return $ g x b) mempty\n\n  -- | Pure 'ubimapM'\n  ubimap :: (a ~ b) => (a -> c) -> f a b -> f c c\n  ubimap f = runIdentity . ubimapM (return . f)\n\n  -- | Pure 'ubiappendM'\n  ubiappend :: (Monoid c, a ~ b) => (a -> c) -> f a b -> c\n  ubiappend f = runIdentity . ubiappendM (return . f)\n\n  -- | Pure 'ubicatM'\n  ubicat :: (Foldable t, a ~ b) => (a -> c) -> t (f a b) -> [c]\n  ubicat f = runIdentity . ubicatM (return . f)\n\n  -- | Pure 'ubifoldMapM'\n  ubifoldMap :: (Foldable t, Monoid c, a ~ b) => (a -> c) -> t (f a b) -> c\n  ubifoldMap f = runIdentity . ubifoldMap (return . f)\n\n  -- | Pure 'ubilistM'\n  ubilist :: (a ~ b) => (a -> c) -> f a b -> [c]\n  ubilist f = runIdentity . ubilistM (return . f)\n\n  -- | Pure 'ubifoldrM'\n  ubifoldr :: (Monoid c, Foldable t, a ~ b) => (a -> c -> c) -> c -> t (f a b) -> c\n  ubifoldr f c = runIdentity . ubifoldrM (return2 f) c\n\n  -- | Pure 'ubifoldlM'\n  ubifoldl :: (Monoid c, Foldable t, a ~ b) => (c -> a -> c) -> c -> t (f a b) -> c\n  ubifoldl f c = runIdentity . ubifoldlM (return2 f) c\n\n  -- | Pure 'ubifoldr1M'\n  ubifoldr1 :: (Monoid c, Foldable t, a ~ b) => (a -> c -> c) -> t (f a b) -> c\n  ubifoldr1 f = runIdentity . ubifoldr1M (return2 f)\n\n  -- | Pure 'ubifoldl1M'\n  ubifoldl1 :: (Monoid c, Foldable t, a ~ b) => (c -> a -> c) -> t (f a b) -> c\n  ubifoldl1 f = runIdentity . ubifoldl1M (return2 f)\n\ninstance Bifoldable Either where\n  bilistM f _ (Left a) = do\n    a' <- f a\n    return [a']\n  bilistM _ g (Right b) = do\n    b' <- g b\n    return [b']\n\ninstance Bifoldable (,) where\n  bilistM f g (a, b) = do\n    a' <- f a\n    b' <- g b\n    return [a', b']\n"
  },
  {
    "path": "library/Morloc/Data/Bifunctor.hs",
    "content": "{-# LANGUAGE TypeFamilies #-}\n\n{- |\nModule      : Morloc.Data.Bifunctor\nDescription : The Bifunctor typeclass, with monadic instances\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nA custom Bifunctor class that provides monadic variants ('bimapM', 'firstM',\n'secondM') alongside the standard pure operations. The monadic variants are\nthe primary interface; pure operations are derived via 'Identity'.\n-}\nmodule Morloc.Data.Bifunctor (Bifunctor (..)) where\n\nimport Control.Monad.Identity (runIdentity)\n\nclass Bifunctor f where\n  -- | Map monadically over both components\n  bimapM :: (Monad m) => (a -> m a') -> (b -> m b') -> f a b -> m (f a' b')\n\n  -- | Map monadically over the first component\n  firstM :: (Monad m) => (a -> m a') -> f a b -> m (f a' b)\n  firstM f = bimapM f return\n\n  -- | Map monadically over the second component\n  secondM :: (Monad m) => (b -> m b') -> f a b -> m (f a b')\n  secondM g = bimapM return g\n\n  -- | Map over both components\n  bimap :: (a -> a') -> (b -> b') -> f a b -> f a' b'\n  bimap f g = runIdentity . bimapM (return . f) (return . g)\n\n  -- | Map over the first component\n  first :: (a -> a') -> f a b -> f a' b\n  first f = runIdentity . firstM (return . f)\n\n  -- | Map over the second component\n  second :: (b -> b') -> f a b -> f a b'\n  second f = runIdentity . secondM (return . f)\n\ninstance Bifunctor Either where\n  bimapM f _ (Left a) = Left <$> f a\n  bimapM _ g (Right b) = Right <$> g b\n\ninstance Bifunctor (,) where\n  bimapM f g (a, b) = (,) <$> f a <*> g b\n\ninstance Bifunctor ((,,) x1) where\n  bimapM f g (x1, x2, x3) = (,,) x1 <$> f x2 <*> g x3\n"
  },
  {
    "path": "library/Morloc/Data/DAG.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Data.DAG\nDescription : Functions for working with directed acyclic graphs\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nOperations on DAGs represented as @Map k (n, [(k, e)])@ where @k@ is the\nnode key, @n@ is node data, and @e@ is edge data. Used throughout the\ncompiler to represent module dependency graphs.\n-}\nmodule Morloc.Data.DAG\n  ( edgelist\n  , nodes\n  , lookupNode\n  , roots\n  , mapNode\n  , mapNodeM\n  , mapNodeWithKeyM\n  , mapEdge\n  , synthesize\n  , synthesizeNodes\n  ) where\n\nimport qualified Data.Set as Set\nimport qualified Morloc.Data.Map as Map\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State (MorlocMonad)\n\n-- | Get all edges as @(source, target)@ pairs\nedgelist :: DAG k e n -> [(k, k)]\nedgelist d = concat [[(k, j) | (j, _) <- xs] | (k, (_, xs)) <- Map.toList d]\n\n-- | Get all node values\nnodes :: DAG k e n -> [n]\nnodes = map fst . Map.elems\n\n-- | Look up the node data for a given key\nlookupNode :: (Ord k) => k -> DAG k e n -> Maybe n\nlookupNode k d = case Map.lookup k d of\n  (Just (n, _)) -> Just n\n  Nothing -> Nothing\n\n-- | Get all root keys (nodes with no incoming edges)\nroots :: (Ord k) => DAG k e n -> [k]\nroots d = Set.toList $ Set.difference parents children\n  where\n    g = edgelist d\n    parents = Map.keysSet d\n    children = Set.fromList (map snd g)\n\n-- | Map a pure function over node data, leaving edges unchanged\nmapNode :: (n1 -> n2) -> DAG k e n1 -> DAG k e n2\nmapNode f = Map.map (first f)\n\n-- | Map a monadic function over node data, leaving edges unchanged\nmapNodeM :: (n1 -> MorlocMonad n2) -> DAG k e n1 -> MorlocMonad (DAG k e n2)\nmapNodeM f = Map.mapM (\\(n, xs) -> f n >>= (\\n' -> return (n', xs)))\n\n-- | Map a monadic function over node data with access to the key\nmapNodeWithKeyM :: (k -> n1 -> MorlocMonad n2) -> DAG k e n1 -> MorlocMonad (DAG k e n2)\nmapNodeWithKeyM f = Map.mapWithKeyM (\\k (n, xs) -> f k n >>= (\\n' -> return (n', xs)))\n\n-- | Map a pure function over edge data, leaving nodes unchanged\nmapEdge :: (e1 -> e2) -> DAG k e1 n -> DAG k e2 n\nmapEdge f = Map.map (\\(n, xs) -> (n, [(k, f e) | (k, e) <- xs]))\n\n-- | Like 'synthesize' but keeps original edge data unchanged\nsynthesizeNodes ::\n  (Ord k, Monad m) =>\n  (k -> n1 -> [(k, e, n2)] -> m n2) ->\n  DAG k e n1 ->\n  m (Maybe (DAG k e n2))\nsynthesizeNodes f = synthesize f (\\e _ _ -> return e)\n\n{- | Bottom-up synthesis over a DAG: compute new node and edge values where\neach node's new value depends on its children's already-computed values.\nReturns 'Nothing' if the DAG contains cycles (detected by fixed-point\nstall).\n-}\nsynthesize ::\n  (Ord k, Monad m) =>\n  (k -> n1 -> [(k, e1, n2)] -> m n2) ->\n  (e1 -> n2 -> n2 -> m e2) ->\n  DAG k e1 n1 ->\n  m (Maybe (DAG k e2 n2))\nsynthesize f fe d0 = go (Just Map.empty)\n  where\n    -- Iteratively process nodes whose children are all resolved\n    go Nothing = return Nothing\n    go (Just processed)\n      | Map.size d0 == Map.size processed = return (Just processed)\n      | otherwise = do\n          processed' <- foldlM addIfReady processed (Map.toList d0)\n          if Map.size processed' == Map.size processed\n            -- No progress means a cycle\n            then return Nothing\n            else go (Just processed')\n\n    -- Leaf nodes: no children to wait for\n    addIfReady processed (key, (nodeVal, []))\n      | Map.member key processed = return processed\n      | otherwise = do\n          newNode <- f key nodeVal []\n          return $ Map.insert key (newNode, []) processed\n    -- Interior nodes: all children must be processed first\n    addIfReady processed (key, (nodeVal, childEdges))\n      | Map.member key processed = return processed\n      | otherwise = case mapM ((`Map.lookup` processed) . fst) childEdges of\n          Nothing -> return processed\n          (Just resolvedChildren) -> do\n            let augmented = [(k, e, n2) | ((k, e), (n2, _)) <- zip childEdges resolvedChildren]\n            newNode <- f key nodeVal augmented\n            newEdges <-\n              mapM\n                (\\((k2, e), (childNode, _)) -> (,) k2 <$> fe e newNode childNode)\n                (zip childEdges resolvedChildren)\n            return $ Map.insert key (newNode, newEdges) processed\n"
  },
  {
    "path": "library/Morloc/Data/Doc.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Data.Doc\nDescription : Pretty-printing utilities wrapping prettyprinter\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nRe-exports \"Prettyprinter\" and \"Prettyprinter.Render.Text\", plus convenience\nfunctions for rendering documents to 'Text', building code blocks, and\nperforming template-style text substitution.\n-}\nmodule Morloc.Data.Doc\n  ( module Prettyprinter\n  , module Prettyprinter.Render.Text\n  , render\n  , render'\n  , textEsc'\n  , escapeStringLit\n  , escapeQuotes\n  , tupledNoFold\n  , int\n  , integer\n  , block\n  , format\n  ) where\n\nimport qualified Data.Text as DT\nimport Prettyprinter hiding (annotate, (<>))\nimport Prettyprinter.Render.Text\n\n-- | Render a 'Doc' to strict 'DT.Text' using default layout options\nrender :: Doc ann -> DT.Text\nrender = renderStrict . layoutPretty defaultLayoutOptions\n\n-- | Render a 'Doc' to 'String' (ignores layout)\nrender' :: Doc ann -> String\nrender' = show\n\n-- | Convenience wrapper: @pretty@ specialized to 'Int'\nint :: Int -> Doc ann\nint = pretty\n\n-- | Convenience wrapper: @pretty@ specialized to 'Integer'\ninteger :: Integer -> Doc ann\ninteger = pretty\n\n-- | Format a code block with braces and indentation\nblock :: Int -> Doc ann -> Doc ann -> Doc ann\nblock level header body = align . vsep $ [header, \"{\", indent level body, \"}\"]\n\n-- | Like 'tupled' but never folds long lines (folding breaks commenting)\ntupledNoFold :: [Doc ann] -> Doc ann\ntupledNoFold [] = \"\"\ntupledNoFold (x : xs) = parens (foldl (\\l r -> l <> \",\" <+> r) x xs)\n\n-- | Re-escape whitespace and backslash for embedding in generated\n-- string literals. Quote escaping is handled separately via\n-- 'escapeQuotes' using the language-specific terminator.\nescapeStringLit :: DT.Text -> DT.Text\nescapeStringLit = DT.concatMap escapeChar\n  where\n    escapeChar '\\\\' = \"\\\\\\\\\"\n    escapeChar '\\n' = \"\\\\n\"\n    escapeChar '\\t' = \"\\\\t\"\n    escapeChar '\\r' = \"\\\\r\"\n    escapeChar c = DT.singleton c\n\n-- | Replace occurrences of a quote terminator with its escaped form.\nescapeQuotes :: DT.Text -> DT.Text -> DT.Text -> DT.Text\nescapeQuotes terminator escaped = DT.replace terminator escaped\n\n-- | Render a 'DT.Text' literal as a double-quoted, escaped 'Doc'\ntextEsc' :: DT.Text -> Doc ann\ntextEsc' = dquotes . pretty . escapeQuotes \"\\\"\" \"\\\\\\\"\" . escapeStringLit\n\n-- | Template substitution: split @fmtstr@ on @breaker@ and interleave @replacements@\nformat ::\n  DT.Text -> -- main text with substitution patterns\n  DT.Text -> -- break string\n  [Doc ann] -> -- replacement strings\n  Doc ann\nformat fmtstr breaker replacements =\n  let xs = DT.splitOn breaker fmtstr\n   in foldl (<>) (pretty . head $ xs) $ zipWith (\\r x -> r <> pretty x) replacements (tail xs)\n"
  },
  {
    "path": "library/Morloc/Data/GMap.hs",
    "content": "{- |\nModule      : Morloc.Data.GMap\nDescription : A general map datatype (non-injective and non-surjective)\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nA two-level map @GMap a b c@ that maps outer keys @a@ to inner keys @b@, and\ninner keys @b@ to values @c@. Multiple outer keys may share the same inner\nkey (and thus value). Used in the compiler for type-alias indirection where\nmany names can refer to the same underlying definition.\n-}\nmodule Morloc.Data.GMap\n  ( elems\n  , empty\n  , innerKeys\n  , insert\n  , insertWith\n  , insertWithM\n  , change\n  , insertMany\n  , insertManyWith\n  , keys\n  , lookup\n  , mapInnerKeys\n  , mapKeys\n  , mapVals\n  , mapValsM\n  , mapValsWithKeyM\n  , yIsX\n  ) where\n\nimport qualified Data.Set as Set\nimport qualified Morloc.Data.Map as Map\nimport Morloc.Namespace.Prim hiding (insert, lookup)\nimport Prelude hiding (lookup)\n\n-- | Map a function over all values\nmapVals :: (c -> c') -> GMap a b c -> GMap a b c'\nmapVals f (GMap x y) = GMap x (Map.map f y)\n\n-- | Monadic 'mapVals'\nmapValsM :: (Ord b, Monad m) => (c -> m c') -> GMap a b c -> m (GMap a b c')\nmapValsM f (GMap m1 m2) = do\n  let m2list = Map.toList m2\n  xs2 <- mapM (f . snd) m2list\n  return $ GMap m1 (Map.fromList (zip (map fst m2list) xs2))\n\n-- | Monadic map over values with access to the inner key\nmapValsWithKeyM :: (Ord b, Monad m) => (b -> c -> m c') -> GMap a b c -> m (GMap a b c')\nmapValsWithKeyM f (GMap m1 m2) = do\n  let m2list = Map.toList m2\n  xs2 <- mapM (uncurry f) m2list\n  return $ GMap m1 (Map.fromList (zip (map fst m2list) xs2))\n\n-- | Map a function over outer keys\nmapKeys :: (Ord a') => (a -> a') -> GMap a b c -> GMap a' b c\nmapKeys f (GMap x y) = GMap (Map.mapKeys f x) y\n\n-- | Map a function over inner keys (updating both the key mapping and value map)\nmapInnerKeys :: (Ord b') => (b -> b') -> GMap a b c -> GMap a b' c\nmapInnerKeys f (GMap x y) = GMap (Map.map f x) (Map.mapKeys f y)\n\n-- | Get all outer keys\nkeys :: GMap a b c -> [a]\nkeys (GMap x _) = Map.keys x\n\n-- | Get the sets of (mapped-to inner keys, all inner keys)\ninnerKeys :: (Ord b) => GMap a b c -> (Set.Set b, Set.Set b)\ninnerKeys (GMap x y) = (Set.fromList (Map.elems x), Set.fromList (Map.keys y))\n\n-- | Get all values\nelems :: GMap a b c -> [c]\nelems (GMap _ y) = Map.elems y\n\n-- | An empty 'GMap'\nempty :: GMap a b c\nempty = GMap Map.empty Map.empty\n\n-- | Insert a mapping from outer key to inner key to value\ninsert :: (Ord a, Ord b) => a -> b -> c -> GMap a b c -> GMap a b c\ninsert k1 k2 x (GMap m1 m2) = GMap (Map.insert k1 k2 m1) (Map.insert k2 x m2)\n\n-- | Insert with a combining function for values at colliding inner keys\ninsertWith :: (Ord a, Ord b) => (c -> c -> c) -> a -> b -> c -> GMap a b c -> GMap a b c\ninsertWith f k1 k2 x (GMap m1 m2) = GMap (Map.insert k1 k2 m1) (Map.insertWith f k2 x m2)\n\n-- | Monadic 'insertWith'\ninsertWithM ::\n  (Monad m, Ord a, Ord b) => (c -> c -> m c) -> a -> b -> c -> GMap a b c -> m (GMap a b c)\ninsertWithM f k1 k2 x1 (GMap m1 m2) = do\n  let map1 = Map.insert k1 k2 m1\n  x3 <- case Map.lookup k2 m2 of\n    (Just x2) -> f x1 x2\n    Nothing -> return x1\n  return $ GMap map1 (Map.insert k2 x3 m2)\n\n{- | Given an outer key, replace its associated value. Since multiple outer\nkeys may share the same inner key, this affects all of them.\n-}\nchange :: (Ord a, Ord b) => a -> c -> GMap a b c -> Maybe (GMap a b c)\nchange k1 v (GMap x y) = do\n  k2 <- Map.lookup k1 x\n  return $ GMap x (Map.insert k2 v y)\n\n-- | Insert multiple outer keys that all map to the same inner key and value\ninsertMany :: (Ord a, Ord b) => [a] -> b -> c -> GMap a b c -> GMap a b c\ninsertMany ks k2 x (GMap m1 m2) = GMap m1' m2'\n  where\n    m1' = Map.union (Map.fromList (zip ks (repeat k2))) m1\n    m2' = Map.insert k2 x m2\n\n-- | 'insertMany' with a combining function for colliding inner keys\ninsertManyWith :: (Ord a, Ord b) => (c -> c -> c) -> [a] -> b -> c -> GMap a b c -> GMap a b c\ninsertManyWith f ks k2 x (GMap m1 m2) = GMap m1' m2'\n  where\n    m1' = Map.union (Map.fromList (zip ks (repeat k2))) m1\n    m2' = Map.insertWith f k2 x m2\n\n{- | Make @newKey@ point to the same inner key as @oldKey@. Returns 'Nothing'\nif @oldKey@ is absent.\n-}\nyIsX :: (Ord a) => a -> a -> GMap a b c -> Maybe (GMap a b c)\nyIsX oldKey newKey (GMap m x) = do\n  i <- Map.lookup oldKey m\n  return (GMap (Map.insert newKey i m) x)\n\n-- | Two-phase lookup: outer key to inner key, then inner key to value\nlookup :: (Ord a, Ord b) => a -> GMap a b c -> GMapRet c\nlookup k1 (GMap m1 m2) =\n  case Map.lookup k1 m1 of\n    Nothing -> GMapNoFst\n    (Just k2) -> case Map.lookup k2 m2 of\n      Nothing -> GMapNoSnd\n      (Just x) -> GMapJust x\n"
  },
  {
    "path": "library/Morloc/Data/Json.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Data.Json\nDescription : Lightweight JSON builder utilities\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nMinimal JSON text builders for generating manifest files and other JSON\noutput without depending on aeson for serialization. Each function produces\na 'Text' fragment that is valid JSON.\n-}\nmodule Morloc.Data.Json\n  ( jsonEscape\n  , jsonStr\n  , jsonInt\n  , jsonBool\n  , jsonNull\n  , jsonArr\n  , jsonObj\n  , jsonStrArr\n  , jsonMaybeStr\n  ) where\n\nimport Data.Char (ord)\nimport Data.Text (Text)\nimport qualified Data.Text as MT\nimport Numeric (showHex)\n\n-- | Escape a text value for inclusion in a JSON string (without surrounding quotes)\njsonEscape :: Text -> Text\njsonEscape = MT.concatMap esc\n  where\n    esc '\"' = \"\\\\\\\"\"\n    esc '\\\\' = \"\\\\\\\\\"\n    esc '\\n' = \"\\\\n\"\n    esc '\\r' = \"\\\\r\"\n    esc '\\t' = \"\\\\t\"\n    esc '\\b' = \"\\\\b\"\n    esc '\\f' = \"\\\\f\"\n    esc c | c < ' ' = \"\\\\u\" <> MT.pack (pad4 (showHex (ord c) \"\"))\n    esc c = MT.singleton c\n\n    pad4 s = replicate (4 - length s) '0' ++ s\n\n-- | Wrap a text value as a JSON string (escaped and double-quoted)\njsonStr :: Text -> Text\njsonStr t = \"\\\"\" <> jsonEscape t <> \"\\\"\"\n\n-- | Render an 'Int' as a JSON number\njsonInt :: Int -> Text\njsonInt = MT.pack . show\n\n-- | Render a 'Bool' as a JSON boolean\njsonBool :: Bool -> Text\njsonBool True = \"true\"\njsonBool False = \"false\"\n\n-- | The JSON null literal\njsonNull :: Text\njsonNull = \"null\"\n\n-- | Render a list of pre-formatted JSON values as a JSON array\njsonArr :: [Text] -> Text\njsonArr xs = \"[\" <> MT.intercalate \",\" xs <> \"]\"\n\n-- | Render key-value pairs as a JSON object (values are pre-formatted JSON)\njsonObj :: [(Text, Text)] -> Text\njsonObj pairs = \"{\" <> MT.intercalate \",\" [jsonStr k <> \":\" <> v | (k, v) <- pairs] <> \"}\"\n\n-- | Render a list of text values as a JSON array of strings\njsonStrArr :: [Text] -> Text\njsonStrArr = jsonArr . map jsonStr\n\n-- | Render 'Nothing' as @null@, 'Just' as a JSON string\njsonMaybeStr :: Maybe Text -> Text\njsonMaybeStr Nothing = jsonNull\njsonMaybeStr (Just t) = jsonStr t\n"
  },
  {
    "path": "library/Morloc/Data/Map/Extra.hs",
    "content": "{- |\nModule      : Morloc.Data.Map.Extra\nDescription : Additional functions for the Map class\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nMonadic extensions to \"Data.Map.Strict\" that are not provided by the standard\nlibrary: monadic union, map, and three-way merge operations.\n-}\nmodule Morloc.Data.Map.Extra\n  ( mergeMaps\n  , mergeMapsM\n  , mapKeysWithM\n  , mapM\n  , mapWithKeyM\n  , unionWithM\n  , unionsWithM\n  ) where\n\nimport Control.Monad (foldM)\nimport Data.Bifunctor (first)\nimport Data.List.Extra (groupSort)\nimport qualified Data.Map.Strict as Map\nimport Prelude hiding (mapM)\nimport qualified Prelude\n\nonSndM :: (Monad m) => (b -> m c) -> (a, b) -> m (a, c)\nonSndM f (x, y) = (,) x <$> f y\n\n-- | Monadic version of @Data.Map.unionWith@\nunionWithM :: (Monad m, Ord a) => (b -> b -> m b) -> Map.Map a b -> Map.Map a b -> m (Map.Map a b)\nunionWithM f m1 m2 = do\n  pairs <- Prelude.mapM (onSndM (uncurry f)) (Map.toList $ Map.intersectionWith (,) m1 m2)\n\n  return $\n    Map.unions\n      [ Map.difference m1 m2\n      , Map.fromList pairs\n      , Map.difference m2 m1\n      ]\n\n-- | Monadic version of @Data.Map.unionsWith@\nunionsWithM :: (Monad m, Ord a) => (b -> b -> m b) -> [Map.Map a b] -> m (Map.Map a b)\nunionsWithM f = foldM (unionWithM f) Map.empty\n\n-- | Monadic version of @Data.Map.mapKeysWith@, merging values when keys collide\nmapKeysWithM ::\n  (Monad m, Ord k2) =>\n  (a -> a -> m a) ->\n  (k1 -> k2) ->\n  Map.Map k1 a ->\n  m (Map.Map k2 a)\nmapKeysWithM f g m =\n  Map.fromList\n    <$> Prelude.mapM\n      foldValues\n      (groupSort $ map (first g) (Map.toList m))\n  where\n    foldValues (k, v : vs) = (,) k <$> foldM f v vs\n    foldValues _ = undefined -- groupSort never produces empty value lists\n\n-- | Monadic version of @Data.Map.map@\nmapM :: (Monad m) => (a -> m b) -> Map.Map k a -> m (Map.Map k b)\nmapM f = Map.traverseWithKey (\\_ a -> f a)\n\n-- | Monadic version of @Data.Map.mapWithKey@\nmapWithKeyM :: (Monad m) => (k -> a -> m b) -> Map.Map k a -> m (Map.Map k b)\nmapWithKeyM = Map.traverseWithKey\n\n{- | Three-way merge of two maps: apply @fb@ to left-only, @fc@ to right-only,\nand @fbc@ to entries present in both\n-}\nmergeMaps ::\n  (Ord a) =>\n  (b -> d) ->\n  (c -> d) ->\n  (b -> c -> d) ->\n  Map.Map a b ->\n  Map.Map a c ->\n  Map.Map a d\nmergeMaps fb fc fbc m1 m2 =\n  Map.unions\n    [ Map.map fb (Map.difference m1 m2)\n    , Map.mapMaybeWithKey (\\k v -> fbc v <$> Map.lookup k m2) (Map.intersection m1 m2)\n    , Map.map fc (Map.difference m2 m1)\n    ]\n\n-- | Monadic version of 'mergeMaps'\nmergeMapsM ::\n  (Ord a, Monad m) =>\n  (b -> m d) ->\n  (c -> m d) ->\n  (b -> c -> m d) ->\n  Map.Map a b ->\n  Map.Map a c ->\n  m (Map.Map a d)\nmergeMapsM fb fc fbc m1 m2 = do\n  bs <- mapM fb $ Map.difference m1 m2\n  bcs <- mapM (uncurry fbc) $ Map.intersectionWith (,) m1 m2\n  cs <- mapM fc $ Map.difference m2 m1\n  return $ Map.unions [bs, bcs, cs]\n"
  },
  {
    "path": "library/Morloc/Data/Map.hs",
    "content": "{- |\nModule      : Morloc.Data.Map\nDescription : An extension of the base map module\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.Data.Map\n  ( module Data.Map.Strict\n  , module Morloc.Data.Map.Extra\n  ) where\n\nimport Data.Map.Strict\nimport Morloc.Data.Map.Extra\n"
  },
  {
    "path": "library/Morloc/Data/Text.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Data.Text\nDescription : Text utilities and re-exports\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nRe-exports \"Data.Text\", \"Data.Text.IO\", and \"Data.Text.Encoding\", plus\nconversion helpers ('show'', 'read'') and string-stripping utilities\n('unenclose', 'unangle', 'unquote', 'undquote').\n-}\nmodule Morloc.Data.Text\n  ( module Data.Text\n  , module Data.Text.IO\n  , module Data.Text.Encoding\n  , show'\n  , pretty\n  , read'\n  , readMay'\n  , unenclose\n  , unangle\n  , unquote\n  , undquote\n  , stripPrefixIfPresent\n  , liftToText\n  ) where\n\nimport Data.Maybe (fromMaybe)\nimport Data.Text hiding (map)\nimport Data.Text.Encoding\nimport Data.Text.IO\nimport qualified Data.Text.Lazy as DL\nimport qualified Safe\nimport qualified Text.Pretty.Simple as Pretty\nimport Prelude hiding (concat, length, lines, unlines)\n\n-- | 'show' producing 'Text' instead of 'String'\nshow' :: (Show a) => a -> Text\nshow' = pack . Prelude.show\n\n-- | 'read' accepting 'Text' instead of 'String'\nread' :: (Read a) => Text -> a\nread' = read . unpack\n\n-- | Safe 'read' from 'Text', returning 'Nothing' on parse failure\nreadMay' :: (Read a) => Text -> Maybe a\nreadMay' = Safe.readMay . unpack\n\n-- | Strip a prefix if present, otherwise return the text unchanged\nstripPrefixIfPresent :: Text -> Text -> Text\nstripPrefixIfPresent prefix text =\n  case stripPrefix prefix text of\n    (Just x) -> x\n    Nothing -> text\n\n-- | Pretty-print any 'Show' instance to 'Text' (no color)\npretty :: (Show a) => a -> Text\npretty = DL.toStrict . Pretty.pShowNoColor\n\n-- | Lift a @String -> String@ function to operate on 'Text'\nliftToText :: (String -> String) -> Text -> Text\nliftToText f = pack . f . unpack\n\n-- | Strip matching open\\/close delimiters from a text value\nunenclose :: Text -> Text -> Text -> Text\nunenclose a b x = fromMaybe x (stripPrefix a x >>= stripSuffix b)\n\n-- | Strip surrounding angle brackets: @\\<foo\\>@ -> @foo@\nunangle :: Text -> Text\nunangle = unenclose \"<\" \">\"\n\n-- | Strip surrounding single quotes: @\\'foo\\'@ -> @foo@\nunquote :: Text -> Text\nunquote = unenclose \"'\" \"'\"\n\n-- | Strip surrounding double quotes: @\\\"foo\\\"@ -> @foo@\nundquote :: Text -> Text\nundquote = unenclose \"\\\"\" \"\\\"\"\n"
  },
  {
    "path": "library/Morloc/DataFiles.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE TemplateHaskell #-}\n\n{- |\nModule      : Morloc.DataFiles\nDescription : Template-Haskell-embedded data files for runtime and codegen\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nAll non-Haskell data files (C library sources, pool templates, init scripts,\nlang.yaml configs, nexus source) are embedded at compile time via\n'Data.FileEmbed.embedFileRelative'. This module provides typed access to\nthese files for use by 'Morloc.CodeGenerator.SystemConfig' (init) and the\ntranslators (codegen).\n-}\nmodule Morloc.DataFiles\n  ( EmbededFile (..)\n  , LangSetup (..)\n  , libmorlocHeader\n  , poolTemplate\n  , poolTemplateGeneric\n  , langSetups\n  , langRegistryFiles\n  , languagesYaml\n  ) where\n\nimport Data.FileEmbed (embedFileRelative)\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport Data.Text.Encoding (decodeUtf8)\n\ndata EmbededFile = EmbededFile\n  { embededFileName :: String -- basename for the file\n  , embededFileText :: Text -- full text the file contained at compile time\n  }\n\n-- | Per-language init setup: an init script and associated data files.\ndata LangSetup = LangSetup\n  { lsName :: String\n  , lsRequiredTools :: [String]\n  , lsInitScript :: EmbededFile\n  , lsFiles :: [EmbededFile]\n  }\n\n-- | The single self-contained morloc.h header (the ABI contract for libmorloc.so).\n-- Language extensions and pool templates #include this to call into the Rust library.\nlibmorlocHeader :: Text\nlibmorlocHeader = decodeUtf8 $(embedFileRelative \"data/morloc/morloc.h\")\n\n-- | Pool template lookup by canonical language name\npoolTemplate :: Text -> EmbededFile\npoolTemplate \"cpp\" = EmbededFile \"pool.cpp\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/pool.cpp\"))\npoolTemplate name = error $ \"No embedded pool template for \" <> T.unpack name\n\n-- | 3-section pool templates for the generic translator (sources, manifolds, dispatch)\npoolTemplateGeneric :: Text -> EmbededFile\npoolTemplateGeneric \"py\" = EmbededFile \"pool.py\" (decodeUtf8 $ $(embedFileRelative \"data/lang/py/pool.py\"))\npoolTemplateGeneric \"r\" = EmbededFile \"pool.R\" (decodeUtf8 $ $(embedFileRelative \"data/lang/r/pool.R\"))\npoolTemplateGeneric name = poolTemplate name\n\n{- | Per-language init setups. Each bundles an init.sh script with\nthe data files that should be written to the build dir before running it.\n-}\nlangSetups :: [LangSetup]\nlangSetups = [cppSetup, pythonSetup, rSetup, juliaSetup]\n\ncppSetup :: LangSetup\ncppSetup =\n  LangSetup\n    \"C++\"\n    [\"g++\", \"git\"]\n    (EmbededFile \"init.sh\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/init.sh\")))\n    [ EmbededFile \"cppmorloc.hpp\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/cppmorloc.hpp\"))\n    , EmbededFile \"cppmorloc.cpp\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/cppmorloc.cpp\"))\n    , EmbededFile \"morloc_pch.hpp\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/morloc_pch.hpp\"))\n    , EmbededFile \"mlc_arrow.hpp\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/mlc_arrow.hpp\"))\n    , EmbededFile \"mlc_tensor.hpp\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/mlc_tensor.hpp\"))\n    , EmbededFile \"nanoarrow.h\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/nanoarrow/nanoarrow.h\"))\n    , EmbededFile \"nanoarrow.c\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/nanoarrow/nanoarrow.c\"))\n    ]\n\npythonSetup :: LangSetup\npythonSetup =\n  LangSetup\n    \"python\"\n    [\"python3\"]\n    (EmbededFile \"init.sh\" (decodeUtf8 $ $(embedFileRelative \"data/lang/py/init.sh\")))\n    [ EmbededFile \"pymorloc.c\" (decodeUtf8 $ $(embedFileRelative \"data/lang/py/pymorloc.c\"))\n    , EmbededFile \"setup.py\" (decodeUtf8 $ $(embedFileRelative \"data/lang/py/setup.py\"))\n    , EmbededFile \"Makefile\" (decodeUtf8 $ $(embedFileRelative \"data/lang/py/Makefile\"))\n    ]\n\nrSetup :: LangSetup\nrSetup =\n  LangSetup\n    \"R\"\n    [\"R\"]\n    (EmbededFile \"init.sh\" (decodeUtf8 $ $(embedFileRelative \"data/lang/r/init.sh\")))\n    [ EmbededFile \"rmorloc.c\" (decodeUtf8 $ $(embedFileRelative \"data/lang/r/rmorloc.c\"))\n    ]\n\njuliaSetup :: LangSetup\njuliaSetup =\n  LangSetup\n    \"Julia\"\n    [\"julia\"]\n    (EmbededFile \"init.sh\" (decodeUtf8 $ $(embedFileRelative \"data/lang/julia/init.sh\")))\n    [ EmbededFile \"juliabridge.c\" (decodeUtf8 $ $(embedFileRelative \"data/lang/julia/juliabridge.c\"))\n    , EmbededFile\n        \"MorlocRuntime.jl\"\n        (decodeUtf8 $ $(embedFileRelative \"data/lang/julia/MorlocRuntime.jl\"))\n    , EmbededFile \"lang.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/julia/lang.yaml\"))\n    , EmbededFile \"pool.jl\" (decodeUtf8 $ $(embedFileRelative \"data/lang/julia/pool.jl\"))\n    ]\n\n-- | Per-language lang.yaml files keyed by canonical name\nlangRegistryFiles :: [(String, EmbededFile)]\nlangRegistryFiles =\n  [ (\"c\", EmbededFile \"lang.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/c/lang.yaml\")))\n  , (\"cpp\", EmbededFile \"lang.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/cpp/lang.yaml\")))\n  , (\"py\", EmbededFile \"lang.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/py/lang.yaml\")))\n  , (\"r\", EmbededFile \"lang.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/r/lang.yaml\")))\n  , (\"jl\", EmbededFile \"lang.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/julia/lang.yaml\")))\n  ]\n\n-- | Shared languages.yaml with pairwise costs\nlanguagesYaml :: EmbededFile\nlanguagesYaml = EmbededFile \"languages.yaml\" (decodeUtf8 $ $(embedFileRelative \"data/lang/languages.yaml\"))\n"
  },
  {
    "path": "library/Morloc/Frontend/API.hs",
    "content": "{- |\nModule      : Morloc.Frontend.API\nDescription : Entry point for the frontend pipeline (parse, typecheck, valuecheck)\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nOrchestrates the full frontend pipeline: parsing source files into a module\nDAG, recursively resolving imports, and re-exporting the typechecker and\nvaluechecker entry points. This is the primary interface consumed by the\ntop-level compiler driver ('Morloc').\n-}\nmodule Morloc.Frontend.API\n  ( parse\n  , Parser.readType\n  , Typecheck.typecheck\n  , Typecheck.resolveTypes\n  , Valuecheck.valuecheck\n  ) where\n\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Morloc.Config as Config\nimport qualified Morloc.Data.DAG as MDD\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as MT\nimport Morloc.Frontend.Namespace\nimport Morloc.Frontend.Parser (PState (..), emptyPState)\nimport qualified Morloc.Frontend.Parser as Parser\nimport qualified Morloc.Frontend.Typecheck as Typecheck\nimport qualified Morloc.Frontend.Valuecheck as Valuecheck\nimport qualified Morloc.LangRegistry as LR\nimport qualified Morloc.Module as Mod\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.System as MS\nimport System.Directory (doesDirectoryExist, doesFileExist, listDirectory)\n\n{- | Parse a morloc source file and all its imports into a module DAG.\nRecursively discovers and parses imported modules.\n-}\nparse ::\n  -- | path to the current module (if we are reading from a file)\n  Maybe Path ->\n  -- | code of the current module\n  Code ->\n  MorlocMonad (DAG MVar Import ExprI)\nparse f (Code code) = do\n  moduleConfig <- Config.loadModuleConfig f\n  langMap <- buildLangMap'\n\n  -- Compute project root from entry-point file path\n  let projectRoot = fmap MS.takeDirectory f\n\n  let parserState =\n        emptyPState\n          { psModuleConfig = moduleConfig\n          , psLangMap = langMap\n          , psProjectRoot = projectRoot\n          }\n\n  -- store source text, project root, and load package metadata for the main file\n  case f of\n    Just path -> do\n      MM.modify (\\st -> st\n        { stateSourceText = Map.insert path code (stateSourceText st)\n        , stateProjectRoot = projectRoot\n        })\n      Mod.loadModuleMetadata path\n    Nothing -> return ()\n\n  case Parser.readProgram Nothing f code parserState mempty of\n    (Left e) -> MM.throwSystemError $ pretty e\n    (Right (mainDag, mainState)) -> do\n      -- capture module-level docs from the main module before imports overwrite them\n      MM.modify (\\st -> st\n        { stateModuleDoc = psModuleDoc mainState\n        , stateModuleEpilogues = psModuleEpilogues mainState\n        })\n      parseImports mainDag mainState Map.empty\n  where\n    -- descend recursively into imports\n    parseImports ::\n      DAG MVar Import ExprI ->\n      PState ->\n      Map.Map MVar Path ->\n      MorlocMonad (DAG MVar Import ExprI)\n    parseImports d s m = case unimported of\n      [] -> do\n        -- transfer source positions from parser state into MorlocState\n        MM.modify (\\st -> st\n          { stateSourceMap = psSourceMap s <> stateSourceMap st\n          , stateTermDocs = psTermDocs s <> stateTermDocs st\n          })\n        -- emit any docstring warnings accumulated during desugar\n        case psWarnings s of\n          [] -> return ()\n          ws -> MM.tell ws\n        return d\n      ((mainModule, importedModule) : _) -> do\n        importPath <- case Map.lookup mainModule m of\n          (Just mainPath) -> Mod.findModule (Just mainPath, mainModule) importedModule\n          Nothing -> Mod.findModule (Nothing, mainModule) importedModule\n\n        -- Load the <main>.yaml file associated with the main morloc package file\n        moduleConfig <- Config.loadModuleConfig (Just importPath)\n        let newState = s {psModuleConfig = moduleConfig}\n\n        Mod.loadModuleMetadata importPath\n        (childPath, code') <- openLocalModule importPath\n        case Parser.readProgram (Just importedModule) childPath code' newState d of\n          (Left e) -> MM.throwSystemError $ pretty e\n          (Right (d', s')) ->\n            -- The parsed module may have a different internal name than the\n            -- import edge target (e.g., file declares \"module units\" but\n            -- import edge targets \".units\"). Reconcile by renaming the DAG\n            -- entry to match the import name.\n            let d'' = reconcileModuleName importedModule d d'\n            in parseImports d'' s' (maybe m (\\v -> Map.insert importedModule v m) childPath)\n      where\n        -- all modules that have already been parsed\n        parsed = Map.keysSet d\n        -- find all (module to module) edges in the graph where the imported\n        -- module has not yet been parsed\n        unimported = filter (\\(_, importMod) -> not (Set.member importMod parsed)) (MDD.edgelist d)\n\n    -- If readProgram added a key that doesn't match importedModule, rename it.\n    -- This happens when a local import (\".units\") parses a file that declares\n    -- \"module units (...)\". We rename the DAG key and the ModE name to match\n    -- the import edge target.\n    reconcileModuleName :: MVar -> DAG MVar Import ExprI -> DAG MVar Import ExprI -> DAG MVar Import ExprI\n    reconcileModuleName importName dOld dNew =\n      case newKeys of\n        [actualName] | actualName /= importName ->\n          -- Rename: delete the old key, insert under the import name with\n          -- the ModE name rewritten to match\n          case Map.lookup actualName dNew of\n            Just (ExprI i (ModE _ es), edges) ->\n              let renamed = Map.delete actualName dNew\n              in Map.insert importName (ExprI i (ModE importName es), edges) renamed\n            _ -> dNew  -- shouldn't happen\n        _ -> dNew  -- zero or multiple new keys: nothing to reconcile\n      where\n        newKeys = filter (`Map.notMember` dOld) (Map.keys dNew)\n\n-- | assume @t@ is a filename and open it, return file name and contents\nopenLocalModule :: Path -> MorlocMonad (Maybe Path, Text)\nopenLocalModule filename = do\n  code <- liftIO $ MT.readFile filename\n  MM.modify (\\st -> st {stateSourceText = Map.insert filename code (stateSourceText st)})\n  return (Just filename, code)\n\n{- | Build a map from language aliases to Lang values, combining the\nregistry (built-in languages) with filesystem-discovered plugins.\n-}\nbuildLangMap' :: MorlocMonad (Map.Map T.Text Lang)\nbuildLangMap' = do\n  -- Get the registry-based lang map (all built-in languages)\n  reg <- MM.gets stateLangRegistry\n  let registryMap = LR.buildLangMap reg\n\n  -- Discover additional plugin languages on the filesystem\n  home <- MM.asks configHome\n  let langDir = home </> \"lang\"\n  exists <- liftIO $ doesDirectoryExist langDir\n  pluginMap <-\n    if not exists\n      then return Map.empty\n      else do\n        dirs <- liftIO $ listDirectory langDir\n        results <- liftIO $ mapM (scanLangDir langDir) dirs\n        return $ Map.fromList [(n, lang) | Just (n, lang) <- results]\n\n  -- Registry entries take precedence over filesystem discoveries\n  return $ Map.union registryMap pluginMap\n  where\n    scanLangDir :: FilePath -> String -> IO (Maybe (T.Text, Lang))\n    scanLangDir langDir dirName = do\n      let descPath = langDir </> dirName </> \"lang.yaml\"\n      hasDesc <- doesFileExist descPath\n      if not hasDesc\n        then return Nothing\n        else do\n          result <- LR.parseLangYamlFile descPath\n          case result of\n            Left _ -> return Nothing\n            Right (name, ext) -> return $ Just (name, Lang name ext)\n"
  },
  {
    "path": "library/Morloc/Frontend/AST.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Frontend.AST\nDescription : Query and traversal functions over the indexed 'ExprI' AST\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProvides structural queries over the indexed expression tree ('ExprI'):\nextracting module edges, exports, type definitions, signatures, sources,\nfixity declarations, and index ranges. Used by 'Restructure', 'Link',\nand 'Treeify' to inspect and manipulate the AST before code generation.\n-}\nmodule Morloc.Frontend.AST\n  ( findEdges\n  , findExport\n  , findExportSet\n  , findFixityMap\n  , setExport\n  , findSignatures\n  , findTypedefs\n  , findSignatureTypeTerms\n  , checkExprI\n  , findSources\n  , maxIndex\n  , getIndices\n  , mapTypeInExprI\n  ) where\n\nimport qualified Data.Set as Set\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Map as Map\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\n\n-- | In the DAG, the two MVar are the two keys, Import is the edge data, Expr is the node data\nfindEdges :: ExprI -> (MVar, [(MVar, Import)], ExprI)\nfindEdges e@(ExprI _ (ModE n es)) = (n, [(importModuleName i, i) | (ExprI _ (ImpE i)) <- es], e)\nfindEdges _ = error \"Expected a module\"\n\n-- | Collect all exported symbols into a flat set (ignoring indices).\nfindExportSet :: ExprI -> Set.Set Symbol\nfindExportSet e = case findExport e of\n  (ExportMany ss gs) -> Set.map snd ss `Set.union` Set.unions [Set.map snd (exportGroupMembers g) | g <- gs]\n  _ -> Set.empty\n\n-- | Extract the 'Export' declaration from a module expression.\nfindExport :: ExprI -> Export\nfindExport e0 = case f e0 of\n  (Just export) -> export\n  Nothing -> ExportMany Set.empty []\n  where\n    f (ExprI _ (ExpE export)) = Just export\n    f (ExprI i (ModE j (e : es))) = case f e of\n      (Just export) -> Just export\n      Nothing -> f (ExprI i (ModE j es))\n    f _ = Nothing\n\n-- | Replace the 'Export' declaration in a module expression.\nsetExport :: Export -> ExprI -> ExprI\nsetExport export = f\n  where\n    f (ExprI i (ExpE _)) = ExprI i (ExpE export)\n    f (ExprI i (ModE m es)) = ExprI i (ModE m (map f es))\n    f e = e\n\n-- | Collect all 'Source' declarations from a module.\nfindSources :: ExprI -> [Source]\nfindSources (ExprI _ (SrcE ss)) = [ss]\nfindSources (ExprI _ (ModE _ es)) = concatMap findSources es\nfindSources _ = []\n\n{- | Find all top-level type aliases in a module, split into general\n(language-independent) and concrete (language-specific) maps.\n-}\nfindTypedefs ::\n  ExprI ->\n  ( Map.Map TVar [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)]\n  , Map.Map Lang (Map.Map TVar [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)])\n  )\nfindTypedefs (ExprI _ (TypE (ExprTypeE Nothing v vs t d))) = (Map.singleton v [(vs, t, d, False)], Map.empty)\nfindTypedefs (ExprI _ (TypE (ExprTypeE (Just (lang, isTerminal)) v vs t d))) = (Map.empty, Map.singleton lang (Map.singleton v [(vs, t, d, isTerminal)]))\nfindTypedefs (ExprI _ (ModE _ es)) = foldl combine (Map.empty, Map.empty) (map findTypedefs es)\n  where\n    combine (g1, c1) (g2, c2) =\n      ( Map.unionWith (<>) g1 g2\n      , Map.unionWith (Map.unionWith (<>)) c1 c2\n      )\nfindTypedefs _ = (Map.empty, Map.empty)\n\n-- | Collect all non-generic type names used in signatures.\nfindSignatureTypeTerms :: ExprI -> [TVar]\nfindSignatureTypeTerms = unique . f\n  where\n    f :: ExprI -> [TVar]\n    f (ExprI _ (ModE _ es)) = concatMap f es\n    f (ExprI _ (SigE (Signature _ _ (EType t _ _ _)))) = findTypeTerms t\n    f (ExprI _ (AssE _ _ es)) = concatMap f es\n    f _ = []\n\n-- | find all the non-generic terms in an unresolved type\nfindTypeTerms :: TypeU -> [TVar]\nfindTypeTerms (VarU v@(TV x))\n  | isGeneric x = []\n  | otherwise = [v]\nfindTypeTerms (ExistU _ (ps1, _) (rs2, _)) = concatMap findTypeTerms (ps1 ++ map snd rs2)\nfindTypeTerms (NatVarU _) = []\nfindTypeTerms (ForallU _ e) = findTypeTerms e\nfindTypeTerms (FunU ts t) = concatMap findTypeTerms ts <> findTypeTerms t\nfindTypeTerms (AppU t ts) = findTypeTerms t <> concatMap findTypeTerms ts\nfindTypeTerms (NamU _ _ ps rs) = concatMap findTypeTerms (map snd rs <> ps)\nfindTypeTerms (EffectU _ t) = findTypeTerms t\nfindTypeTerms (OptionalU t) = findTypeTerms t\nfindTypeTerms (NatLitU _) = []\nfindTypeTerms (NatAddU a b) = findTypeTerms a <> findTypeTerms b\nfindTypeTerms (NatMulU a b) = findTypeTerms a <> findTypeTerms b\nfindTypeTerms (NatSubU a b) = findTypeTerms a <> findTypeTerms b\nfindTypeTerms (NatDivU a b) = findTypeTerms a <> findTypeTerms b\nfindTypeTerms (LabeledU _ t) = findTypeTerms t\n\n-- | Build the fixity map from top-level fixity declarations.\nfindFixityMap :: ExprI -> MorlocMonad (Map.Map EVar (Associativity, Int))\nfindFixityMap (ExprI _ (ModE _ es)) = do\n  -- collect all fixity terms.\n  -- these are allowed only at the top level, so no need for recursion.\n  let allTerms =\n        concat\n          [ [(op, (ass, pre)) | op <- ops]\n          | (ExprI _ (FixE (Fixity ass pre ops))) <- es\n          ]\n\n  foldlM tryAddTerm Map.empty allTerms\n  where\n    tryAddTerm ::\n      Map.Map EVar (Associativity, Int) ->\n      (EVar, (Associativity, Int)) ->\n      MorlocMonad (Map.Map EVar (Associativity, Int))\n    tryAddTerm m (k, v)\n      | Map.member k m = MM.throwSystemError $ \"Conflicting fixity definitions for\" <+> pretty k\n      | otherwise = return $ Map.insert k v m\nfindFixityMap _ = return Map.empty\n\n{- | Find type signatures that are in the scope of the input expression. Do not\ndescend recursively into declaration where statements except if the input\nexpression is a declaration.\n-}\nfindSignatures :: ExprI -> [(EVar, Maybe Label, EType)]\n-- v is the name of the type\n-- l is the optional label for the signature\n-- t is the type\nfindSignatures (ExprI _ (ModE _ es)) = [(v, l, t) | (ExprI _ (SigE (Signature v l t))) <- es]\nfindSignatures (ExprI _ (AssE _ _ es)) = [(v, l, t) | (ExprI _ (SigE (Signature v l t))) <- es]\nfindSignatures (ExprI _ (SigE (Signature v l t))) = [(v, l, t)]\nfindSignatures _ = []\n\n-- | Apply a monadic check function to every node in an 'ExprI' tree.\ncheckExprI :: (Monad m) => (ExprI -> m ()) -> ExprI -> m ()\ncheckExprI f e@(ExprI _ (ModE _ es)) = f e >> mapM_ (checkExprI f) es\ncheckExprI f e@(ExprI _ (AnnE e' _)) = f e >> checkExprI f e'\ncheckExprI f e@(ExprI _ (AssE _ e' es')) = f e >> checkExprI f e' >> mapM_ f es'\ncheckExprI f e@(ExprI _ (IstE _ _ es)) = f e >> mapM_ (checkExprI f) es\ncheckExprI f e@(ExprI _ (LamE _ e')) = f e >> checkExprI f e'\ncheckExprI f e@(ExprI _ (AppE e' es)) = f e >> checkExprI f e' >> mapM_ (checkExprI f) es\ncheckExprI f e@(ExprI _ (LstE es)) = f e >> mapM_ (checkExprI f) es\ncheckExprI f e@(ExprI _ (TupE es)) = f e >> mapM_ (checkExprI f) es\ncheckExprI f e@(ExprI _ (NamE rs)) = f e >> mapM_ (checkExprI f . snd) rs\ncheckExprI f e@(ExprI _ (BopE e1 _ _ e2)) = f e >> mapM_ (checkExprI f) [e1, e2]\ncheckExprI f e@(ExprI _ (LetE bindings body)) = f e >> mapM_ (checkExprI f . snd) bindings >> checkExprI f body\ncheckExprI f e@(ExprI _ (IfE c t el)) = f e >> mapM_ (checkExprI f) [c, t, el]\ncheckExprI f e@(ExprI _ (DoBlockE e')) = f e >> checkExprI f e'\ncheckExprI f e@(ExprI _ (EvalE e')) = f e >> checkExprI f e'\ncheckExprI f e@(ExprI _ (IntrinsicE _ es)) = f e >> mapM_ (checkExprI f) es\ncheckExprI f e@(ExprI _ (ParenE e')) = f e >> checkExprI f e'\ncheckExprI f e = f e\n\n-- | Find the largest index used in an 'ExprI' tree.\nmaxIndex :: ExprI -> Int\nmaxIndex (ExprI i (ModE _ es)) = maximum (i : map maxIndex es)\nmaxIndex (ExprI i (AnnE e _)) = max i (maxIndex e)\nmaxIndex (ExprI i (IstE _ _ es)) = maximum (i : map maxIndex es)\nmaxIndex (ExprI i (AssE _ e es)) = maximum (i : map maxIndex (e : es))\nmaxIndex (ExprI i (LamE _ e)) = max i (maxIndex e)\nmaxIndex (ExprI i (AppE e es)) = maximum (i : map maxIndex (e : es))\nmaxIndex (ExprI i (LstE es)) = maximum (i : map maxIndex es)\nmaxIndex (ExprI i (TupE es)) = maximum (i : map maxIndex es)\nmaxIndex (ExprI i (NamE rs)) = maximum (i : map (maxIndex . snd) rs)\nmaxIndex (ExprI i (ExpE ExportAll)) = i\nmaxIndex (ExprI i (ExpE (ExportMany ss gs))) = maximum (i : map fst (Set.toList ss) ++ concatMap (map fst . Set.toList . exportGroupMembers) gs)\nmaxIndex (ExprI i (BopE e1 j _ e2)) = maximum [i, j, maxIndex e1, maxIndex e2]\nmaxIndex (ExprI i (LetE bindings body)) = maximum (i : maxIndex body : map (maxIndex . snd) bindings)\nmaxIndex (ExprI i (IfE c t e)) = maximum [i, maxIndex c, maxIndex t, maxIndex e]\nmaxIndex (ExprI i (DoBlockE e)) = max i (maxIndex e)\nmaxIndex (ExprI i (EvalE e)) = max i (maxIndex e)\nmaxIndex (ExprI i (IntrinsicE _ es)) = maximum (i : map maxIndex es)\nmaxIndex (ExprI i (ParenE e)) = max i (maxIndex e)\nmaxIndex (ExprI i _) = i\n\n-- | Collect all indices from an 'ExprI' tree.\ngetIndices :: ExprI -> [Int]\ngetIndices (ExprI i (ModE _ es)) = i : concatMap getIndices es\ngetIndices (ExprI i (AnnE e _)) = i : getIndices e\ngetIndices (ExprI i (AssE _ e es)) = i : concatMap getIndices (e : es)\ngetIndices (ExprI i (IstE _ _ es)) = i : concatMap getIndices es\ngetIndices (ExprI i (LamE _ e)) = i : getIndices e\ngetIndices (ExprI i (AppE e es)) = i : concatMap getIndices (e : es)\ngetIndices (ExprI i (LstE es)) = i : concatMap getIndices es\ngetIndices (ExprI i (TupE es)) = i : concatMap getIndices es\ngetIndices (ExprI i (NamE rs)) = i : concatMap (getIndices . snd) rs\ngetIndices (ExprI i (ExpE ExportAll)) = [i]\ngetIndices (ExprI i (ExpE (ExportMany ss gs))) =\n  i\n    : [j | (j, _) <- Set.toList ss]\n    ++ concatMap (\\g -> [j | (j, _) <- Set.toList (exportGroupMembers g)]) gs\ngetIndices (ExprI i (BopE e1 j _ e2)) = [i, j] <> getIndices e1 <> getIndices e2\ngetIndices (ExprI i (LetE bindings body)) = i : concatMap (getIndices . snd) bindings <> getIndices body\ngetIndices (ExprI i (IfE c t e)) = i : concatMap getIndices [c, t, e]\ngetIndices (ExprI i (DoBlockE e)) = i : getIndices e\ngetIndices (ExprI i (EvalE e)) = i : getIndices e\ngetIndices (ExprI i (IntrinsicE _ es)) = i : concatMap getIndices es\ngetIndices (ExprI i (ParenE e)) = i : getIndices e\ngetIndices (ExprI i _) = [i]\n\n-- | Apply a type transformation to all types in signatures and type definitions.\nmapTypeInExprI :: (Monad m) => (TypeU -> TypeU) -> ExprI -> m ExprI\nmapTypeInExprI f = go\n  where\n    go (ExprI i (SigE (Signature v l (EType t cs doc labels)))) =\n      return $ ExprI i (SigE (Signature v l (EType (f t) cs doc labels)))\n    go (ExprI i (AnnE e t)) = do\n      e' <- go e\n      return $ ExprI i (AnnE e' (f t))\n    go (ExprI i (ModE m es)) = ExprI i . ModE m <$> mapM go es\n    go (ExprI i (AssE v e es)) = ExprI i <$> (AssE v <$> go e <*> mapM go es)\n    go (ExprI i (IstE cls ts es)) = ExprI i <$> (IstE cls (map f ts) <$> mapM go es)\n    go (ExprI i (LamE vs e)) = ExprI i . LamE vs <$> go e\n    go (ExprI i (AppE e es)) = ExprI i <$> (AppE <$> go e <*> mapM go es)\n    go (ExprI i (LstE es)) = ExprI i . LstE <$> mapM go es\n    go (ExprI i (TupE es)) = ExprI i . TupE <$> mapM go es\n    go (ExprI i (NamE rs)) = ExprI i . NamE <$> mapM (\\(k, e) -> (,) k <$> go e) rs\n    go (ExprI i (LetE bindings body)) = do\n      bindings' <- mapM (\\(v, e) -> (,) v <$> go e) bindings\n      body' <- go body\n      return $ ExprI i (LetE bindings' body')\n    go (ExprI i (IfE c t e)) = ExprI i <$> (IfE <$> go c <*> go t <*> go e)\n    go (ExprI i (DoBlockE e)) = ExprI i . DoBlockE <$> go e\n    go (ExprI i (EvalE e)) = ExprI i . EvalE <$> go e\n    go (ExprI i (IntrinsicE intr es)) = ExprI i . IntrinsicE intr <$> mapM go es\n    go e = return e\n"
  },
  {
    "path": "library/Morloc/Frontend/CST.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Frontend.CST\nDescription : Concrete syntax tree types for the Happy parser\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nTypes for the concrete syntax tree produced by the Happy-generated parser\n(@Parser.y@). These preserve source spans and syntactic structure before\ndesugaring into the internal 'Expr' AST.\n-}\nmodule Morloc.Frontend.CST\n  ( Span (..)\n  , Loc (..)\n  , HasPos (..)\n  , CstExpr (..)\n  , CstExport (..)\n  , CstSigType (..)\n  , CstTypeDef (..)\n  , CstClassHead (..)\n  , CstSigItem (..)\n  , CstDoStmt (..)\n  , CstAccessorBody (..)\n  , CstAccessorTail (..)\n  , at\n  , (<->)\n  , valOf\n  ) where\n\nimport Data.Scientific (Scientific)\nimport Data.Text (Text)\nimport Morloc.Frontend.Token (Located (..), Pos (..))\nimport Morloc.Namespace.Expr (Associativity (..), Import (..))\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.Type (Constraint (..), NamType (..), TypeU (..))\n\n-- | Source span: start position to end position\ndata Span = Span !Pos !Pos\n  deriving (Show, Eq)\n\n-- | Span-annotated wrapper\ndata Loc a = Loc !Span a\n  deriving (Show, Eq)\n\nvalOf :: Loc a -> a\nvalOf (Loc _ x) = x\n\n-- | Wrap a value at a single token's position\nat :: Located -> a -> Loc a\nat tok x = Loc (Span p p) x where p = locPos tok\n\n{- | Build a span between any two things that have positions.\nWorks with Located tokens and Loc-wrapped values.\n-}\nclass HasPos a where\n  startPos :: a -> Pos\n  endPos :: a -> Pos\n\ninstance HasPos Located where\n  startPos = locPos\n  endPos = locPos\n\ninstance HasPos (Loc a) where\n  startPos (Loc (Span s _) _) = s\n  endPos (Loc (Span _ e) _) = e\n\ninstance HasPos Span where\n  startPos (Span s _) = s\n  endPos (Span _ e) = e\n\n-- | Build a span from a start-positioned thing to an end-positioned thing\n(<->) :: (HasPos a, HasPos b) => a -> b -> Span\na <-> b = Span (startPos a) (endPos b)\n\ninfixl 5 <->\n\n--------------------------------------------------------------------\n-- CST node types\n--------------------------------------------------------------------\n\ndata CstExpr\n  = -- Top-level declarations\n    CModE (Maybe Text) CstExport [Loc CstExpr]\n  | CImpE Import\n  | CSigE EVar CstSigType\n  | CAssE EVar [Text] (Loc CstExpr) [Loc CstExpr]\n  | CGuardedAssE EVar [Text] [(Loc CstExpr, Loc CstExpr)] (Loc CstExpr) [Loc CstExpr]\n  | CTypE CstTypeDef\n  | CClsE CstClassHead [CstSigItem]\n  | CIstE ClassName [TypeU] [Loc CstExpr]\n  | CFixE Associativity Int [EVar]\n  | CSrcOldE Located (Maybe Text) [(Text, Maybe Text)]\n  | CSrcNewE Located (Maybe Text) [(Bool, Text, Located)]\n  | -- Expressions\n    CAppE (Loc CstExpr) [Loc CstExpr]\n  | CLamE [EVar] (Loc CstExpr)\n  | CLetE [(EVar, Loc CstExpr)] (Loc CstExpr)\n  | CBopE (Loc CstExpr) Located (Loc CstExpr)\n  | CLabeledVarE Text EVar  -- label:var (e.g., large:mean)\n  | CVarE EVar\n  | CIntE Integer\n  | CRealE Scientific\n  | CStrE Text\n  | CLogE Bool\n  | CUniE\n  | CNullE\n  | CHolE\n  | CLstE [Loc CstExpr]\n  | CTupE [Loc CstExpr]\n  | CNamE [(Key, Loc CstExpr)]\n  | CAnnE (Loc CstExpr) TypeU\n  | CDoE [CstDoStmt]\n  | CAccessorE CstAccessorBody\n  | CInterpE Text [Loc CstExpr] [Text] Text\n  | CGuardExprE [(Loc CstExpr, Loc CstExpr)] (Loc CstExpr)\n  | CForceE (Loc CstExpr)  -- ^ !expr force operator (only valid inside do-blocks)\n  | CIntrinsicE Text  -- ^ @name intrinsic reference (text is the name without @)\n  | CParenE !(Loc CstExpr)  -- ^ parenthesized expression (preserves grouping for BopE chains)\n  | CInlineE (Loc CstExpr) -- ^ %inline wrapper for source declarations\n  deriving (Show, Eq)\n\ndata CstExport\n  = CstExportAll\n  | CstExportMany [Located]\n  deriving (Show, Eq)\n\ndata CstSigType = CstSigType\n  { cstSigConstraintArgs :: Maybe [(Pos, TypeU)]\n  , cstSigArgs :: [(Pos, TypeU)]\n  }\n  deriving (Show, Eq)\n\ndata CstTypeDef\n  = CstTypeAlias (Maybe Located) (TVar, [Either (TVar, Kind) TypeU]) (TypeU, Bool)\n  | CstTypeAliasForward (TVar, [Either (TVar, Kind) TypeU])\n  | CstNamTypeWhere NamType (TVar, [Either (TVar, Kind) TypeU]) [(Located, Key, TypeU)]\n  | CstNamTypeLegacy (Maybe Located) NamType (TVar, [Either (TVar, Kind) TypeU]) (Text, Bool) [(Key, TypeU)]\n  deriving (Show, Eq)\n\ndata CstClassHead\n  = CCHSimple TypeU\n  | CCHConstrained TypeU TypeU\n  | CCHMultiConstrained [Constraint] TypeU\n  deriving (Show, Eq)\n\ndata CstSigItem = CstSigItem EVar CstSigType\n  deriving (Show, Eq)\n\ndata CstDoStmt\n  = CstDoBind EVar (Loc CstExpr)\n  | CstDoBare (Loc CstExpr)\n  | CstDoLet EVar (Loc CstExpr)\n  deriving (Show, Eq)\n\ndata CstAccessorBody\n  = CABKey Text CstAccessorTail\n  | CABIdx Int CstAccessorTail\n  | CABGroup [CstAccessorBody]\n  deriving (Show, Eq)\n\ndata CstAccessorTail\n  = CATEnd\n  | CATSet (Loc CstExpr)\n  | CATChain CstAccessorBody\n  deriving (Show, Eq)\n"
  },
  {
    "path": "library/Morloc/Frontend/Desugar.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Frontend.Desugar\nDescription : Transform the concrete syntax tree (CST) into the internal ExprI AST\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nDesugars CST nodes produced by the Happy parser into the indexed 'ExprI'\nAST used by later compiler passes. Handles: binary operator insertion,\nhole-to-lambda expansion, do-notation lowering, string interpolation,\naccessor patterns, type quantification, source resolution, and implicit\nmain wrapping.\n-}\nmodule Morloc.Frontend.Desugar\n  ( desugarProgram\n  , desugarExpr\n  , DState (..)\n  , D\n  , ParseError (..)\n  , showParseError\n  ) where\n\nimport qualified Control.Monad.State.Strict as State\nimport qualified Data.Map.Strict as Map\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Morloc.BaseTypes as BT\nimport Morloc.Frontend.CST\nimport Morloc.Frontend.Token hiding (startPos)\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.Type\nimport System.FilePath (combine, dropExtension, makeRelative, splitDirectories, takeDirectory)\n\n--------------------------------------------------------------------\n-- Desugar state and monad\n--------------------------------------------------------------------\n\ndata ParseError = ParseError\n  { pePos :: !Pos\n  , peMsg :: !String\n  , peExpected :: ![String]\n  , peSourceLines :: ![Text]\n  }\n  deriving (Show)\n\nshowParseError :: String -> ParseError -> String\nshowParseError filename (ParseError pos msg expected srcLines) =\n  let ln = posLine pos\n      col = posCol pos\n      header = filename ++ \":\" ++ show ln ++ \":\" ++ show col ++ \": \" ++ msg\n      context = formatSourceContext srcLines ln col\n      expectMsg = case cleanExpected expected of\n        [] -> \"\"\n        [x] -> \"\\n  expected \" ++ x\n        xs -> \"\\n  expected one of: \" ++ intercalate \", \" xs\n   in header ++ context ++ expectMsg\n\nformatSourceContext :: [Text] -> Int -> Int -> String\nformatSourceContext srcLines ln col\n  | ln < 1 || ln > length srcLines = \"\"\n  | otherwise =\n      let srcLine = srcLines !! (ln - 1)\n          lineNum = show ln\n          pad = replicate (length lineNum) ' '\n          pointer = replicate (col - 1) ' ' ++ \"^\"\n       in \"\\n  \"\n            ++ pad\n            ++ \" |\\n  \"\n            ++ lineNum\n            ++ \" | \"\n            ++ T.unpack srcLine\n            ++ \"\\n  \"\n            ++ pad\n            ++ \" | \"\n            ++ pointer\n\ncleanExpected :: [String] -> [String]\ncleanExpected = filter (not . isInternal) . nub . map friendlyName\n  where\n    isInternal s = s `elem` [\"VLBRACE\", \"VRBRACE\", \"VSEMI\", \"EOF\"]\n    friendlyName \"LOWER\" = \"identifier\"\n    friendlyName \"UPPER\" = \"type name\"\n    friendlyName \"OPERATOR\" = \"operator\"\n    friendlyName \"INTEGER\" = \"integer\"\n    friendlyName \"FLOAT\" = \"number\"\n    friendlyName \"STRING\" = \"string\"\n    friendlyName \"STRSTART\" = \"string\"\n    friendlyName \"STRMID\" = \"string\"\n    friendlyName \"STREND\" = \"string\"\n    friendlyName \"INTERPOPEN\" = \"'#{'\"\n    friendlyName \"INTERPCLOSE\" = \"'}'\"\n    friendlyName \"GDOT\" = \"'.'\"\n    friendlyName s = s\n\ndata DState = DState\n  { dsExpIndex :: !Int\n  , dsSourceMap :: !(Map.Map Int SrcLoc)\n  , dsDocMap :: !(Map.Map Pos [Text])\n  , dsModulePath :: !(Maybe Path)\n  , dsModuleConfig :: !ModuleConfig\n  , dsSourceLines :: ![Text]\n  , dsLangMap :: !(Map.Map Text Lang) -- alias -> Lang for all known languages\n  , dsProjectRoot :: !(Maybe Path) -- project root (directory of entry-point file)\n  , dsTermDocs :: !(Map.Map EVar [Text]) -- declaration-level docstrings\n  , dsWarnings :: ![Text] -- accumulated docstring warnings, drained by the caller\n  , dsModuleDoc :: ![Text] -- module-level description lines\n  , dsModuleEpilogues :: ![[Text]] -- epilogue blocks for top-level help\n  }\n  deriving (Show)\n\ntype D a = State.StateT DState (Either ParseError) a\n\ndfail :: Pos -> String -> D a\ndfail pos msg = do\n  srcLines <- State.gets dsSourceLines\n  State.lift (Left (ParseError pos msg [] srcLines))\n\ndwarn :: [Text] -> D ()\ndwarn [] = return ()\ndwarn ws = State.modify (\\s -> s { dsWarnings = dsWarnings s <> ws })\n\n--------------------------------------------------------------------\n-- ID generation with proper spans\n--------------------------------------------------------------------\n\nfreshIdSpan :: Span -> D Int\nfreshIdSpan (Span start end) = do\n  s <- State.get\n  let i = dsExpIndex s\n      loc = SrcLoc (Just (posFile start)) (posLine start) (posCol start) (posLine end) (posCol end)\n  State.put\n    s\n      { dsExpIndex = i + 1\n      , dsSourceMap = Map.insert i loc (dsSourceMap s)\n      }\n  return i\n\nfreshIdPos :: Pos -> D Int\nfreshIdPos p = freshIdSpan (Span p p)\n\nfreshExprSpan :: Span -> Expr -> D ExprI\nfreshExprSpan sp e = do\n  i <- freshIdSpan sp\n  return (ExprI i e)\n\nnoSrcLoc :: SrcLoc\nnoSrcLoc = SrcLoc Nothing 0 0 0 0\n\nfreshExprFrom :: ExprI -> Expr -> D ExprI\nfreshExprFrom (ExprI refId _) e = do\n  s <- State.get\n  let i = dsExpIndex s\n      loc = Map.findWithDefault noSrcLoc refId (dsSourceMap s)\n  State.put\n    s\n      { dsExpIndex = i + 1\n      , dsSourceMap = Map.insert i loc (dsSourceMap s)\n      }\n  return (ExprI i e)\n\n--------------------------------------------------------------------\n-- Docstring helpers\n--------------------------------------------------------------------\n\nlookupDocsAt :: Pos -> D [Text]\nlookupDocsAt pos = do\n  docMap <- State.gets dsDocMap\n  return (Map.findWithDefault [] pos docMap)\n\n-- | Capture declaration-level docstring lines, keyed by term name.\n-- Extracts only free description lines (the same way processArgDocLines\n-- does via docLines); key-value entries like metavar:, arg:, etc. are\n-- intentionally ignored at the declaration level since those describe\n-- type-signature interface details.\ncaptureDeclDocs :: Pos -> EVar -> D ()\ncaptureDeclDocs pos name = do\n  docs <- lookupDocsAt pos\n  vars <- processArgDocLinesD docs\n  let descLines = docLines vars\n  case descLines of\n    [] -> return ()\n    _ -> State.modify (\\s -> s { dsTermDocs = Map.insert name descLines (dsTermDocs s) })\n\n-- | Result of classifying a single docstring line.\ndata ParsedDocLine\n  = DocDirective !Text !Text  -- recognized `<key>: <value>` shape (key not yet validated against an allowlist)\n  | DocDesc !Text             -- free-form description line (possibly empty)\n  deriving (Show)\n\n-- | Classify one `--'` line into a directive or a description.\n--\n-- Rules:\n--  * A single leading space (the conventional space after `--'`) is trimmed\n--    from description lines so authors can indent list items with extra\n--    spaces. Trailing whitespace is stripped.\n--  * A leading backslash (after stripping surrounding whitespace) is an\n--    escape: it is consumed and the rest of the line becomes a description\n--    line with no directive parsing. Literal `\\foo:` therefore needs\n--    `\\\\foo:` (standard backslash-doubling convention).\n--  * Otherwise, if the line starts with `<word>:` (no spaces in `<word>`)\n--    it is returned as a DocDirective. Validation against the allowlist\n--    of known directive names is done by the caller.\nparseDocKV :: Text -> ParsedDocLine\nparseDocKV txt =\n  let stripped = T.strip txt\n      descLine = T.stripEnd $ case T.uncons txt of\n        Just (' ', rest) -> rest\n        _ -> txt\n   in case T.uncons stripped of\n        Just ('\\\\', rest) -> DocDesc (T.stripEnd rest)\n        _ -> case T.breakOn \":\" stripped of\n          (key, colonRest)\n            | not (T.null colonRest)\n            , not (T.null key)\n            , not (T.any (== ' ') key) ->\n                DocDirective key (T.strip (T.drop 1 colonRest))\n          _ -> DocDesc descLine\n\nparseCliOpt :: Text -> Maybe CliOpt\nparseCliOpt txt = case T.unpack (T.strip txt) of\n  '-' : '-' : rest@(_ : _) -> Just (CliOptLong (T.pack rest))\n  '-' : c : '/' : '-' : '-' : rest@(_ : _) -> Just (CliOptBoth c (T.pack rest))\n  '-' : c : [] -> Just (CliOptShort c)\n  _ -> Nothing\n\n-- Known directive keys recognized inside argument / signature docstrings.\nargDocDirectiveKeys :: [Text]\nargDocDirectiveKeys =\n  [\"name\", \"literal\", \"unroll\", \"default\", \"metavar\", \"arg\", \"true\", \"false\", \"return\"]\n\n-- Known directive keys recognized on `source` declarations.\nsourceDocDirectiveKeys :: [Text]\nsourceDocDirectiveKeys = [\"name\", \"rsize\"]\n\nunknownDirectiveWarning :: [Text] -> Text -> Text\nunknownDirectiveWarning knownKeys k =\n  \"warning: unknown docstring directive '\" <> k <> \"'\"\n  <> \" (recognized: \" <> T.intercalate \", \" knownKeys <> \"); \"\n  <> \"if this was intended as prose, prefix the line with '\\\\' to suppress this warning (e.g. '\\\\\"\n  <> k <> \":')\"\n\nprocessArgDocLines :: [Text] -> ([Text], ArgDocVars)\nprocessArgDocLines = foldl step ([], defaultValue)\n  where\n    step (ws, d) line = case parseDocKV line of\n      DocDesc v\n        | T.null v -> (ws, d)\n        | otherwise -> (ws, d {docLines = docLines d <> [v]})\n      DocDirective k v -> case k of\n        \"name\" -> (ws, d {docName = Just v})\n        \"literal\" -> (ws, d {docLiteral = Just (parseDocBool v)})\n        \"unroll\" -> (ws, d {docUnroll = Just (parseDocBool v)})\n        \"default\" -> (ws, d {docDefault = Just v})\n        \"metavar\" -> (ws, d {docMetavar = Just v})\n        \"arg\" -> (ws, d {docArg = parseCliOpt v})\n        \"true\" -> (ws, d {docTrue = parseCliOpt v})\n        \"false\" -> (ws, d {docFalse = parseCliOpt v})\n        \"return\" -> (ws, d {docReturn = Just v})\n        _ ->\n          let w = unknownDirectiveWarning argDocDirectiveKeys k\n              desc = k <> \": \" <> v\n           in (ws <> [w], d {docLines = docLines d <> [desc]})\n\nparseDocBool :: Text -> Bool\nparseDocBool v = v == \"true\" || v == \"True\"\n\nprocessModuleDocLines :: [Text] -> ([Text], [Text], [[Text]])\nprocessModuleDocLines = finalize . foldl step ([], Nothing, [])\n  where\n    step (desc, curEpi, epis) line = case parseDocKV line of\n      DocDesc v -> case curEpi of\n        Nothing -> (desc <> [v], Nothing, epis)\n        Just epi -> (desc, Just (epi <> [v]), epis)\n      DocDirective k _v -> case k of\n        \"epilogue\" ->\n          let epis' = case curEpi of\n                Nothing -> epis\n                Just epi -> epis <> [epi]\n          in (desc, Just [], epis')\n        _ ->\n          let line' = k <> \": \" <> _v\n          in case curEpi of\n            Nothing -> (desc <> [line'], Nothing, epis)\n            Just epi -> (desc, Just (epi <> [line']), epis)\n\n    finalize (desc, curEpi, epis) =\n      let epis' = case curEpi of\n            Nothing -> epis\n            Just epi -> epis <> [epi]\n      in ([], desc, epis')\n\napplySourceDocs :: [Text] -> Source -> ([Text], Source)\napplySourceDocs lns src = foldl step ([], src) lns\n  where\n    step (ws, s) line = case parseDocKV line of\n      DocDesc v\n        | T.null v -> (ws, s)\n        | otherwise -> (ws, s {srcNote = srcNote s <> [v]})\n      DocDirective k v -> case k of\n        \"name\" -> (ws, s {srcName = SrcName v})\n        \"rsize\" -> (ws, s {srcRsize = mapMaybe readMaybeInt (T.words v)})\n        _ ->\n          let w = unknownDirectiveWarning sourceDocDirectiveKeys k\n              desc = k <> \": \" <> v\n           in (ws <> [w], s {srcNote = srcNote s <> [desc]})\n    readMaybeInt t = case reads (T.unpack t) of\n      [(n, \"\")] -> Just n\n      _ -> Nothing\n\n-- | D-monad wrapper: parse argument docstring lines and accumulate any\n-- warnings into 'dsWarnings' for the caller to drain.\nprocessArgDocLinesD :: [Text] -> D ArgDocVars\nprocessArgDocLinesD ls = do\n  let (ws, v) = processArgDocLines ls\n  dwarn ws\n  return v\n\n-- | D-monad wrapper: apply `source` docstring lines and accumulate warnings.\napplySourceDocsD :: [Text] -> Source -> D Source\napplySourceDocsD ls src = do\n  let (ws, s) = applySourceDocs ls src\n  dwarn ws\n  return s\n\n--------------------------------------------------------------------\n-- Type helpers\n--------------------------------------------------------------------\n\nforallWrap :: [TVar] -> TypeU -> TypeU\nforallWrap [] t = t\nforallWrap (v : vs) t = ForallU v (forallWrap vs t)\n\n-- | Extract LabeledU wrappers from function argument types.\n-- Returns a map from nat var name to argument position index,\n-- and the type with all LabeledU stripped.\nextractLabels :: TypeU -> (Map.Map TVar Int, TypeU)\nextractLabels = go\n  where\n    go (ForallU v t) = let (labels, t') = go t in (labels, ForallU v t')\n    go (FunU args ret) =\n      let (labels, args') = extractFromArgs 0 args\n          ret' = stripLabels ret\n       in (labels, FunU args' ret')\n    go t = (Map.empty, stripLabels t)\n\n    extractFromArgs :: Int -> [TypeU] -> (Map.Map TVar Int, [TypeU])\n    extractFromArgs _ [] = (Map.empty, [])\n    extractFromArgs idx (LabeledU v inner : rest) =\n      let (labels, rest') = extractFromArgs (idx + 1) rest\n       in (Map.insert v idx labels, stripLabels inner : rest')\n    extractFromArgs idx (t : rest) =\n      let (labels, rest') = extractFromArgs (idx + 1) rest\n       in (labels, stripLabels t : rest')\n\n    stripLabels :: TypeU -> TypeU\n    stripLabels (LabeledU _ t) = stripLabels t\n    stripLabels (ForallU v t) = ForallU v (stripLabels t)\n    stripLabels (FunU ts t) = FunU (map stripLabels ts) (stripLabels t)\n    stripLabels (AppU t ts) = AppU (stripLabels t) (map stripLabels ts)\n    stripLabels (NamU o v ps rs) = NamU o v (map stripLabels ps) [(k, stripLabels t) | (k, t) <- rs]\n    stripLabels (EffectU effs t) = EffectU effs (stripLabels t)\n    stripLabels (OptionalU t) = OptionalU (stripLabels t)\n    stripLabels (NatAddU a b) = NatAddU (stripLabels a) (stripLabels b)\n    stripLabels (NatMulU a b) = NatMulU (stripLabels a) (stripLabels b)\n    stripLabels (NatSubU a b) = NatSubU (stripLabels a) (stripLabels b)\n    stripLabels (NatDivU a b) = NatDivU (stripLabels a) (stripLabels b)\n    stripLabels (ExistU v (ps, pc) (rs, rc)) = ExistU v (map stripLabels ps, pc) (map (second stripLabels) rs, rc)\n    stripLabels t = t\n\nquantifyType :: TypeU -> TypeU\nquantifyType t =\n  let natVars = collectNatVars t\n      t' = promoteNatVars natVars t\n      typeVars = nub (collectGenVars t')\n   in forallWrap typeVars t'\n  where\n    -- Collect type variables (excluding NatVarU which are already promoted)\n    collectGenVars :: TypeU -> [TVar]\n    collectGenVars (VarU v@(TV name))\n      | not (T.null name), isLower (T.head name) = [v]\n      | otherwise = []\n    collectGenVars (NatVarU _) = []\n    collectGenVars (ForallU v inner) = filter (/= v) (collectGenVars inner)\n    collectGenVars (AppU f args) = collectGenVars f ++ concatMap collectGenVars args\n    collectGenVars (FunU args ret) = concatMap collectGenVars args ++ collectGenVars ret\n    collectGenVars (NamU _ _ ts entries) = concatMap collectGenVars ts ++ concatMap (collectGenVars . snd) entries\n    collectGenVars (EffectU _ inner) = collectGenVars inner\n    collectGenVars (OptionalU inner) = collectGenVars inner\n    collectGenVars (NatLitU _) = []\n    collectGenVars (NatAddU a b) = collectGenVars a ++ collectGenVars b\n    collectGenVars (NatMulU a b) = collectGenVars a ++ collectGenVars b\n    collectGenVars (NatSubU a b) = collectGenVars a ++ collectGenVars b\n    collectGenVars (NatDivU a b) = collectGenVars a ++ collectGenVars b\n    collectGenVars (LabeledU _ inner) = collectGenVars inner\n    collectGenVars _ = []\n\n    -- Collect variables that appear in nat-kinded positions:\n    -- Only inside NatAddU, NatMulU, NatSubU, NatDivU.\n    -- Typedef-based detection (e.g., Tensor2 params) is handled by refineKinds.\n    collectNatVars :: TypeU -> Set.Set TVar\n    collectNatVars = go False\n      where\n        go inNat (VarU v@(TV name))\n          | inNat, not (T.null name), isLower (T.head name) = Set.singleton v\n          | otherwise = Set.empty\n        go _ (NatVarU v) = Set.singleton v\n        go _ (ForallU _ inner) = go False inner\n        go _ (AppU f args) = go False f <> Set.unions (map (go False) args)\n        go _ (FunU args ret) = Set.unions (map (go False) args) <> go False ret\n        go _ (NamU _ _ ts entries) = Set.unions (map (go False) ts) <> Set.unions (map (go False . snd) entries)\n        go _ (EffectU _ inner) = go False inner\n        go _ (OptionalU inner) = go False inner\n        go _ (NatLitU _) = Set.empty\n        go _ (NatAddU a b) = go True a <> go True b\n        go _ (NatMulU a b) = go True a <> go True b\n        go _ (NatSubU a b) = go True a <> go True b\n        go _ (NatDivU a b) = go True a <> go True b\n        go inNat (LabeledU _ inner) = go inNat inner\n        go _ _ = Set.empty\n\n-- | Promote VarU to NatVarU for variables identified as nat-kinded\npromoteNatVars :: Set.Set TVar -> TypeU -> TypeU\npromoteNatVars natVars = go\n  where\n    go (VarU v)\n      | Set.member v natVars = NatVarU v\n      | otherwise = VarU v\n    go t@(NatVarU _) = t\n    go (ExistU v (ps, pc) (rs, rc)) = ExistU v (map go ps, pc) (map (second go) rs, rc)\n    go (ForallU v t) = ForallU v (go t)\n    go (FunU ts t) = FunU (map go ts) (go t)\n    go (AppU t ts) = AppU (go t) (map go ts)\n    go (NamU o n ps rs) = NamU o n (map go ps) [(k, go t) | (k, t) <- rs]\n    go (EffectU effs t) = EffectU effs (go t)\n    go (OptionalU t) = OptionalU (go t)\n    go t@(NatLitU _) = t\n    go (NatAddU a b) = NatAddU (go a) (go b)\n    go (NatMulU a b) = NatMulU (go a) (go b)\n    go (NatSubU a b) = NatSubU (go a) (go b)\n    go (NatDivU a b) = NatDivU (go a) (go b)\n    go (LabeledU n t) = LabeledU n (go t)\n\nparseLang :: Located -> D Lang\nparseLang tok = do\n  langs <- State.gets dsLangMap\n  case Map.lookup (T.toLower name) langs of\n    Just lang -> return lang\n    Nothing -> dfail (locPos tok) (\"unknown language: \" ++ T.unpack name)\n  where\n    name = getName' tok\n\ngetName' :: Located -> Text\ngetName' (Located _ (TokLowerName n) _) = n\ngetName' (Located _ (TokUpperName n) _) = n\ngetName' (Located _ _ t) = t\n\n--------------------------------------------------------------------\n-- Constraint extraction\n--------------------------------------------------------------------\n\nextractConstraints :: TypeU -> D [Constraint]\nextractConstraints (AppU (VarU (TV name)) args) =\n  return [Constraint (ClassName name) args]\nextractConstraints (VarU (TV name)) =\n  return [Constraint (ClassName name) []]\nextractConstraints (NamU NamRecord _ _ _) =\n  dfail (Pos 0 0 \"\") \"invalid constraint syntax\"\nextractConstraints t =\n  case flattenTupleConstraint t of\n    Just cs -> return cs\n    Nothing -> dfail (Pos 0 0 \"\") (\"invalid constraint: \" ++ show t)\n\nflattenTupleConstraint :: TypeU -> Maybe [Constraint]\nflattenTupleConstraint (AppU (VarU (TV name)) args)\n  | T.isPrefixOf \"Tuple\" name = mapM typeToConstraint args\n  | otherwise = Just [Constraint (ClassName name) args]\nflattenTupleConstraint (VarU (TV name)) =\n  Just [Constraint (ClassName name) []]\nflattenTupleConstraint _ = Nothing\n\ntypeToConstraint :: TypeU -> Maybe Constraint\ntypeToConstraint (AppU (VarU (TV name)) args) =\n  Just (Constraint (ClassName name) args)\ntypeToConstraint (VarU (TV name)) =\n  Just (Constraint (ClassName name) [])\ntypeToConstraint _ = Nothing\n\nextractClassDef :: TypeU -> D (ClassName, [TVar])\nextractClassDef (AppU (VarU (TV name)) args) = do\n  tvs <- mapM typeToTVar args\n  return (ClassName name, tvs)\nextractClassDef (VarU (TV name)) =\n  return (ClassName name, [])\nextractClassDef _ = dfail (Pos 0 0 \"\") \"invalid class head\"\n\ntypeToTVar :: TypeU -> D TVar\ntypeToTVar (VarU tv) = return tv\ntypeToTVar _ = dfail (Pos 0 0 \"\") \"expected type variable in class head\"\n\n--------------------------------------------------------------------\n-- Signature and type construction\n--------------------------------------------------------------------\n\nargsToType :: [(Pos, TypeU)] -> TypeU\nargsToType [] = BT.unitU\nargsToType [(_, t)] = t\nargsToType ts = FunU (map snd (init ts)) (snd (last ts))\n\ndesugarSigType :: Pos -> CstSigType -> D ([Constraint], [ArgDocVars], TypeU)\ndesugarSigType _pos (CstSigType (Just constraintArgs) args) = do\n  cs <- extractConstraints (argsToType constraintArgs)\n  argDocs <- mapM (\\(p, _) -> lookupDocsAt p) args\n  argDocVars <- mapM processArgDocLinesD argDocs\n  return (cs, argDocVars, argsToType args)\ndesugarSigType _pos (CstSigType Nothing args) = do\n  argDocs <- mapM (\\(p, _) -> lookupDocsAt p) args\n  argDocVars <- mapM processArgDocLinesD argDocs\n  return ([], argDocVars, argsToType args)\n\ndesugarTableEntries :: NamType -> [(Key, TypeU)] -> [(Key, TypeU)]\ndesugarTableEntries NamTable entries = [(k, wrapList t) | (k, t) <- entries]\n  where\n    wrapList (ForallU v t) = ForallU v (wrapList t)\n    wrapList t = BT.listU t\ndesugarTableEntries _ entries = entries\n\nresolveSourceFile :: Maybe Path -> Maybe Text -> Maybe Path\nresolveSourceFile modulePath srcFile =\n  case (modulePath, srcFile) of\n    (Just f, Just srcfile') -> Just $ combine (takeDirectory f) (T.unpack srcfile')\n    (Just _, Nothing) -> Nothing\n    (Nothing, s) -> fmap T.unpack s\n\n--------------------------------------------------------------------\n-- Intrinsic resolution\n--------------------------------------------------------------------\n\nresolveIntrinsic :: Pos -> Text -> D Intrinsic\nresolveIntrinsic pos name = case parseIntrinsic name of\n  Just intr -> return intr\n  Nothing -> dfail pos (\"unknown intrinsic: @\" ++ T.unpack name)\n\n--------------------------------------------------------------------\n-- Accessor resolution\n--------------------------------------------------------------------\n\ndata AccessorResult\n  = ARGetter Selector\n  | ARSetter Selector [ExprI]\n\nbuildAccessor :: Span -> CstAccessorBody -> D ExprI\nbuildAccessor sp body = do\n  desBody <- desugarAccessorBody body\n  result <- resolveBody desBody\n  case result of\n    ARGetter sel -> freshExprSpan sp (PatE (PatternStruct sel))\n    ARSetter sel vals -> do\n      patI <- freshExprSpan sp (PatE (PatternStruct sel))\n      lamI <- freshIdSpan sp\n      let v = EV (\".setter_\" <> T.pack (show lamI))\n      vArg <- freshExprSpan sp (VarE defaultValue v)\n      appI <- freshExprSpan sp (AppE patI (vArg : vals))\n      return (ExprI lamI (LamE [v] appI))\n\n-- Intermediate accessor types (with ExprI values after desugaring)\ndata IAccessorBody\n  = IABKey Text IAccessorTail\n  | IABIdx Int IAccessorTail\n  | IABGroup [IAccessorBody]\n\ndata IAccessorTail\n  = IATEnd\n  | IATSet ExprI\n  | IATChain IAccessorBody\n\ndesugarAccessorBody :: CstAccessorBody -> D IAccessorBody\ndesugarAccessorBody (CABKey name tail') = IABKey name <$> desugarAccessorTail tail'\ndesugarAccessorBody (CABIdx idx tail') = IABIdx idx <$> desugarAccessorTail tail'\ndesugarAccessorBody (CABGroup bodies) = IABGroup <$> mapM desugarAccessorBody bodies\n\ndesugarAccessorTail :: CstAccessorTail -> D IAccessorTail\ndesugarAccessorTail CATEnd = return IATEnd\ndesugarAccessorTail (CATSet e) = IATSet <$> desugarExpr e\ndesugarAccessorTail (CATChain body) = IATChain <$> desugarAccessorBody body\n\nresolveBody :: IAccessorBody -> D AccessorResult\nresolveBody (IABKey name tail') = do\n  inner <- resolveTail tail'\n  return (wrapKey name inner)\nresolveBody (IABIdx idx tail') = do\n  inner <- resolveTail tail'\n  return (wrapIdx idx inner)\nresolveBody (IABGroup entries) = resolveGroup entries\n\nresolveTail :: IAccessorTail -> D AccessorResult\nresolveTail IATEnd = return (ARGetter SelectorEnd)\nresolveTail (IATSet expr) = return (ARSetter SelectorEnd [expr])\nresolveTail (IATChain body) = resolveBody body\n\nwrapKey :: Text -> AccessorResult -> AccessorResult\nwrapKey name (ARGetter sel) = ARGetter (SelectorKey (name, sel) [])\nwrapKey name (ARSetter sel vals) = ARSetter (SelectorKey (name, sel) []) vals\n\nwrapIdx :: Int -> AccessorResult -> AccessorResult\nwrapIdx idx (ARGetter sel) = ARGetter (SelectorIdx (idx, sel) [])\nwrapIdx idx (ARSetter sel vals) = ARSetter (SelectorIdx (idx, sel) []) vals\n\nresolveGroup :: [IAccessorBody] -> D AccessorResult\nresolveGroup bodies = do\n  results <- mapM resolveBody bodies\n  let getters = [s | ARGetter s <- results]\n      setterPairs = [(s, vs) | ARSetter s vs <- results]\n  case (getters, setterPairs) of\n    (gs, []) -> return (ARGetter (mergeSelectors gs))\n    ([], ss) -> return (ARSetter (mergeSelectors (map fst ss)) (concatMap snd ss))\n    _ -> dfail (Pos 0 0 \"\") \"cannot mix getter and setter entries in .()\"\n\nmergeSelectors :: [Selector] -> Selector\nmergeSelectors [] = SelectorEnd\nmergeSelectors [s] = s\nmergeSelectors sels =\n  let idxEntries = concat [s : ss | SelectorIdx s ss <- sels]\n      keyEntries = concat [s : ss | SelectorKey s ss <- sels]\n   in case (idxEntries, keyEntries) of\n        (is, []) -> case is of [] -> SelectorEnd; (x : xs) -> SelectorIdx x xs\n        ([], (x : xs)) -> SelectorKey x xs\n        _ -> error \"Cannot mix key and index selectors in getter\"\n\n--------------------------------------------------------------------\n-- Do-notation desugaring\n--------------------------------------------------------------------\n\n-- Desugar a do-block to a let-chain. Non-final bare statements and <- binds\n-- are wrapped in EvalE so the typechecker sees them as forced effects (pure\n-- non-finals are therefore rejected). The final bare statement is returned\n-- unwrapped so synthE DoBlockS can flatten it (if effectful) or let tryCoerce\n-- lift it (if pure).\ndesugarDo :: Span -> [CstDoStmt] -> D ExprI\ndesugarDo sp [] = dfail (startPos sp) \"empty do block\"\ndesugarDo _sp [CstDoBare e] = desugarExpr e\ndesugarDo sp [CstDoBind _ _] = dfail (startPos sp) \"do block cannot end with a bind (<-)\"\ndesugarDo sp [CstDoLet _ _] = dfail (startPos sp) \"do block cannot end with a let binding\"\ndesugarDo sp (CstDoLet v e : rest) = do\n  e' <- desugarExpr e\n  restE <- desugarDo sp rest\n  freshExprSpan sp (LetE [(v, e')] restE)\ndesugarDo sp (CstDoBind v e : rest) = do\n  e' <- desugarExpr e\n  forceE <- freshExprSpan sp (EvalE e')\n  restE <- desugarDo sp rest\n  freshExprSpan sp (LetE [(v, forceE)] restE)\ndesugarDo sp (CstDoBare e : rest) = do\n  idx <- freshIdSpan sp\n  let discardVar = EV (\"_do_\" <> T.pack (show idx))\n  e' <- desugarExpr e\n  forceE <- freshExprSpan sp (EvalE e')\n  restE <- desugarDo sp rest\n  freshExprSpan sp (LetE [(discardVar, forceE)] restE)\n\n--------------------------------------------------------------------\n-- Interpolation desugaring\n--------------------------------------------------------------------\n\nmkInterpString :: Span -> Text -> [ExprI] -> [Text] -> Text -> D ExprI\nmkInterpString sp startText exprs mids endText = do\n  let suffixes = mids ++ [endText]\n  patI <- freshExprSpan sp (PatE (PatternText startText suffixes))\n  freshExprSpan sp (AppE patI exprs)\n\n--------------------------------------------------------------------\n-- Implicit main wrapping\n--------------------------------------------------------------------\n\nmkImplicitMain :: [ExprI] -> D [ExprI]\nmkImplicitMain es = do\n  modI <- freshIdPos (Pos 0 0 \"\")\n  return [ExprI modI (ModE (MV \"main\") es)]\n\n--------------------------------------------------------------------\n-- Expression desugaring: Loc CstExpr -> D ExprI\n--------------------------------------------------------------------\n\ndesugarExpr :: Loc CstExpr -> D ExprI\n-- Variables and literals\ndesugarExpr (Loc sp (CLabeledVarE label v)) = do\n  moduleConfig <- State.gets dsModuleConfig\n  case Map.lookup label (moduleConfigLabeledGroups moduleConfig) of\n    Just config -> freshExprSpan sp (VarE config v)\n    Nothing -> dfail (startPos sp)\n      (\"Undefined label '\" ++ T.unpack label\n       ++ \"': no matching entry in module config labeled-groups\")\ndesugarExpr (Loc sp (CVarE v)) = freshExprSpan sp (VarE defaultValue v)\ndesugarExpr (Loc sp (CIntE n)) = freshExprSpan sp (IntE n)\ndesugarExpr (Loc sp (CRealE n)) = freshExprSpan sp (RealE n)\ndesugarExpr (Loc sp (CStrE s)) = freshExprSpan sp (StrE s)\ndesugarExpr (Loc sp (CLogE b)) = freshExprSpan sp (LogE b)\ndesugarExpr (Loc sp CUniE) = freshExprSpan sp UniE\ndesugarExpr (Loc sp CNullE) = freshExprSpan sp NullE\ndesugarExpr (Loc sp CHolE) = freshExprSpan sp HolE\n-- Intrinsics: eta-expand when under-applied so they behave as first-class functions\ndesugarExpr (Loc sp (CIntrinsicE name)) = do\n  intr <- resolveIntrinsic (startPos sp) name\n  etaExpandIntrinsic sp intr []\ndesugarExpr (Loc sp (CAppE (Loc _ (CIntrinsicE name)) args)) = do\n  intr <- resolveIntrinsic (startPos sp) name\n  args' <- mapM desugarExpr args\n  etaExpandIntrinsic sp intr args'\n-- Compound expressions\ndesugarExpr (Loc _ (CAppE f args)) = do\n  f' <- desugarExpr f\n  args' <- mapM desugarExpr args\n  freshExprFrom f' (AppE f' args')\ndesugarExpr (Loc sp (CLamE vs body)) = do\n  body' <- desugarExpr body\n  freshExprSpan sp (LamE vs body')\ndesugarExpr (Loc sp (CLetE bindings body)) = do\n  bindings' <- mapM (\\(v, e) -> do e' <- desugarExpr e; return (v, e')) bindings\n  body' <- desugarExpr body\n  freshExprSpan sp (LetE bindings' body')\ndesugarExpr (Loc sp (CParenE inner@(Loc _ CBopE{}))) = do\n  inner' <- desugarExpr inner\n  freshExprSpan sp (ParenE inner')\ndesugarExpr (Loc _ (CParenE inner)) = desugarExpr inner\ndesugarExpr (Loc _ (CBopE lhs opTok rhs)) = do\n  lhs' <- desugarExpr lhs\n  rhs' <- desugarExpr rhs\n  opI <- freshIdSpan (Span (locPos opTok) (locPos opTok))\n  freshExprSpan (Span (locPos opTok) (locPos opTok)) (BopE lhs' opI (tokToEVar opTok) rhs')\ndesugarExpr (Loc sp (CLstE es)) = do\n  es' <- mapM desugarExpr es\n  freshExprSpan sp (LstE es')\ndesugarExpr (Loc sp (CTupE es)) = do\n  es' <- mapM desugarExpr es\n  freshExprSpan sp (TupE es')\ndesugarExpr (Loc sp (CNamE entries)) = do\n  entries' <- mapM (\\(k, e) -> do e' <- desugarExpr e; return (k, e')) entries\n  freshExprSpan sp (NamE entries')\ndesugarExpr (Loc sp (CAnnE e t)) = do\n  e' <- desugarExpr e\n  freshExprSpan sp (AnnE e' (quantifyType t))\ndesugarExpr (Loc sp (CDoE stmts)) = do\n  body <- desugarDo sp stmts\n  freshExprSpan sp (DoBlockE body)\ndesugarExpr (Loc sp (CAccessorE body)) = buildAccessor sp body\ndesugarExpr (Loc sp (CInterpE startText exprs mids endText)) = do\n  exprs' <- mapM desugarExpr exprs\n  mkInterpString sp startText exprs' mids endText\ndesugarExpr (Loc sp (CGuardExprE guards defaultExpr)) = desugarGuards sp guards defaultExpr\ndesugarExpr (Loc sp (CForceE e)) = do\n  e' <- desugarExpr e\n  freshExprSpan sp (EvalE e')\n\n-- Top-level declarations should not appear inside expressions\ndesugarExpr (Loc _ CModE{}) = error \"desugarExpr: unexpected CModE in expression position\"\ndesugarExpr (Loc _ (CImpE {})) = error \"desugarExpr: unexpected CImpE in expression position\"\ndesugarExpr (Loc _ (CSigE {})) = error \"desugarExpr: unexpected CSigE in expression position\"\ndesugarExpr (Loc _ (CAssE {})) = error \"desugarExpr: unexpected CAssE in expression position\"\ndesugarExpr (Loc _ (CTypE {})) = error \"desugarExpr: unexpected CTypE in expression position\"\ndesugarExpr (Loc _ (CClsE {})) = error \"desugarExpr: unexpected CClsE in expression position\"\ndesugarExpr (Loc _ (CIstE {})) = error \"desugarExpr: unexpected CIstE in expression position\"\ndesugarExpr (Loc _ (CFixE {})) = error \"desugarExpr: unexpected CFixE in expression position\"\ndesugarExpr (Loc _ (CSrcOldE {})) = error \"desugarExpr: unexpected CSrcOldE in expression position\"\ndesugarExpr (Loc _ (CSrcNewE {})) = error \"desugarExpr: unexpected CSrcNewE in expression position\"\ndesugarExpr (Loc _ (CGuardedAssE {})) = error \"desugarExpr: unexpected CGuardedAssE in expression position\"\ndesugarExpr (Loc _ (CInlineE {})) = error \"desugarExpr: unexpected CInlineE in expression position\"\n\n-- | Wrap an intrinsic in a lambda if it has fewer args than its arity.\n-- Fully applied intrinsics pass through as IntrinsicE nodes.\netaExpandIntrinsic :: Span -> Intrinsic -> [ExprI] -> D ExprI\netaExpandIntrinsic sp intr args = do\n  let arity = intrinsicArity intr\n      actual = length args\n  if actual >= arity\n    then freshExprSpan sp (IntrinsicE intr args)\n    else do\n      idx <- freshIdSpan sp\n      let remaining = arity - actual\n          vars = [EV (\"_intr_\" <> T.pack (show idx) <> \"_\" <> T.pack (show j)) | j <- [0..remaining-1]]\n      varExprs <- mapM (\\v -> freshExprSpan sp (VarE defaultValue v)) vars\n      intrExpr <- freshExprSpan sp (IntrinsicE intr (args ++ varExprs))\n      freshExprSpan sp (LamE vars intrExpr)\n\n\n--------------------------------------------------------------------\n-- Top-level declaration desugaring\n--------------------------------------------------------------------\n\n-- | Infer a dot-prefixed module name from a file path relative to the project root.\n-- e.g., projectRoot=/project, filePath=/project/lib/math/main.loc -> \".lib.math\"\ninferModuleName :: Path -> Path -> Text\ninferModuleName projectRoot filePath =\n  let relPath = makeRelative projectRoot filePath\n      parts = splitDirectories relPath\n      -- Strip .loc extension from the last component\n      cleaned = case parts of\n        [] -> [\"main\"]\n        _ -> init parts ++ [dropExtension (last parts)]\n      -- Strip trailing \"main\" for directory modules (but not if it's the only component)\n      stripped = case cleaned of\n        xs | length xs > 1 && last xs == \"main\" -> init xs\n        xs -> xs\n  in \".\" <> T.intercalate \".\" (map T.pack stripped)\n\ndesugarTopLevel :: Loc CstExpr -> D [ExprI]\ndesugarTopLevel (Loc sp (CModE maybeName export body)) = do\n  name <- case maybeName of\n    Just n -> return n\n    Nothing -> do\n      modPath <- State.gets dsModulePath\n      projRoot <- State.gets dsProjectRoot\n      case (modPath, projRoot) of\n        (Just mp, Just pr) -> return (inferModuleName pr mp)\n        _ -> dfail (startPos sp) \"nameless module requires a file path and project root\"\n  -- capture module-level docstrings (--' lines before module keyword)\n  docs <- lookupDocsAt (startPos sp)\n  let (_warns, desc, epis) = processModuleDocLines docs\n  State.modify $ \\s -> s\n    { dsModuleDoc = desc\n    , dsModuleEpilogues = epis\n    }\n  expExprI <- desugarExport sp export\n  bodyExprs <- concatMapM desugarTopLevel body\n  modI <- freshIdSpan sp\n  return [ExprI modI (ModE (MV name) (expExprI : bodyExprs))]\ndesugarTopLevel (Loc sp (CImpE imp)) = do\n  e <- freshExprSpan sp (ImpE imp)\n  return [e]\ndesugarTopLevel (Loc sp (CSigE name sigType)) = do\n  docs <- lookupDocsAt (startPos sp)\n  cmdDoc <- processArgDocLinesD docs\n  (cs, argDocs, t) <- desugarSigType (startPos sp) sigType\n  let t' = quantifyType t\n      doc = ArgDocSig cmdDoc (init argDocs) (last argDocs)\n      (labels, t'') = extractLabels t'\n      et = EType t'' (Set.fromList cs) doc labels\n  e <- freshExprSpan sp (SigE (Signature name Nothing et))\n  return [e]\ndesugarTopLevel (Loc sp (CAssE name params body whereDecls)) = do\n  captureDeclDocs (startPos sp) name\n  body' <- desugarExpr body\n  whereDecls' <- concatMapM desugarTopLevel whereDecls\n  e <- case params of\n    [] -> freshExprSpan sp (AssE name body' whereDecls')\n    vs -> do\n      lam <- freshExprSpan sp (LamE (map EV vs) body')\n      freshExprSpan sp (AssE name lam whereDecls')\n  return [e]\ndesugarTopLevel (Loc sp (CGuardedAssE name params guards defaultExpr whereDecls)) = do\n  captureDeclDocs (startPos sp) name\n  body' <- desugarGuards sp guards defaultExpr\n  whereDecls' <- concatMapM desugarTopLevel whereDecls\n  e <- case params of\n    [] -> freshExprSpan sp (AssE name body' whereDecls')\n    vs -> do\n      lam <- freshExprSpan sp (LamE (map EV vs) body')\n      freshExprSpan sp (AssE name lam whereDecls')\n  return [e]\ndesugarTopLevel (Loc sp (CTypE td)) = desugarTypeDef sp td\ndesugarTopLevel (Loc sp (CClsE classHead sigs)) = do\n  (cs, cn, vs) <- desugarClassHead classHead\n  sigs' <- mapM desugarSigItem sigs\n  e <- freshExprSpan sp (ClsE (Typeclass cs cn vs sigs'))\n  return [e]\ndesugarTopLevel (Loc sp (CIstE cn types body)) = do\n  bodyExprs <- concatMapM desugarTopLevel body\n  e <- freshExprSpan sp (IstE cn (map quantifyType types) bodyExprs)\n  return [e]\ndesugarTopLevel (Loc sp (CFixE assoc prec ops)) = do\n  e <- freshExprSpan sp (FixE (Fixity assoc prec ops))\n  return [e]\ndesugarTopLevel (Loc sp (CSrcOldE langTok srcFile items)) = do\n  lang <- parseLang langTok\n  modPath <- State.gets dsModulePath\n  let path = resolveSourceFile modPath srcFile\n  mapM (mkOldSource sp lang path) items\ndesugarTopLevel (Loc sp (CSrcNewE langTok srcFile nameTuples)) = do\n  lang <- parseLang langTok\n  modPath <- State.gets dsModulePath\n  let path = resolveSourceFile modPath srcFile\n  mapM (mkNewSource sp lang path) nameTuples\ndesugarTopLevel (Loc _ (CInlineE inner)) = do\n  exprs <- desugarTopLevel inner\n  return (map markSourceInline exprs)\n  where\n    markSourceInline (ExprI i (SrcE src)) = ExprI i (SrcE src { srcInline = True })\n    markSourceInline e = e -- %inline on non-source definitions is not yet implemented\n\n-- Expression-level CST nodes should not appear at top level\ndesugarTopLevel node = do\n  e <- desugarExpr node\n  return [e]\n\n--------------------------------------------------------------------\n-- Guard desugaring\n--------------------------------------------------------------------\n\n-- | Desugar guard clauses with an explicit default into nested IfE expressions.\n-- ? cond1 = body1 ? cond2 = body2 : defaultBody\n-- becomes: IfE cond1 body1 (IfE cond2 body2 defaultBody)\ndesugarGuards :: Span -> [(Loc CstExpr, Loc CstExpr)] -> Loc CstExpr -> D ExprI\ndesugarGuards _ [] defaultExpr = desugarExpr defaultExpr\ndesugarGuards sp ((cond, body) : rest) defaultExpr = do\n  cond' <- desugarExpr cond\n  body' <- desugarExpr body\n  elseE <- desugarGuards sp rest defaultExpr\n  freshExprSpan sp (IfE cond' body' elseE)\n\n--------------------------------------------------------------------\n-- Export desugaring\n--------------------------------------------------------------------\n\ndesugarExport :: Span -> CstExport -> D ExprI\ndesugarExport sp CstExportAll = freshExprSpan sp (ExpE ExportAll)\ndesugarExport sp (CstExportMany locs) = do\n  items <- mapM (\\tok -> do i <- freshIdPos (locPos tok); return (i, symVal' tok)) locs\n  freshExprSpan sp (ExpE (ExportMany (Set.fromList items) []))\n\nsymVal' :: Located -> Symbol\nsymVal' (Located _ (TokLowerName n) _) = TermSymbol (EV n)\nsymVal' (Located _ (TokUpperName n) _) = TypeSymbol (TV n)\nsymVal' (Located _ (TokOperator n) _) = TermSymbol (EV n)\nsymVal' (Located _ TokMinus _) = TermSymbol (EV \"-\")\nsymVal' (Located _ TokStar _) = TermSymbol (EV \"*\")\nsymVal' (Located _ TokDot _) = TermSymbol (EV \".\")\nsymVal' (Located _ TokLAngle _) = TermSymbol (EV \"<\")\nsymVal' (Located _ TokRAngle _) = TermSymbol (EV \">\")\nsymVal' _ = TermSymbol (EV \"?\")\n\ntokToEVar :: Located -> EVar\ntokToEVar (Located _ (TokOperator n) _) = EV n\ntokToEVar (Located _ TokMinus _) = EV \"-\"\ntokToEVar (Located _ TokStar _) = EV \"*\"\ntokToEVar (Located _ TokDot _) = EV \".\"\ntokToEVar (Located _ TokLAngle _) = EV \"<\"\ntokToEVar (Located _ TokRAngle _) = EV \">\"\ntokToEVar _ = EV \"?\"\n\n--------------------------------------------------------------------\n-- Type definition desugaring\n--------------------------------------------------------------------\n\ndesugarTypeDef :: Span -> CstTypeDef -> D [ExprI]\ndesugarTypeDef sp (CstTypeAlias maybeLangTok (v, vs) (t, isTerminal)) = do\n  lang <- case maybeLangTok of\n    Nothing -> return Nothing\n    Just tok -> do\n      l <- parseLang tok\n      return (Just (l, isTerminal))\n  docs <- lookupDocsAt (startPos sp)\n  docVars <- if null docs then return defaultValue else processArgDocLinesD docs\n  e <- freshExprSpan sp (TypE (ExprTypeE lang v vs t (ArgDocAlias docVars)))\n  return [e]\ndesugarTypeDef sp (CstTypeAliasForward (v, vs)) = do\n  let t = if null vs then VarU v else AppU (VarU v) (map (either (VarU . fst) id) vs)\n  e <- freshExprSpan sp (TypE (ExprTypeE Nothing v vs t (ArgDocAlias defaultValue)))\n  return [e]\ndesugarTypeDef sp (CstNamTypeWhere nt (v, vs) locEntries) = do\n  recDocs <- lookupDocsAt (startPos sp)\n  recDocVars <- processArgDocLinesD recDocs\n  fieldDocs <-\n    mapM\n      (\\(loc, _, _) -> do dl <- lookupDocsAt (locPos loc); processArgDocLinesD dl)\n      locEntries\n  let entries = [(k, ty) | (_, k, ty) <- locEntries]\n      entries' = desugarTableEntries nt entries\n      doc = ArgDocRec recDocVars (zip (map fst entries') fieldDocs)\n      t = NamU nt v (map (either (VarU . fst) id) vs) entries'\n  e <- freshExprSpan sp (TypE (ExprTypeE Nothing v vs t doc))\n  return [e]\ndesugarTypeDef sp (CstNamTypeLegacy maybeLangTok nt (v, vs) (conName, isTerminal) entries) = do\n  lang <- case maybeLangTok of\n    Nothing -> return Nothing\n    Just tok -> do\n      l <- parseLang tok\n      return (Just (l, isTerminal))\n  let con = if T.null conName then v else TV conName\n      entries' = desugarTableEntries nt entries\n      t = NamU nt con (map (either (VarU . fst) id) vs) entries'\n      doc = ArgDocRec defaultValue [(k, defaultValue) | (k, _) <- entries']\n  e <- freshExprSpan sp (TypE (ExprTypeE lang v vs t doc))\n  return [e]\n\n--------------------------------------------------------------------\n-- Class/instance desugaring\n--------------------------------------------------------------------\n\ndesugarClassHead :: CstClassHead -> D ([Constraint], ClassName, [TVar])\ndesugarClassHead (CCHSimple t) = do\n  (cn, vs) <- extractClassDef t\n  return ([], cn, vs)\ndesugarClassHead (CCHConstrained constraintType headType) = do\n  cs <- extractConstraints constraintType\n  (cn, vs) <- extractClassDef headType\n  return (cs, cn, vs)\ndesugarClassHead (CCHMultiConstrained cs headType) = do\n  (cn, vs) <- extractClassDef headType\n  return (cs, cn, vs)\n\ndesugarSigItem :: CstSigItem -> D Signature\ndesugarSigItem (CstSigItem name sigType) = do\n  (cs, argDocs, t) <- desugarSigType (Pos 0 0 \"\") sigType\n  let wrappedT = quantifyType t\n      (labels, wrappedT') = extractLabels wrappedT\n      doc = ArgDocSig defaultValue (init argDocs) (last argDocs)\n      et = EType wrappedT' (Set.fromList cs) doc labels\n  return (Signature name Nothing et)\n\n--------------------------------------------------------------------\n-- Source desugaring\n--------------------------------------------------------------------\n\nmkOldSource :: Span -> Lang -> Maybe Path -> (Text, Maybe Text) -> D ExprI\nmkOldSource sp lang path (name, mayAlias) = do\n  let alias = maybe name id mayAlias\n  freshExprSpan\n    sp\n    ( SrcE\n        Source\n          { srcName = SrcName name\n          , srcLang = lang\n          , srcPath = path\n          , srcAlias = EV alias\n          , srcLabel = Nothing\n          , srcRsize = []\n          , srcNote = []\n          , srcInline = False\n          , srcOperator = isOperatorName name\n          }\n    )\n\nmkNewSource :: Span -> Lang -> Maybe Path -> (Bool, Text, Located) -> D ExprI\nmkNewSource sp lang path (isInline, name, nameTok) = do\n  docLines' <- lookupDocsAt (locPos nameTok)\n  let isOp = isOperatorName name\n      baseSrc =\n        Source\n          { srcName = SrcName name\n          , srcLang = lang\n          , srcPath = path\n          , srcAlias = EV name\n          , srcLabel = Nothing\n          , srcRsize = []\n          , srcNote = []\n          , srcInline = isInline\n          , srcOperator = isOp\n          }\n  src <- applySourceDocsD docLines' baseSrc\n  freshExprSpan sp (SrcE src)\n\nisOperatorName :: Text -> Bool\nisOperatorName t = case T.uncons t of\n  Just (c, _) -> not (isLower c) && not (isUpper c) && c /= '_'\n  Nothing -> False\n\n--------------------------------------------------------------------\n-- Program entry point\n--------------------------------------------------------------------\n\n{- | Desugar a list of CST nodes into ExprI nodes.\nHandles implicit main wrapping for bare declarations.\n-}\ndesugarProgram :: Bool -> [Loc CstExpr] -> D [ExprI]\ndesugarProgram isImplicitMain cstNodes = do\n  exprIs <- concatMapM desugarTopLevel cstNodes\n  if isImplicitMain\n    then mkImplicitMain exprIs\n    else return exprIs\n\n--------------------------------------------------------------------\n-- Utility\n--------------------------------------------------------------------\n\n-- concatMapM is imported from Morloc.Internal via Morloc.Namespace.Prim\n"
  },
  {
    "path": "library/Morloc/Frontend/Lexer.hs",
    "content": "{-# LANGUAGE BangPatterns #-}\n{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Frontend.Lexer\nDescription : Hand-written lexer for Morloc with layout token insertion\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nTokenizes morloc source code and inserts virtual layout tokens\n({, }, ;) for indentation-sensitive blocks (module bodies, where clauses,\nclass/instance bodies, do-blocks).\n-}\nmodule Morloc.Frontend.Lexer\n  ( lexMorloc\n  , LexError (..)\n  , showLexError\n  ) where\n\nimport Data.Char (isAlpha, isAlphaNum, isDigit, isHexDigit, isLower, isOctDigit, isUpper)\nimport qualified Data.Map.Strict as Map\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport Morloc.Frontend.Token\n\ndata LexError = LexError !Pos !String\n  deriving (Show, Eq)\n\nshowLexError :: LexError -> String\nshowLexError (LexError pos msg) =\n  posFile pos ++ \":\" ++ show (posLine pos) ++ \":\" ++ show (posCol pos) ++ \": \" ++ msg\n\n{- | Lex morloc source code into a token stream with layout tokens inserted,\nplus a map from positions to associated docstring lines, and a list of\ngroup annotation tokens (for command group support in export lists).\n-}\nlexMorloc :: String -> Text -> Either LexError ([Located], Map.Map Pos [Text], [Located])\nlexMorloc filename input = do\n  rawTokens <- lexRaw filename (T.unpack input) (startPos filename)\n  let rawTokens' = distinguishGetterDots rawTokens\n  let (docMap, groupToks, filtered) = extractDocstrings rawTokens'\n  return (insertLayout filtered, docMap, groupToks)\n\n-- | Distinguish chained getter dots from standalone ones based on position.\n-- A TokGetterDot that immediately follows the previous token (no whitespace)\n-- becomes TokGetterDotChain, used for accessor chaining like .foo.bar.\n-- A TokGetterDot preceded by whitespace stays as-is, parsed as a separate atom.\ndistinguishGetterDots :: [Located] -> [Located]\ndistinguishGetterDots [] = []\ndistinguishGetterDots [x] = [x]\ndistinguishGetterDots (prev : cur@(Located curPos TokGetterDot _) : rest)\n  | posLine (locPos prev) == posLine curPos\n  , posCol (locPos prev) + T.length (locText prev) == posCol curPos\n  = prev : distinguishGetterDots (cur { locToken = TokGetterDotChain } : rest)\n  | otherwise = prev : distinguishGetterDots (cur : rest)\ndistinguishGetterDots (x : rest) = x : distinguishGetterDots rest\n\n{- | Extract docstring and group annotation tokens. Docstrings are associated\nwith the position of the following non-doc token. Group annotation tokens\nare returned in order for post-processing.\n-}\nextractDocstrings :: [Located] -> (Map.Map Pos [Text], [Located], [Located])\nextractDocstrings = go [] Map.empty [] []\n  where\n    go _acc docMap groupToks accToks [] = (docMap, reverse groupToks, reverse accToks)\n    go acc docMap groupToks accToks (Located _ (TokDocLine txt) _ : rest) =\n      go (acc ++ [txt]) docMap groupToks accToks rest\n    go acc docMap groupToks accToks (tok@(Located _ (TokGroupLine _) _) : rest) =\n      -- Flush pending docstrings to the group annotation position, then record the group token\n      let docMap' = if null acc then docMap else Map.insert (locPos tok) acc docMap\n       in go [] docMap' (tok : groupToks) accToks rest\n    go acc docMap groupToks accToks (tok@(Located pos _ _) : rest) =\n      let docMap' = if null acc then docMap else Map.insert pos acc docMap\n       in go [] docMap' groupToks (tok : accToks) rest\n\n-- Raw lexer state\ndata LexState = LexState\n  { lsInput :: !String -- remaining input\n  , lsPos :: !Pos -- current position\n  , lsTokens :: ![Located] -- accumulated tokens (reversed)\n  }\n\n-- | Lex into raw tokens (no layout processing)\nlexRaw :: String -> String -> Pos -> Either LexError [Located]\nlexRaw _filename input pos0 = go (LexState input pos0 [])\n  where\n    go :: LexState -> Either LexError [Located]\n    go st = case lsInput st of\n      [] -> Right (reverse (Located (lsPos st) TokEOF \"\" : lsTokens st))\n      _ -> do\n        st' <- lexOne st\n        go st'\n\n-- | Lex a single token, advancing the state\nlexOne :: LexState -> Either LexError LexState\nlexOne st@(LexState input pos toks) = case input of\n  -- Whitespace\n  '\\n' : rest -> Right st {lsInput = rest, lsPos = nextLine pos}\n  c : rest\n    | c == ' ' || c == '\\t' || c == '\\r' ->\n        Right st {lsInput = rest, lsPos = advanceCol pos 1}\n  -- Block comments {- ... -}\n  '{' : '-' : rest -> skipBlockComment (advanceCol pos 2) rest (1 :: Int)\n    where\n      skipBlockComment p s 0 = Right st {lsInput = s, lsPos = p}\n      skipBlockComment p ('{' : '-' : s) n = skipBlockComment (advanceCol p 2) s (n + 1)\n      skipBlockComment p ('-' : '}' : s) n = skipBlockComment (advanceCol p 2) s (n - 1)\n      skipBlockComment p ('\\n' : s) n = skipBlockComment (nextLine p) s n\n      skipBlockComment p (_ : s) n = skipBlockComment (advanceCol p 1) s n\n      skipBlockComment p [] _ = Left (LexError p \"unterminated block comment\")\n\n  -- Group annotation comments: --* ...\n  '-' : '-' : '*' : rest ->\n    let (line, rest') = span (/= '\\n') rest\n        txt = T.pack line\n        len = 3 + length line\n     in Right\n          st\n            { lsInput = rest'\n            , lsPos = advanceCol pos len\n            , lsTokens = Located pos (TokGroupLine txt) (T.pack (\"--*\" ++ line)) : toks\n            }\n  -- Docstring comments: --' ...\n  '-' : '-' : '\\'' : rest ->\n    let (line, rest') = span (/= '\\n') rest\n        txt = T.pack line\n        len = 3 + length line\n     in Right\n          st\n            { lsInput = rest'\n            , lsPos = advanceCol pos len\n            , lsTokens = Located pos (TokDocLine txt) (T.pack (\"--'\" ++ line)) : toks\n            }\n  -- Line comments: -- (but not --' or --^)\n  '-' : '-' : rest\n    | not (null rest) && head rest `elem` ['\\'', '^'] ->\n        Left (LexError pos \"unexpected docstring marker\")\n    | otherwise ->\n        let (_, rest') = span (/= '\\n') rest\n         in Right st {lsInput = rest', lsPos = advanceCol pos (2 + length (takeWhile (/= '\\n') rest))}\n  -- Triple-quoted strings\n  '\\'' : '\\'' : '\\'' : rest -> lexMultilineString pos \"'''\" rest st\n  '\"' : '\"' : '\"' : rest -> lexMultilineString pos \"\\\"\\\"\\\"\" rest st\n  -- Double-quoted strings (with interpolation support)\n  '\"' : rest -> lexDoubleString pos rest st\n  -- Numbers: try hex, octal, binary first, then decimal/float\n  '0' : 'x' : rest -> lexHexNumber pos rest st\n  '0' : 'X' : rest -> lexHexNumber pos rest st\n  '0' : 'o' : rest -> lexOctalNumber pos rest st\n  '0' : 'O' : rest -> lexOctalNumber pos rest st\n  '0' : 'b' : rest -> lexBinaryNumber pos rest st\n  '0' : 'B' : rest -> lexBinaryNumber pos rest st\n  c : _ | isDigit c -> lexDecNumber pos input st\n  -- Delimiters and punctuation\n  '(' : rest -> emit1 TokLParen \"(\" rest\n  ')' : rest -> emit1 TokRParen \")\" rest\n  '[' : rest -> emit1 TokLBracket \"[\" rest\n  ']' : rest -> emit1 TokRBracket \"]\" rest\n  '{' : rest -> emit1 TokLBrace \"{\" rest\n  '}' : rest -> emit1 TokRBrace \"}\" rest\n  ',' : rest -> emit1 TokComma \",\" rest\n  ';' : rest -> emit1 TokSemicolon \";\" rest\n  -- Underscore: standalone '_' is a hole, '_var' is an identifier\n  '_' : c : rest\n    | isAlphaNum c || c == '\\'' || c == '_' ->\n        -- identifier starting with underscore (e.g., _do_5)\n        lexIdent pos ('_' : c : rest) st\n  '_' : rest ->\n    emit1 TokUnderscore \"_\" rest\n  -- Backslash (lambda)\n  '\\\\' : rest -> emit1 TokBackslash \"\\\\\" rest\n  -- Dot: TokGetterDot when immediately followed by lowercase letter, digit, or '(' (getter/setter),\n  -- TokDot otherwise (composition operator, module separator).\n  -- When followed by an operator char (e.g., '..' or '.='), falls through to the operator lexer.\n  -- When followed by a digit, emit both TokGetterDot and TokInteger to prevent float parsing\n  -- (e.g., .1.2 should be getter .1 then getter .2, not getter then float 1.2)\n  '.' : c : rest\n    | isDigit c ->\n        let (digits, rest') = span isDigit (c : rest)\n            val = read digits :: Integer\n            dotPos = pos\n            numPos = advanceCol pos 1\n         in Right\n              st\n                { lsInput = rest'\n                , lsPos = advanceCol numPos (length digits)\n                , lsTokens =\n                    Located numPos (TokInteger val) (T.pack digits)\n                      : Located dotPos TokGetterDot \".\"\n                      : toks\n                }\n  '.' : c : rest\n    | isLower c || c == '(' ->\n        emit1 TokGetterDot \".\" (c : rest)\n  '.' : c : rest\n    | not (isOperatorChar c) ->\n        emit1 TokDot \".\" (c : rest)\n  '.' : [] ->\n    emit1 TokDot \".\" []\n  -- Bang -- special: ! is force operator, !! is chained force\n  -- But !!! or != etc. are user-defined operators (fall through to operator lexer)\n  '!' : '!' : c : rest\n    | not (isOperatorChar c) ->\n        Right\n          st\n            { lsInput = c : rest\n            , lsPos = advanceCol pos 2\n            , lsTokens =\n                Located (advanceCol pos 1) TokBang \"!\"\n                  : Located pos TokBang \"!\"\n                  : toks\n            }\n  '!' : '!' : [] ->\n    Right\n      st\n        { lsInput = []\n        , lsPos = advanceCol pos 2\n        , lsTokens =\n            Located (advanceCol pos 1) TokBang \"!\"\n              : Located pos TokBang \"!\"\n              : toks\n        }\n  '!' : c : rest\n    | not (isOperatorChar c) ->\n        emit1 TokBang \"!\" (c : rest)\n  '!' : [] ->\n    emit1 TokBang \"!\" []\n  -- Question mark -- standalone ? is guard token, multi-char like ?= are operators\n  '?' : c : rest\n    | not (isOperatorChar c) ->\n        emit1 TokQuestion \"?\" (c : rest)\n  '?' : [] ->\n    emit1 TokQuestion \"?\" []\n  -- Pragmas: %inline\n  '%' : 'i' : 'n' : 'l' : 'i' : 'n' : 'e' : rest\n    | null rest || not (isAlphaNum (head rest) || head rest == '_' || head rest == '\\'') ->\n        Right st { lsInput = rest, lsPos = advanceCol pos 7\n                 , lsTokens = Located pos TokPragmaInline \"%inline\" : toks }\n  -- Intrinsics: @name (@ followed by lowercase letter)\n  '@' : c : rest | isLower c ->\n    let (word, rest') = span (\\x -> isAlphaNum x || x == '\\'' || x == '_') (c : rest)\n        name = T.pack word\n        len = 1 + length word\n     in Right st { lsInput = rest', lsPos = advanceCol pos len\n                 , lsTokens = Located pos (TokIntrinsic name) (T.cons '@' name) : toks }\n  -- Operators and reserved operator sequences\n  c : rest | isOperatorChar c -> lexOperator pos (c : rest) st\n  -- Identifiers and keywords\n  c : rest | isAlpha c -> lexIdent pos (c : rest) st\n  -- Negative numbers: sign directly attached\n  -- This is handled in the parser by parsing - as a unary operator\n\n  -- Unknown character\n  c : _ -> Left (LexError pos (\"unexpected character: \" ++ show c))\n  [] -> Right st -- handled by go\n  where\n    emit1 tok txt rest =\n      Right\n        st\n          { lsInput = rest\n          , lsPos = advanceCol pos (T.length txt)\n          , lsTokens = Located pos tok txt : toks\n          }\n\n-- | Lex an identifier or keyword. The first character may be a letter or underscore.\n-- When a lowercase identifier is immediately followed by '.' and a lowercase letter\n-- (no space), emit TokNsDot between them to support qualified names (e.g., f.map).\n-- Exception: if the preceding token is TokGetterDot, we're in a getter chain\n-- (e.g., .home.altitude) and the dot should NOT be treated as a namespace dot.\nlexIdent :: Pos -> String -> LexState -> Either LexError LexState\nlexIdent pos input st =\n  let (word, rest) = spanIdent input\n      txt = T.pack word\n      tok = classifyWord txt\n      len = length word\n   in case (tok, rest) of\n        (TokLowerName _, '.' : c : rest')\n          | isLower c, not (afterGetterDot (lsTokens st)) ->\n              let dotPos = advanceCol pos len\n               in Right st\n                    { lsInput = c : rest'\n                    , lsPos = advanceCol dotPos 1\n                    , lsTokens = Located dotPos TokNsDot \".\"\n                                   : Located pos tok txt\n                                   : lsTokens st\n                    }\n        -- Label colon: label:id (no space)\n        (TokLowerName _, ':' : c : rest')\n          | isLower c || c == '_' ->\n              let colonPos = advanceCol pos len\n               in Right st\n                    { lsInput = c : rest'\n                    , lsPos = advanceCol colonPos 1\n                    , lsTokens = Located colonPos TokLabelColon \":\"\n                                   : Located pos tok txt\n                                   : lsTokens st\n                    }\n        _ -> Right\n              st\n                { lsInput = rest\n                , lsPos = advanceCol pos len\n                , lsTokens = Located pos tok txt : lsTokens st\n                }\n  where\n    afterGetterDot :: [Located] -> Bool\n    afterGetterDot (Located _ TokGetterDot _ : _) = True\n    afterGetterDot _ = False\n\nspanIdent :: String -> (String, String)\nspanIdent [] = ([], [])\nspanIdent (c : cs)\n  | isAlpha c || c == '_' =\n      let (rest, remaining) = span (\\x -> isAlphaNum x || x == '\\'' || x == '_') cs\n       in (c : rest, remaining)\n  | otherwise = ([], c : cs)\n\n{- | Module component: lowercase start, may contain dashes\nWe handle dashes in module names in the parser by combining tokens.\nThe lexer just produces normal identifiers.\n-}\nclassifyWord :: Text -> Token\nclassifyWord \"module\" = TokModule\nclassifyWord \"import\" = TokImport\nclassifyWord \"export\" = TokExport\nclassifyWord \"source\" = TokSource\nclassifyWord \"from\" = TokFrom\nclassifyWord \"where\" = TokWhere\nclassifyWord \"as\" = TokAs\nclassifyWord \"True\" = TokTrue\nclassifyWord \"False\" = TokFalse\nclassifyWord \"type\" = TokType\nclassifyWord \"record\" = TokRecord\nclassifyWord \"object\" = TokObject\nclassifyWord \"table\" = TokTable\nclassifyWord \"class\" = TokClass\nclassifyWord \"instance\" = TokInstance\nclassifyWord \"infixl\" = TokInfixl\nclassifyWord \"infixr\" = TokInfixr\nclassifyWord \"infix\" = TokInfix\nclassifyWord \"let\" = TokLet\nclassifyWord \"in\" = TokIn\nclassifyWord \"do\" = TokDo\nclassifyWord \"Null\" = TokNull\nclassifyWord t\n  | isUpper (T.head t) = TokUpperName t\n  | otherwise = TokLowerName t\n\n-- | Lex an operator\nlexOperator :: Pos -> String -> LexState -> Either LexError LexState\nlexOperator pos input st =\n  let (opStr, rest) = span isOperatorChar input\n      txt = T.pack opStr\n      tok = classifyOp txt\n      len = length opStr\n   in Right\n        st\n          { lsInput = rest\n          , lsPos = advanceCol pos len\n          , lsTokens = Located pos tok txt : lsTokens st\n          }\n\nclassifyOp :: Text -> Token\nclassifyOp \"::\" = TokDColon\nclassifyOp \"->\" = TokArrow\nclassifyOp \"=>\" = TokFatArrow\nclassifyOp \"<-\" = TokBind\nclassifyOp \"=\" = TokEquals\nclassifyOp \":\" = TokColon\nclassifyOp \"*\" = TokStar\nclassifyOp \"-\" = TokMinus\n-- < and > are also operators but we need them as angle brackets in some contexts\n-- The parser handles disambiguation\nclassifyOp \"<\" = TokLAngle\nclassifyOp \">\" = TokRAngle\nclassifyOp t = TokOperator t\n\nisOperatorChar :: Char -> Bool\nisOperatorChar c = c `elem` (\":!$%&*+./<=>?@\\\\^|-~#\" :: String)\n\n-- | Lex a double-quoted string, handling interpolation\nlexDoubleString :: Pos -> String -> LexState -> Either LexError LexState\nlexDoubleString start input st = go (advanceCol start 1) input []\n  where\n    go pos ('\"' : rest) acc =\n      let txt = T.pack (reverse acc)\n          fullTxt = \"\\\"\" <> txt <> \"\\\"\"\n       in Right\n            st\n              { lsInput = rest\n              , lsPos = advanceCol pos 1\n              , lsTokens = Located start (TokString txt) fullTxt : lsTokens st\n              }\n    go pos ('#' : '{' : rest) acc =\n      -- start of interpolation\n      let prefix = T.pack (reverse acc)\n          tok =\n            if null (lsTokens st) || not (isStringContinuation (lsTokens st))\n              then TokStringStart prefix\n              else TokStringMid prefix\n          prefixTxt = \"\\\"\" <> prefix <> \"#{\"\n       in lexInterpBody\n            (advanceCol pos 2)\n            rest\n            1\n            st\n              { lsTokens = Located (advanceCol pos 0) TokInterpOpen \"#{\" : Located start tok prefixTxt : lsTokens st\n              }\n            start\n    go pos ('\\\\' : c : rest) acc =\n      let escaped = case c of\n            'n' -> '\\n'\n            't' -> '\\t'\n            '\\\\' -> '\\\\'\n            '\"' -> '\"'\n            _ -> c\n       in go (advanceCol pos 2) rest (escaped : acc)\n    go pos ('\\n' : _) _ = Left (LexError pos \"unterminated string literal (use triple quotes for multi-line strings)\")\n    go pos (c : rest) acc = go (advanceCol pos 1) rest (c : acc)\n    go pos [] _ = Left (LexError pos \"unterminated string literal\")\n\n    isStringContinuation (Located _ (TokStringStart _) _ : _) = True\n    isStringContinuation (Located _ (TokStringMid _) _ : _) = True\n    isStringContinuation _ = False\n\n-- | Lex the body of an interpolation #{...}, tracking brace depth\nlexInterpBody :: Pos -> String -> Int -> LexState -> Pos -> Either LexError LexState\nlexInterpBody pos ('}' : rest) 1 st stringStartPos =\n  -- end of interpolation, resume string lexing\n  let st' = st {lsTokens = Located pos TokInterpClose \"}\" : lsTokens st}\n   in lexStringAfterInterp (advanceCol pos 1) rest st' stringStartPos\nlexInterpBody pos ('}' : rest) depth st strPos =\n  lexInterpBody (advanceCol pos 1) rest (depth - 1) st strPos\nlexInterpBody pos ('{' : rest) depth st strPos =\n  lexInterpBody (advanceCol pos 1) rest (depth + 1) st strPos\nlexInterpBody pos input _ st _ = do\n  -- lex one token from the interpolated expression\n  st' <- lexOne st {lsInput = input, lsPos = pos}\n  -- continue lexing the interpolation body\n  case lsInput st' of\n    [] -> Left (LexError pos \"unterminated string interpolation\")\n    _ -> case lsTokens st' of\n      (Located _ TokEOF _ : _) -> Left (LexError pos \"unterminated string interpolation\")\n      _ -> do\n        -- figure out remaining brace depth from what was consumed\n        let consumed = length input - length (lsInput st')\n            braceChange = countBraces (take consumed input)\n        lexInterpBody (lsPos st') (lsInput st') (1 + braceChange) st' (Pos 0 0 \"\")\n  where\n    countBraces = foldl (\\n c -> case c of '{' -> n + 1; '}' -> n - 1; _ -> n) 0\n\n-- | After interpolation closes, resume lexing the string\nlexStringAfterInterp :: Pos -> String -> LexState -> Pos -> Either LexError LexState\nlexStringAfterInterp pos ('\"' : rest) st _ =\n  let txt = T.empty\n   in Right\n        st\n          { lsInput = rest\n          , lsPos = advanceCol pos 1\n          , lsTokens = Located pos (TokStringEnd txt) \"\\\"\" : lsTokens st\n          }\nlexStringAfterInterp pos ('#' : '{' : rest) st strStartPos =\n  -- another interpolation immediately\n  let tok = TokStringMid T.empty\n   in lexInterpBody\n        (advanceCol pos 2)\n        rest\n        1\n        st {lsTokens = Located pos TokInterpOpen \"#{\" : Located pos tok \"\" : lsTokens st}\n        strStartPos\nlexStringAfterInterp pos input st _ = go pos input []\n  where\n    go p ('\"' : rest) acc =\n      let txt = T.pack (reverse acc)\n       in Right\n            st\n              { lsInput = rest\n              , lsPos = advanceCol p 1\n              , lsTokens = Located pos (TokStringEnd txt) (\"\" <> txt <> \"\\\"\") : lsTokens st\n              }\n    go p ('#' : '{' : rest) acc =\n      let txt = T.pack (reverse acc)\n       in lexInterpBody\n            (advanceCol p 2)\n            rest\n            1\n            st {lsTokens = Located p TokInterpOpen \"#{\" : Located pos (TokStringMid txt) \"\" : lsTokens st}\n            pos\n    go p ('\\\\' : c : rest) acc =\n      let escaped = case c of\n            'n' -> '\\n'\n            't' -> '\\t'\n            '\\\\' -> '\\\\'\n            '\"' -> '\"'\n            _ -> c\n       in go (advanceCol p 2) rest (escaped : acc)\n    go p ('\\n' : _) _ = Left (LexError p \"unterminated string literal\")\n    go p (c : rest) acc = go (advanceCol p 1) rest (c : acc)\n    go p [] _ = Left (LexError p \"unterminated string literal\")\n\n-- | Lex a multiline (triple-quoted) string with interpolation\nlexMultilineString :: Pos -> String -> String -> LexState -> Either LexError LexState\nlexMultilineString start delim input st = go (advanceCol start 3) input []\n  where\n    delimLen = length delim\n\n    go pos s acc\n      | take delimLen s == delim =\n          let rawTxt = T.pack (reverse acc)\n              txt = processMultilineString rawTxt\n              fullTxt = T.pack delim <> rawTxt <> T.pack delim\n           in Right\n                st\n                  { lsInput = drop delimLen s\n                  , lsPos = advanceCol pos delimLen\n                  , lsTokens = Located start (TokString txt) fullTxt : lsTokens st\n                  }\n    go pos ('#' : '{' : rest) acc =\n      -- interpolation inside multiline string\n      let prefix = T.pack (reverse acc)\n          tok = TokStringStart prefix\n       in lexMultilineInterpBody\n            (advanceCol pos 2)\n            rest\n            1\n            st {lsTokens = Located pos TokInterpOpen \"#{\" : Located start tok \"\" : lsTokens st}\n            start\n            delim\n    go pos ('\\\\' : c : rest) acc =\n      let escaped = case c of\n            'n' -> '\\n'\n            't' -> '\\t'\n            '\\\\' -> '\\\\'\n            '\\'' -> '\\''\n            '\"' -> '\"'\n            _ -> c\n       in go (advanceCol pos 2) rest (escaped : acc)\n    go pos ('\\n' : rest) acc = go (nextLine pos) rest ('\\n' : acc)\n    go pos (c : rest) acc = go (advanceCol pos 1) rest (c : acc)\n    go pos [] _ = Left (LexError pos \"unterminated multiline string literal\")\n\n-- | Lex interpolation body inside a multiline string\nlexMultilineInterpBody ::\n  Pos -> String -> Int -> LexState -> Pos -> String -> Either LexError LexState\nlexMultilineInterpBody pos ('}' : rest) 1 st strStartPos delim =\n  -- end of interpolation, resume multiline string\n  let st' = st {lsTokens = Located pos TokInterpClose \"}\" : lsTokens st}\n   in lexMultilineAfterInterp (advanceCol pos 1) rest st' strStartPos delim\nlexMultilineInterpBody pos input _ st _ _ = do\n  st' <- lexOne st {lsInput = input, lsPos = pos}\n  case lsInput st' of\n    [] -> Left (LexError pos \"unterminated string interpolation\")\n    ('}' : rest) ->\n      let st'' = st' {lsTokens = Located (lsPos st') TokInterpClose \"}\" : lsTokens st'}\n       in lexMultilineAfterInterp (advanceCol (lsPos st') 1) rest st'' pos \"\"\n    _ -> lexMultilineInterpBody (lsPos st') (lsInput st') 1 st' pos \"\"\n\n-- | Resume multiline string after interpolation\nlexMultilineAfterInterp :: Pos -> String -> LexState -> Pos -> String -> Either LexError LexState\nlexMultilineAfterInterp pos input st _ delim = go pos input []\n  where\n    delimLen = length delim\n\n    go p s acc\n      | delimLen > 0 && take delimLen s == delim =\n          let txt = T.pack (reverse acc)\n           in Right\n                st\n                  { lsInput = drop delimLen s\n                  , lsPos = advanceCol p delimLen\n                  , lsTokens = Located pos (TokStringEnd txt) \"\" : lsTokens st\n                  }\n    go p ('#' : '{' : rest) acc =\n      let txt = T.pack (reverse acc)\n       in lexMultilineInterpBody\n            (advanceCol p 2)\n            rest\n            1\n            st {lsTokens = Located p TokInterpOpen \"#{\" : Located pos (TokStringMid txt) \"\" : lsTokens st}\n            pos\n            delim\n    go p ('\\n' : rest) acc = go (nextLine p) rest ('\\n' : acc)\n    go p (c : rest) acc = go (advanceCol p 1) rest (c : acc)\n    go p [] _ = Left (LexError p \"unterminated multiline string literal\")\n\n-- | Lex a hexadecimal number after 0x prefix\nlexHexNumber :: Pos -> String -> LexState -> Either LexError LexState\nlexHexNumber pos input st =\n  let (digits, rest) = span isHexDigit input\n   in if null digits\n        then Left (LexError pos \"expected hexadecimal digits after 0x\")\n        else\n          let val = foldl (\\n d -> n * 16 + fromIntegral (hexVal d)) 0 digits\n              len = 2 + length digits\n              txt = T.pack (\"0x\" ++ digits)\n           in Right\n                st\n                  { lsInput = rest\n                  , lsPos = advanceCol pos len\n                  , lsTokens = Located pos (TokInteger val) txt : lsTokens st\n                  }\n  where\n    hexVal c\n      | c >= '0' && c <= '9' = fromEnum c - fromEnum '0'\n      | c >= 'a' && c <= 'f' = fromEnum c - fromEnum 'a' + 10\n      | c >= 'A' && c <= 'F' = fromEnum c - fromEnum 'A' + 10\n      | otherwise = 0\n\n-- | Lex an octal number after 0o prefix\nlexOctalNumber :: Pos -> String -> LexState -> Either LexError LexState\nlexOctalNumber pos input st =\n  let (digits, rest) = span isOctDigit input\n   in if null digits\n        then Left (LexError pos \"expected octal digits after 0o\")\n        else\n          let val = foldl (\\n d -> n * 8 + fromIntegral (fromEnum d - fromEnum '0')) 0 digits\n              len = 2 + length digits\n              txt = T.pack (\"0o\" ++ digits)\n           in Right\n                st\n                  { lsInput = rest\n                  , lsPos = advanceCol pos len\n                  , lsTokens = Located pos (TokInteger val) txt : lsTokens st\n                  }\n\n-- | Lex a binary number after 0b prefix\nlexBinaryNumber :: Pos -> String -> LexState -> Either LexError LexState\nlexBinaryNumber pos input st =\n  let (digits, rest) = span (\\c -> c == '0' || c == '1') input\n   in if null digits\n        then Left (LexError pos \"expected binary digits after 0b\")\n        else\n          let val = foldl (\\n d -> n * 2 + fromIntegral (fromEnum d - fromEnum '0')) 0 digits\n              len = 2 + length digits\n              txt = T.pack (\"0b\" ++ digits)\n           in Right\n                st\n                  { lsInput = rest\n                  , lsPos = advanceCol pos len\n                  , lsTokens = Located pos (TokInteger val) txt : lsTokens st\n                  }\n\n-- | Lex a decimal integer or float, with optional scientific notation\nlexDecNumber :: Pos -> String -> LexState -> Either LexError LexState\nlexDecNumber pos input st =\n  let (intPart, rest1) = span isDigit input\n   in case rest1 of\n        -- Float: digits.digits\n        '.' : c : rest2\n          | isDigit c ->\n              let (fracPart, rest3) = span isDigit (c : rest2)\n                  (expPart, rest4) = lexExponent rest3\n                  numStr = intPart ++ \".\" ++ fracPart ++ expPart\n                  val = read numStr :: Double\n                  len = length numStr\n               in Right\n                    st\n                      { lsInput = rest4\n                      , lsPos = advanceCol pos len\n                      , lsTokens = Located pos (TokFloat val) (T.pack numStr) : lsTokens st\n                      }\n        -- Integer with exponent (e.g., 5e10) -- treated as float\n        'e' : _ ->\n          let (expPart, rest3) = lexExponent rest1\n           in if null expPart\n                then mkInt intPart rest1\n                else\n                  let numStr = intPart ++ expPart\n                      val = read numStr :: Double\n                      len = length numStr\n                   in Right\n                        st\n                          { lsInput = rest3\n                          , lsPos = advanceCol pos len\n                          , lsTokens = Located pos (TokFloat val) (T.pack numStr) : lsTokens st\n                          }\n        'E' : _ ->\n          let (expPart, rest3) = lexExponent rest1\n           in if null expPart\n                then mkInt intPart rest1\n                else\n                  let numStr = intPart ++ expPart\n                      val = read numStr :: Double\n                      len = length numStr\n                   in Right\n                        st\n                          { lsInput = rest3\n                          , lsPos = advanceCol pos len\n                          , lsTokens = Located pos (TokFloat val) (T.pack numStr) : lsTokens st\n                          }\n        -- Plain integer\n        _ -> mkInt intPart rest1\n  where\n    mkInt digits rest =\n      let val = read digits :: Integer\n          len = length digits\n       in Right\n            st\n              { lsInput = rest\n              , lsPos = advanceCol pos len\n              , lsTokens = Located pos (TokInteger val) (T.pack digits) : lsTokens st\n              }\n\n{- | Try to lex a scientific notation exponent (e.g., e10, e-3, E+5)\nReturns the exponent string and remaining input. Empty string if no exponent.\n-}\nlexExponent :: String -> (String, String)\nlexExponent ('e' : '+' : rest) =\n  let (digits, rest') = span isDigit rest\n   in if null digits\n        then (\"\", 'e' : '+' : rest)\n        else (\"e+\" ++ digits, rest')\nlexExponent ('e' : '-' : rest) =\n  let (digits, rest') = span isDigit rest\n   in if null digits\n        then (\"\", 'e' : '-' : rest)\n        else (\"e-\" ++ digits, rest')\nlexExponent ('e' : rest) =\n  let (digits, rest') = span isDigit rest\n   in if null digits\n        then (\"\", 'e' : rest)\n        else (\"e\" ++ digits, rest')\nlexExponent ('E' : '+' : rest) =\n  let (digits, rest') = span isDigit rest\n   in if null digits\n        then (\"\", 'E' : '+' : rest)\n        else (\"E+\" ++ digits, rest')\nlexExponent ('E' : '-' : rest) =\n  let (digits, rest') = span isDigit rest\n   in if null digits\n        then (\"\", 'E' : '-' : rest)\n        else (\"E-\" ++ digits, rest')\nlexExponent ('E' : rest) =\n  let (digits, rest') = span isDigit rest\n   in if null digits\n        then (\"\", 'E' : rest)\n        else (\"E\" ++ digits, rest')\nlexExponent rest = (\"\", rest)\n\n{- | Process a multiline (triple-quoted) string: strip leading/trailing blank\nlines and remove common indentation.\n-}\nprocessMultilineString :: Text -> Text\nprocessMultilineString txt =\n  let stripped = removeTrailingSpace (removeLeadingSpace txt)\n   in reindent stripped\n  where\n    removeLeadingSpace :: Text -> Text\n    removeLeadingSpace s = case T.lines s of\n      [] -> \"\"\n      (first : rest)\n        | T.null (T.strip first) -> T.unlines rest\n        | otherwise -> T.unlines (first : rest)\n\n    removeTrailingSpace :: Text -> Text\n    removeTrailingSpace s = case T.lines s of\n      [] -> \"\"\n      ls\n        | T.null (T.strip (last ls)) -> T.unlines (init ls)\n        | otherwise -> T.unlines ls\n\n    reindent :: Text -> Text\n    reindent s = case T.lines s of\n      [] -> \"\"\n      ls ->\n        let nonEmpty = filter (not . T.null . T.strip) ls\n            spaces = map (T.length . T.takeWhile (== ' ')) nonEmpty\n            minSpaces = if null spaces then 0 else minimum spaces\n         in T.unlines (map (T.drop minSpaces) ls)\n\n-- Position helpers\nadvanceCol :: Pos -> Int -> Pos\nadvanceCol (Pos l c f) n = Pos l (c + n) f\n\nnextLine :: Pos -> Pos\nnextLine (Pos l _ f) = Pos (l + 1) 1 f\n\n--------------------------------------------------------------------\n-- Layout token insertion\n--------------------------------------------------------------------\n\n{- | Insert virtual braces and semicolons based on indentation.\n\nLayout contexts:\n  1. Top-level: the body of a module (or implicit main)\n  2. After 'where' keyword (function where, class/instance bodies)\n  3. After 'do' keyword\n\nAlgorithm (GHC-inspired):\n  - When we see a layout keyword (where, do), the next token's column\n    defines a new layout context. Emit virtual {.\n  - For the top-level, the first declaration's column starts the context.\n  - When the next token aligns with the context column, emit ;.\n  - When the next token is left of the context column, emit } and pop.\n  - Explicit { after a layout keyword enters a non-indentation context.\n-}\ndata LayoutContext\n  = -- | virtual { at this column\n    IndentCtx !Int\n  | -- | let-introduced layout context at this column (closed by 'in')\n    LetCtx !Int\n  | -- | real {, no indentation tracking\n    ExplicitCtx\n  deriving (Show, Eq)\n\n-- | Is the token a layout keyword (introduces an indented block)?\nisLayoutKeyword :: Token -> Bool\nisLayoutKeyword TokWhere = True\nisLayoutKeyword TokDo = True\nisLayoutKeyword TokLet = True\nisLayoutKeyword _ = False\n\ninsertLayout :: [Located] -> [Located]\ninsertLayout [] = []\ninsertLayout toks = beginTopLevel toks\n  where\n    -- Handle the top-level layout. The top-level body (after module header\n    -- or at the start for implicit main) gets a layout context.\n    beginTopLevel :: [Located] -> [Located]\n    beginTopLevel ts = case ts of\n      -- File starts with 'module': skip the header, then start layout\n      (Located p TokModule _ : rest) ->\n        Located p TokModule \"\" : skipModuleHeader rest\n      -- File starts with something else: implicit main, start layout immediately\n      (_ : _) -> startLayoutCtx TokModule [] ts\n      [] -> []\n\n    -- Skip past 'module Name (exports)' to find where the body starts.\n    -- We need to find the opening '(' of the export list, then track\n    -- paren depth to find the matching ')'.\n    skipModuleHeader :: [Located] -> [Located]\n    skipModuleHeader [] = []\n    skipModuleHeader (t@(Located _ TokLParen _) : rest) =\n      -- Found the opening ( of exports, now track depth starting at 1\n      t : skipExportList 1 rest\n    skipModuleHeader (t : rest) = t : skipModuleHeader rest\n\n    -- Track paren depth inside the export list\n    skipExportList :: Int -> [Located] -> [Located]\n    skipExportList _ [] = []\n    skipExportList depth (t@(Located _ TokLParen _) : rest) =\n      t : skipExportList (depth + 1) rest\n    skipExportList depth (t@(Located _ TokRParen _) : rest)\n      | depth <= 1 = t : startLayoutCtx TokModule [] rest -- closing ) of export list\n      | otherwise = t : skipExportList (depth - 1) rest\n    skipExportList depth (t : rest) = t : skipExportList depth rest\n\n    -- Skip module header during processing (for multi-module files).\n    -- Same as skipModuleHeader/skipExportList but preserves existing contexts.\n    skipModuleHeaderInBody :: [LayoutContext] -> [Located] -> [Located]\n    skipModuleHeaderInBody ctxs [] = closingBraces ctxs []\n    skipModuleHeaderInBody ctxs (t@(Located _ TokLParen _) : rest) =\n      t : skipExportListInBody ctxs 1 rest\n    skipModuleHeaderInBody ctxs (t : rest) = t : skipModuleHeaderInBody ctxs rest\n\n    skipExportListInBody :: [LayoutContext] -> Int -> [Located] -> [Located]\n    skipExportListInBody ctxs _ [] = closingBraces ctxs []\n    skipExportListInBody ctxs depth (t@(Located _ TokLParen _) : rest) =\n      t : skipExportListInBody ctxs (depth + 1) rest\n    skipExportListInBody ctxs depth (t@(Located _ TokRParen _) : rest)\n      | depth <= 1 = t : startLayoutCtx TokModule ctxs rest\n      | otherwise = t : skipExportListInBody ctxs (depth - 1) rest\n    skipExportListInBody ctxs depth (t : rest) = t : skipExportListInBody ctxs depth rest\n\n    -- Start a new layout context at the column of the next token\n    -- The Token parameter indicates which keyword triggered the context\n    -- (TokLet uses LetCtx, others use IndentCtx)\n    startLayoutCtx :: Token -> [LayoutContext] -> [Located] -> [Located]\n    startLayoutCtx _ ctxs [] = closingBraces ctxs []\n    startLayoutCtx _ ctxs [eof@(Located _ TokEOF _)] =\n      -- empty body\n      Located (locPos eof) TokVLBrace \"\"\n        : Located (locPos eof) TokVRBrace \"\"\n        : closingBraces ctxs [eof]\n    -- Explicit brace after layout keyword: skip virtual layout, let the\n    -- brace be handled as an explicit context by emitToken/process.\n    startLayoutCtx _ ctxs (t@(Located _ TokLBrace _) : rest) =\n      t : process (ExplicitCtx : ctxs) rest\n    startLayoutCtx kw ctxs (t : rest)\n      | otherwise =\n          let col = posCol (locPos t)\n              ctx = case kw of\n                TokLet -> LetCtx col\n                _      -> IndentCtx col\n              newCtxs = ctx : ctxs\n              vopen = Located (locPos t) TokVLBrace \"\"\n           in -- The first token of a layout block must not get a VSEMI before it\n              -- (indentCheck would emit one since col == n). So we handle it\n              -- specially, bypassing indentation checking.\n              vopen : emitFirstToken newCtxs t rest\n\n    -- Emit the first token of a layout block. Like processToken but\n    -- without indentation checking (the first token defines the column,\n    -- it should not receive a VSEMI).\n    emitFirstToken :: [LayoutContext] -> Located -> [Located] -> [Located]\n    emitFirstToken ctxs tok rest\n      | locToken tok == TokEOF = closingBraces ctxs [tok]\n      | otherwise = emitToken ctxs tok rest\n\n    -- Main processing loop\n    process :: [LayoutContext] -> [Located] -> [Located]\n    process ctxs [] = closingBraces ctxs []\n    process ctxs (t : rest) = processToken ctxs t rest\n\n    -- Process a single token with the current context stack\n    processToken :: [LayoutContext] -> Located -> [Located] -> [Located]\n    processToken ctxs tok rest\n      -- EOF: close all contexts\n      | locToken tok == TokEOF = closingBraces ctxs [tok]\n      -- Regular token: check indentation first (may emit VSEMI/VRBRACE)\n      | otherwise = indentCheck ctxs tok rest\n\n    -- Check indentation of a regular token against the context stack\n    indentCheck :: [LayoutContext] -> Located -> [Located] -> [Located]\n    indentCheck [] tok rest = emitToken [] tok rest\n    indentCheck (ExplicitCtx : ctxs) tok rest = emitToken (ExplicitCtx : ctxs) tok rest\n    -- 'in' immediately closes a LetCtx regardless of indentation\n    indentCheck (LetCtx _ : cs) tok rest\n      | locToken tok == TokIn =\n          Located (locPos tok) TokVRBrace \"\" : emitToken cs tok rest\n    -- ';' closes a LetCtx so that let-bindings terminate inside explicit-brace blocks\n    indentCheck (LetCtx _ : cs) tok rest\n      | locToken tok == TokSemicolon =\n          Located (locPos tok) TokVRBrace \"\" : indentCheck cs tok rest\n    indentCheck ctxs@(LetCtx n : cs) tok rest\n      | col == n && isBlockCloser (locToken tok) =\n          Located (locPos tok) TokVRBrace \"\" : indentCheck cs tok rest\n      | col == n =\n          Located (locPos tok) TokVSemi \"\" : emitToken ctxs tok rest\n      | col > n =\n          emitToken ctxs tok rest\n      | otherwise =\n          Located (locPos tok) TokVRBrace \"\" : indentCheck cs tok rest\n      where\n        col = posCol (locPos tok)\n    indentCheck ctxs@(IndentCtx n : cs) tok rest\n      | col == n && isBlockCloser (locToken tok) =\n          -- 'module' at layout column closes the current block\n          Located (locPos tok) TokVRBrace \"\" : indentCheck cs tok rest\n      | col == n =\n          -- aligned: emit semicolon before this token\n          Located (locPos tok) TokVSemi \"\" : emitToken ctxs tok rest\n      | col > n =\n          -- indented further: continuation of previous item\n          emitToken ctxs tok rest\n      | otherwise =\n          -- dedented: close this context, then re-check\n          Located (locPos tok) TokVRBrace \"\" : indentCheck cs tok rest\n      where\n        col = posCol (locPos tok)\n\n    -- Tokens that close a layout block even when at the same indentation level\n    isBlockCloser :: Token -> Bool\n    isBlockCloser TokModule = True\n    isBlockCloser TokWhere = True\n    isBlockCloser _ = False\n\n    -- Emit a token with special handling for keywords\n    emitToken :: [LayoutContext] -> Located -> [Located] -> [Located]\n    emitToken ctxs tok rest\n      -- Module keyword: emit it and skip the header, then start layout\n      | locToken tok == TokModule =\n          tok : skipModuleHeaderInBody ctxs rest\n      -- Layout keywords: emit the keyword, then start a new layout context\n      | isLayoutKeyword (locToken tok) =\n          tok : startLayoutCtx (locToken tok) ctxs rest\n      -- Explicit open brace\n      | locToken tok == TokLBrace =\n          tok : process (ExplicitCtx : ctxs) rest\n      -- Explicit close brace\n      | locToken tok == TokRBrace =\n          closeToExplicit ctxs tok rest\n      -- Prefix pragma: suppress VSEMI before the next token\n      | locToken tok == TokPragmaInline =\n          tok : case rest of\n            (next : rest') -> emitFirstToken ctxs next rest'\n            [] -> closingBraces ctxs []\n      -- Regular token\n      | otherwise = tok : process ctxs rest\n\n    -- Close layout contexts until we find an explicit brace context\n    closeToExplicit :: [LayoutContext] -> Located -> [Located] -> [Located]\n    closeToExplicit (ExplicitCtx : ctxs) tok rest =\n      tok : process ctxs rest\n    closeToExplicit (IndentCtx _ : ctxs) tok rest =\n      Located (locPos tok) TokVRBrace \"\" : closeToExplicit ctxs tok rest\n    closeToExplicit (LetCtx _ : ctxs) tok rest =\n      Located (locPos tok) TokVRBrace \"\" : closeToExplicit ctxs tok rest\n    closeToExplicit [] tok rest =\n      -- unbalanced }, let the parser report the error\n      tok : process [] rest\n\n    -- Emit closing braces for all remaining contexts\n    closingBraces :: [LayoutContext] -> [Located] -> [Located]\n    closingBraces [] rest = rest\n    closingBraces (_ : cs) rest =\n      let p = case rest of\n            (Located pp _ _ : _) -> pp\n            [] -> Pos 1 1 \"\"\n       in Located p TokVRBrace \"\" : closingBraces cs rest\n"
  },
  {
    "path": "library/Morloc/Frontend/Link.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Frontend.Link\nDescription : Link terms, sources, and typeclasses into 'MorlocState'\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nWalks the module DAG bottom-up and populates 'MorlocState' with signatures\n('stateSignatures'), typeclass instances ('stateTypeclasses'), and name\nmappings ('stateName'). This is the bridge between the parsed AST and the\nstate-based representation consumed by 'Treeify'.\n-}\nmodule Morloc.Frontend.Link (link) where\n\nimport Data.Set (Set)\nimport qualified Data.Set as Set\nimport qualified Morloc.Data.DAG as DAG\nimport Morloc.Data.Doc\nimport Morloc.Data.Map (Map)\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as DT\nimport Morloc.Frontend.Merge (mergeSignatureSet, weaveTermTypes)\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\nimport Morloc.Typecheck.Internal (qualify, unqualify)\n\n-- The following terms are modified in morloc state\n--  * stateSignatures :: GMap Int Int SignatureSet -- links terms to types\n--    - update SignatureSet when ClsE or IstE is encountered\n--    - add term entry when any term is encountered\n--  * stateTypeclasses :: Map EVar Instance -- only used to retrieve global typeclass terms, like pack and unpack\n--    - update when Packable instance is encountered\n--  * stateName :: Map Int EVar\n--    - update when term is encountered (same places as stateSignature term updates)\n\n-- when each new module is entered, the keys for linkTerms and linkClasses is\n-- rewritten based on the AliasedSymbol edge aliases\ndata LinkState = LinkState\n  { linkTerms :: Map EVar Int\n  -- ^ stateSignatures index 2\n  , linkClasses :: Map ClassName (Int, Typeclass Signature, Map EVar Int)\n  -- ^ maps class methods into stateSignatures index 2\n  }\n\n{- | Synthesize the module DAG bottom-up, linking all terms, sources,\ntypeclass instances, and declarations into 'MorlocState'.\n-}\nlink :: DAG MVar [AliasedSymbol] ExprI -> MorlocMonad ()\nlink d0 = do\n  mayResult <- DAG.synthesizeNodes synth d0\n  case mayResult of\n    Nothing -> error \"cyclical\"\n    (Just _) -> return ()\n\nsynth ::\n  MVar ->\n  ExprI ->\n  [(MVar, [AliasedSymbol], LinkState)] ->\n  MorlocMonad LinkState\nsynth k0 e0 edges = do\n  inheritedState <- mapM (realiasLinkState k0) edges >>= mergeLinkStates k0\n  finalState <- addLocalState k0 e0 inheritedState\n  _ <- linkLocalTerms k0 finalState e0\n  return finalState\n\n-- Will raise error if any import term is absent in the linkstate list\nrealiasLinkState ::\n  MVar -> (MVar, [AliasedSymbol], LinkState) -> MorlocMonad (MVar, MVar, LinkState)\nrealiasLinkState m1 (m2, ss, s) = do\n  termmap <-\n    mergeValues \"terms\" $\n      map (\\(n, a) -> (a, Map.lookup n (linkTerms s))) [(n, a) | AliasedTerm n a <- ss]\n  classmap <-\n    mergeValues \"classes\" $ map (\\n -> (n, Map.lookup n (linkClasses s))) [n | AliasedClass n <- ss]\n  return (m1, m2, LinkState termmap classmap)\n  where\n    mergeValues :: (Ord k, Show k) => String -> [(k, Maybe v)] -> MorlocMonad (Map k v)\n    mergeValues msg xs = case [v | (v, Nothing) <- xs] of\n      [] -> return $ Map.fromList [(v, i) | (v, Just i) <- xs]\n      missing -> error $ \"Undefined \" <> msg <> \" imports:\" <> show missing <> \"\\n  ss = \" <> show ss\n\nthrowInheritanceError :: MVar -> MDoc -> MorlocMonad a\nthrowInheritanceError m msg = MM.throwSystemError $ \"In module\" <+> squotes (pretty m) <> \":\" <+> msg\n\n-- All LinkState objects whould have already been renamed by realiasLinkState.\n--\n-- will raise error if\n--   1. if multiple separate typeclasses are imported under the same name\n--   2. if any imported term overlaps with a typeclass method\n--   3. if multiple separate terms are imported under the same name\n--\n-- This function needs to iterate through the imports such that the modules\n-- involved with conflicts can be identified for better error messages. So all\n-- imported terms are first pooled, then iterated through one-by-one checking\n-- for conflicts. Term identity is based on index, which maps one-to-one to type\n-- or class signature.\nmergeLinkStates :: MVar -> [(MVar, MVar, LinkState)] -> MorlocMonad LinkState\nmergeLinkStates m0 imps = do\n  -- Set EVar\n  let terms = Set.unions $ [Map.keysSet s | (_, _, LinkState s _) <- imps]\n      -- Set ClassName\n      classes = Set.unions $ [Map.keysSet s | (_, _, LinkState _ s) <- imps]\n      -- Map EVar [(MVar, Int)]\n      termGroups =\n        Map.fromSet\n          (\\k -> catMaybes [(,,) m1 m2 <$> Map.lookup k (linkTerms s) | (m1, m2, s) <- imps])\n          terms\n      -- Map ClassName [(MVar, (Int, Map EVar Int))]\n      classGroups =\n        Map.fromSet\n          (\\k -> catMaybes [(,,) m1 m2 <$> Map.lookup k (linkClasses s) | (m1, m2, s) <- imps])\n          classes\n\n  termmap <- Map.mapWithKeyM mergeTerms termGroups\n  classmap <- Map.mapWithKeyM mergeClasses classGroups\n  _ <- checkTermClassConflicts termmap classmap\n  return $ LinkState termmap classmap\n  where\n    mergeTerms :: EVar -> [(MVar, MVar, Int)] -> MorlocMonad Int\n    mergeTerms _ [] = error \"Compiler bug: This cannot be empty\"\n    mergeTerms _ [(_, _, i)] = return i\n    mergeTerms v ((m1, importMod1, i) : (_, importMod2, j) : xs)\n      | i == j = mergeTerms v ((m1, importMod1, j) : xs)\n      | otherwise =\n          throwInheritanceError m0 $\n            \"Illegal masking of type signatures for\" <+> squotes (pretty v)\n              <> \"\\n It is imported from modules\"\n                <+> squotes (pretty importMod1)\n                <+> \"and\"\n                <+> squotes (pretty importMod2)\n              <> \"\\n Terms may have multiple implementations but not multiple type signatures\"\n\n    mergeClasses ::\n      ClassName -> [(MVar, MVar, (Int, a, Map EVar Int))] -> MorlocMonad (Int, a, Map EVar Int)\n    mergeClasses _ [] = error \"This will never be empty\"\n    mergeClasses _ [(_, _, x)] = return x\n    mergeClasses v ((_, m1b, (i, _, _)) : (m2a, m2b, y@(j, _, _)) : xs)\n      | i == j = mergeClasses v ((m2a, m2b, y) : xs)\n      | otherwise =\n          throwInheritanceError m0 $\n            \"\\n  Cannot merge non-eqivalent definitions of typeclass\"\n              <+> squotes (pretty v)\n              <+> \"\\n  Definitions are imported from modules\"\n              <+> squotes (pretty m1b)\n              <+> \"and\"\n              <+> squotes (pretty m2b)\n\n    checkTermClassConflicts :: Map EVar Int -> Map ClassName (Int, a, Map EVar Int) -> MorlocMonad ()\n    checkTermClassConflicts me mc = case catMaybes . map (checkTermClassConflict me) $ Map.toList mc of\n      [] -> return ()\n      ((cls, vs) : _) ->\n        throwInheritanceError m0 $\n          \"\\n  The following terms are defined both as polymorphic terms in typeclass\"\n            <+> squotes (pretty cls)\n            <+> \"and as independent monomorphic terms:\"\n            <+> list (map pretty vs)\n\n    checkTermClassConflict ::\n      Map EVar Int -> (ClassName, (Int, a, Map EVar Int)) -> Maybe (ClassName, [EVar])\n    checkTermClassConflict me (cls, (_, _, mc)) = case Set.toList (Set.intersection (Map.keysSet me) (Map.keysSet mc)) of\n      [] -> Nothing\n      conflicts -> Just (cls, conflicts)\n\n-- updates stateSignature index 2\naddLocalState :: MVar -> ExprI -> LinkState -> MorlocMonad LinkState\naddLocalState m0 e0 s0 = do\n  s1 <- findDefs e0 s0\n  (_, s2) <- findFreeDefs e0 (Set.empty, s1)\n  return s2\n  where\n    -- Iterate through the expression and add signatures and typeclasses. This\n    -- needs to be done before the sources, declarations, and instances are\n    -- added, since these all need to augment code that has already be indexed.\n    findDefs (ExprI sigIndex (SigE (Signature v _ e))) lstate = do\n      -- get the (GMap Int Int SigantureSet) map from state\n      (GMap idmap sigmap) <- MM.gets stateSignatures\n      -- define a new monomorphic term with no implementations\n      let sigset = Monomorphic (TermTypes (Just e) [] [])\n          sigmap' = Map.insert sigIndex sigset sigmap\n      -- update state with the signature, the signature index will be linked to by\n      -- all future terms of this type (even after they have been aliased)\n      MM.modify (\\s -> s {stateSignatures = GMap idmap sigmap'})\n      -- update the map between term names and signature indices\n      return $ lstate {linkTerms = Map.insert v sigIndex (linkTerms lstate)}\n    -- create new entries for class definitions and type signatures\n    findDefs (ExprI clsIndex (ClsE tcls@(Typeclass constraints cls vs sigs))) lstate = do\n      -- get sigmap\n      (GMap idmap sigmap) <- MM.gets stateSignatures\n      -- generate an index for each signature in this typeclass\n      sigsIdx <- mapM (\\sig -> (,) <$> MM.getCounter <*> pure sig) sigs\n      -- add these new typeclass methods to stateSignatures as polymorphic entries\n      let sigmap' = foldr (\\(i, Signature v _ t) m -> Map.insert i (Polymorphic cls v t []) m) sigmap sigsIdx\n\n      -- setup stateTypeclasses\n      let xs = [(v, Instance cls vs et []) | Signature v _ et <- sigs]\n      tmap <- MM.gets stateTypeclasses\n      tmap' <- foldlM (\\m (k, v) -> insertWithCheck k v m) tmap xs\n\n      -- update morloc state\n      MM.modify\n        ( \\s ->\n            s\n              { stateSignatures = GMap idmap sigmap'\n              , stateTypeclasses = tmap'\n              , stateClassDefs = Map.insert cls constraints (stateClassDefs s)\n              }\n        )\n\n      -- generate the (Map EVar Int) list for LinkedState\n      let vmap = Map.fromList [(v, i) | (i, Signature v _ _) <- sigsIdx]\n          classes = Map.insert cls (clsIndex, tcls, vmap) (linkClasses lstate)\n      return $ lstate {linkClasses = classes}\n    -- We only search for definitions at the top level. This may be the top-level\n    -- inside of a where statement.\n    findDefs (ExprI _ (ModE _ es)) lstate = foldrM findDefs lstate es\n    -- All other types return the map unchanged\n    findDefs _ lstate = return lstate\n\n    insertWithCheck :: EVar -> Instance -> Map EVar Instance -> MorlocMonad (Map EVar Instance)\n    insertWithCheck k v m = case Map.lookup k m of\n      (Just inst2) ->\n        throwInheritanceError m0 $\n          \"The typeclasses\"\n            <+> (squotes . pretty . className $ inst2)\n            <+> \"and\"\n            <+> (squotes . pretty . className $ v)\n            <+> \"have conflicting definitions of the term\"\n            <+> squotes (pretty k)\n      Nothing -> return $ Map.insert k v m\n\n    -- Handle assignments that do not have signatures\n    findFreeDefs (ExprI _ (AssE v _ _)) (terms, lstate)\n      | Set.member v terms || Map.member v (linkTerms lstate) = return (terms, lstate)\n      | otherwise = do\n          -- make new index to use for all definitions of this term\n          idx <- MM.getCounter\n          (GMap idmap sigmap) <- MM.gets stateSignatures\n          -- define a new monomorphic term with no implementation and no type\n          let sigset = Monomorphic (TermTypes Nothing [] [])\n              sigmap' = Map.insert idx sigset sigmap\n              terms' = Set.insert v terms\n          MM.modify (\\s -> s {stateSignatures = GMap idmap sigmap'})\n          let lstate' = lstate {linkTerms = Map.insert v idx (linkTerms lstate)}\n          return (terms', lstate')\n    findFreeDefs (ExprI _ (LetE bindings body)) s = do\n      s' <- foldrM (\\(_, e) s0' -> findFreeDefs e s0') s bindings\n      findFreeDefs body s'\n    findFreeDefs (ExprI _ (ModE _ es)) s = foldrM findFreeDefs s es\n    findFreeDefs (ExprI _ (IfE c t e)) s = findFreeDefs c s >>= findFreeDefs t >>= findFreeDefs e\n    findFreeDefs (ExprI _ (DoBlockE e)) s = findFreeDefs e s\n    findFreeDefs (ExprI _ (EvalE e)) s = findFreeDefs e s\n    findFreeDefs (ExprI _ (IntrinsicE _ es)) s = foldrM findFreeDefs s es\n    findFreeDefs _ s = return s\n\ntoCondensedState :: LinkState -> Map EVar (Int, Maybe (Typeclass Signature))\ntoCondensedState s = Map.union terms classes\n  where\n    terms = Map.map (\\i -> (i, Nothing)) (linkTerms s)\n    classes =\n      Map.fromList . concat $\n        [ [(v, (i, Just tcls)) | (v, i) <- Map.toList emap]\n        | (_, tcls, emap) <- Map.elems (linkClasses s)\n        ]\n\n-- link source, declaration, and instance to stateSignature index 2\n-- link terms to stateSignature index 1 and stateName\nlinkLocalTerms :: MVar -> LinkState -> ExprI -> MorlocMonad ()\nlinkLocalTerms m0 s0 e0 = linkLocal Set.empty s0 (toCondensedState s0) e0\n  where\n    -- link a new source statement to its type in morloc state\n    linkLocal ::\n      Set EVar -> LinkState -> Map EVar (Int, Maybe (Typeclass Signature)) -> ExprI -> MorlocMonad ()\n    linkLocal _ _ cs (ExprI i (SrcE src)) = do\n      case Map.lookup (srcAlias src) cs of\n        -- A source with no associated type signature may be a constructor.\n        -- If it is a term, then it must have a signature if it is to be used, but\n        -- its use will raise a dedicated error later. So we let it pass for now.\n        Nothing -> return ()\n        (Just (_, Just (Typeclass _ cls _ _))) ->\n          MM.throwSourcedError i $\n            \"Source term\"\n              <+> squotes (pretty (srcAlias src))\n              <+> \"conflicts with the same term in typeclass\"\n              <+> squotes (pretty cls)\n        (Just (termIdx, Nothing)) -> do\n          (GMap idmap sigmap) <- MM.gets stateSignatures\n          case Map.lookup termIdx sigmap of\n            Nothing ->\n              error\n                \"This should be unreachable since there is an associated signature and it should have been loaded\"\n            (Just (Monomorphic tt)) -> do\n              let srcTerm = (m0, Idx i src)\n                  tt' = tt {termConcrete = srcTerm : termConcrete tt}\n                  sigmap' = Map.insert termIdx (Monomorphic tt') sigmap\n                  idmap' = Map.insert i termIdx idmap\n              MM.modify (\\s -> s {stateSignatures = GMap idmap' sigmap'})\n            (Just (Polymorphic cls _ _ _)) ->\n              MM.throwSourcedError i $\n                \"Source term\"\n                  <+> squotes (pretty (srcAlias src))\n                  <+> \" overlaps a term in typeclass\"\n                  <+> squotes (pretty cls)\n\n    -- link a new declaration to its type in morloc state and recurse into its\n    -- local where block as needed\n    linkLocal bnds c cs (ExprI i (AssE v e es)) = do\n      updateName i v\n      case Map.lookup v cs of\n        Nothing -> error \"Bug: This case should be unreachable\"\n        (Just (_, Just _)) -> undefined -- handle error for src that overlaps typeclass term\n        (Just (termIdx, Nothing)) -> do\n          (GMap idmap sigmap) <- MM.gets stateSignatures\n          case Map.lookup termIdx sigmap of\n            Nothing ->\n              error\n                \"Bug: This should be unreachable since there is an associated signature and it should have been loaded\"\n            (Just (Monomorphic tt)) -> do\n              let tt' = tt {termDecl = e : termDecl tt}\n                  sigmap' = Map.insert termIdx (Monomorphic tt') sigmap\n                  idmap' = Map.insert i termIdx idmap\n              MM.modify (\\ms -> ms {stateSignatures = GMap idmap' sigmap'})\n              (bnds', c', _) <- case e of\n                (ExprI _ (LamE vs _)) ->\n                  return\n                    ( foldr Set.insert bnds vs\n                    , c {linkTerms = foldr Map.delete (linkTerms c) vs}\n                    , foldr Map.delete cs vs\n                    )\n                _ -> return (bnds, c, cs)\n              -- link expressions in the where statement within a local scope\n              c'' <- foldrM (addLocalState m0) c' (e : es)\n\n              mapM_ (linkLocal bnds' c'' (toCondensedState c'')) (e : es)\n            (Just (Polymorphic cls _ _ _)) ->\n              MM.throwSourcedError i $\n                \"Declared term\" <+> squotes (pretty v) <+> \" overlaps a term in typeclass\" <+> squotes (pretty cls)\n    linkLocal bnds c cs (ExprI i (IstE cls ts es)) = do\n      case Map.lookup cls (linkClasses c) of\n        Nothing ->\n          MM.throwSourcedError i $\n            \"There is no typeclass declaration for instance\"\n              <+> squotes (pretty cls)\n              <+> \"in the scope of module\"\n              <+> squotes (pretty m0)\n        (Just (_, Typeclass superConstraints _ vs sigs, emap)) ->\n          if length vs /= length ts\n            then\n              MM.throwSourcedError i $\n                \"In module\" <+> squotes (pretty m0)\n                  <> \": the instance and typeclass definitions for\"\n                    <+> squotes (pretty cls)\n                    <+> \"differ in number of terms\"\n            else do\n              checkSuperclassConstraints i cls (zip vs ts) superConstraints\n              if null es\n                then\n                  -- Source-less instance: register each method with its\n                  -- specialized general type but no implementations.\n                  -- This enables typechecking without requiring sources.\n                  linkEmptyInstance m0 cls (zip vs ts) sigs emap\n                else\n                  mapM_ (linkInstance (linkLocal bnds c cs) m0 cls (zip vs ts) sigs emap) es\n    linkLocal bnds _ cs (ExprI termIdx (VarE _ v))\n      | Set.member v bnds = return ()\n      | otherwise = case Map.lookup v cs of\n          -- handle both monomorphic terms and polymorphic typeclass terms\n          (Just (sigIdx, _)) -> updateSigLinks v termIdx sigIdx\n          Nothing -> MM.throwSourcedError termIdx $ \"Undefined term: \" <> pretty v\n    linkLocal _ _ cs (ExprI i (ExpE (ExportMany (Set.toList -> ss) gs))) =\n      let allSs = ss ++ concatMap (Set.toList . exportGroupMembers) gs\n       in mapM_ linkExp [(v, termIdx, Map.lookup v cs) | (termIdx, TermSymbol v) <- allSs]\n      where\n        linkExp :: (EVar, Int, Maybe (Int, a)) -> MorlocMonad ()\n        linkExp (v, termIdx, Just (sigIdx, _)) = updateSigLinks v termIdx sigIdx\n        -- TODO: give this a good error message - it is a user facing issue\n        -- Is raised when an exported term, such as a sourced function, is\n        -- exported with no signature in scope.\n        linkExp (v, _, Nothing) =\n          MM.throwSourcedError i $ \"Undefined export\" <+> squotes (pretty v)\n    linkLocal _ _ _ (ExprI _ (ExpE ExportAll)) = error \"Bug: ExportAll should no longer be present\"\n    -- Shadowing is allowed, so here all terms in `s` that are bound by the lambda\n    -- need to be removed\n    linkLocal bnds c cs (ExprI i (LamE vs e)) = do\n      (c', cs') <- foldlM shadow (c, cs) vs\n      let bnds' = Set.union bnds (Set.fromList vs)\n      linkLocal bnds' c' cs' e\n      where\n        shadow ::\n          (LinkState, Map EVar (Int, Maybe (Typeclass Signature))) ->\n          EVar ->\n          MorlocMonad (LinkState, Map EVar (Int, Maybe (Typeclass Signature)))\n        shadow (ls, cs') v = case Map.lookup v cs' of\n          Nothing -> return (ls, cs)\n          (Just (_, Nothing)) -> return (ls {linkTerms = Map.delete v (linkTerms ls)}, Map.delete v cs')\n          (Just (_, Just _)) ->\n            MM.throwSourcedError i $ \"Illegal shadowing of typeclass term:\" <+> pretty v\n\n    -- The `m` should always be the same as `m0`, since modules don't next.\n    -- Even if there are two modules defined in one file, they will still be\n    -- unnested, same as if they were in different files.\n    linkLocal bnds c cs (ExprI i (ModE m es))\n      | m /= m0 = MM.throwSourcedError i $ \"Nested modules are not currently supported\"\n      | otherwise = mapM_ (linkLocal bnds c cs) es\n    -- let-bound variables are local, like lambda-bound\n    linkLocal bnds c cs (ExprI _ (LetE bindings body)) = do\n      bnds' <-\n        foldlM\n          ( \\b (v, e) -> do\n              linkLocal b c cs e\n              return (Set.insert v b)\n          )\n          bnds\n          bindings\n      linkLocal bnds' c cs body\n    -- simple recursive cases\n    linkLocal bnds c cs (ExprI _ (LstE es)) = mapM_ (linkLocal bnds c cs) es\n    linkLocal bnds c cs (ExprI _ (TupE es)) = mapM_ (linkLocal bnds c cs) es\n    linkLocal bnds c cs (ExprI _ (NamE (map snd -> es))) = mapM_ (linkLocal bnds c cs) es\n    linkLocal bnds c cs (ExprI _ (AppE e es)) = mapM_ (linkLocal bnds c cs) (e : es)\n    linkLocal bnds c cs (ExprI _ (AnnE e _)) = linkLocal bnds c cs e\n    linkLocal bnds c cs (ExprI _ (IfE cond thenE elseE)) = mapM_ (linkLocal bnds c cs) [cond, thenE, elseE]\n    linkLocal bnds c cs (ExprI _ (DoBlockE e)) = linkLocal bnds c cs e\n    linkLocal bnds c cs (ExprI _ (EvalE e)) = linkLocal bnds c cs e\n    linkLocal bnds c cs (ExprI _ (IntrinsicE _ es)) = mapM_ (linkLocal bnds c cs) es\n    -- terminal cases\n    linkLocal _ _ _ _ = return ()\n\n    updateSigLinks :: EVar -> Int -> Int -> MorlocMonad ()\n    updateSigLinks v termIdx sigIdx = do\n      (GMap idmap sigmap) <- MM.gets stateSignatures\n      let idmap' = Map.insert termIdx sigIdx idmap\n      MM.modify\n        ( \\ms ->\n            ms\n              { stateSignatures = GMap idmap' sigmap\n              , stateName = Map.insert termIdx v (stateName ms)\n              }\n        )\n\n    updateName :: Int -> EVar -> MorlocMonad ()\n    updateName i v = MM.modify (\\s -> s {stateName = Map.insert i v (stateName s)})\n\n-- Register a source-less instance: for each method in the typeclass, create a\n-- TermTypes with the specialized general type but no concrete implementations.\n-- This allows typechecking to succeed even without language-specific sources.\nlinkEmptyInstance ::\n  MVar ->\n  ClassName ->\n  [(TVar, TypeU)] ->\n  [Signature] ->\n  Map EVar Int ->\n  MorlocMonad ()\nlinkEmptyInstance _ cls0 params0 sigs emap = mapM_ go sigs\n  where\n    go (Signature v _ et) =\n      case Map.lookup v emap of\n        Nothing -> return ()\n        Just stateIdx -> do\n          t <- substituteInstanceTypes params0 (etype et)\n          let et' = et {etype = t}\n              tt = TermTypes (Just et') [] []\n          (GMap idmap sigmap) <- MM.gets stateSignatures\n          tcls <- MM.gets stateTypeclasses\n          case Map.lookup stateIdx sigmap of\n            Nothing -> return ()\n            (Just sigset) -> do\n              -- Use the general class type (et) for the Polymorphic wrapper,\n              -- matching linkInstance. The specialized type (et') is only\n              -- inside the TermTypes.\n              sigset' <- mergeSignatureSet sigset (Polymorphic cls0 v et [tt])\n              let sigmap' = Map.insert stateIdx sigset' sigmap\n              newInstance <- case Map.lookup v tcls of\n                Nothing ->\n                  return $ Instance cls0 (map fst params0) et [tt]\n                (Just inst) ->\n                  return $ inst {instanceTerms = weaveTermTypes tt (instanceTerms inst)}\n              let tcls' = Map.insert v newInstance tcls\n              MM.modify (\\ms -> ms {stateSignatures = GMap idmap sigmap', stateTypeclasses = tcls'})\n\n-- Goal:\n--   for each term in [Signature], add a TermTypes instance to the correct\n--   SignatureSet in stateSignatures. This will be a polymorphic case.\n--     Polymorphic ClassName EVar EType [TermTypes]\nlinkInstance ::\n  (ExprI -> MorlocMonad ()) ->\n  MVar ->\n  ClassName ->\n  [(TVar, TypeU)] ->\n  [Signature] ->\n  Map EVar Int ->\n  ExprI ->\n  MorlocMonad ()\nlinkInstance linker m0 cls0 params0 sigs0 emap0 e0 = linkExpr e0\n  where\n    linkExpr (ExprI i (SrcE src)) = do\n      let v = srcAlias src\n      (Signature _ _ et, stateIdx) <- lookupInfo v\n      t <- substituteInstanceTypes params0 (etype et)\n      let et' = et {etype = t}\n      let tt = TermTypes (Just et') [(m0, Idx i src)] []\n      linkTermTypes v et tt stateIdx\n    linkExpr (ExprI _ (AssE v e es)) = do\n      mapM_ linker (e : es)\n      (Signature _ _ et, stateIdx) <- lookupInfo v\n      t <- substituteInstanceTypes params0 (etype et)\n      let et' = et {etype = t}\n      let tt = TermTypes (Just et') [] [e]\n      linkTermTypes v et tt stateIdx\n    linkExpr _ =\n      error\n        \"Unreachable, instances may only contain sources and instances -- this should have been caught in the parser.\"\n\n    lookupInfo :: EVar -> MorlocMonad (Signature, Int)\n    lookupInfo v = case ([sig | sig@(Signature v' _ _) <- sigs0, v == v'], Map.lookup v emap0) of\n      ([sig], Just i) -> return (sig, i)\n      _ ->\n        throwInheritanceError m0 $\n          \"\\n  Instance of class\"\n            <+> squotes (pretty cls0)\n            <+> \"contains undefined term\"\n            <+> squotes (pretty v)\n\n    linkTermTypes :: EVar -> EType -> TermTypes -> Int -> MorlocMonad ()\n    linkTermTypes v et tt stateIdx = do\n      (GMap idmap sigmap) <- MM.gets stateSignatures\n      tcls <- MM.gets stateTypeclasses\n      case Map.lookup stateIdx sigmap of\n        Nothing -> undefined -- should be unreachable\n        (Just sigset) -> do\n          sigset' <- mergeSignatureSet sigset (Polymorphic cls0 v et [tt])\n          let sigmap' = Map.insert stateIdx sigset' sigmap\n          newInstance <- case Map.lookup v tcls of\n            Nothing -> do\n              return $ Instance cls0 (map fst params0) et [tt]\n            (Just inst) -> do\n              return $ inst {instanceTerms = weaveTermTypes tt (instanceTerms inst)}\n          let tcls' = Map.insert v newInstance tcls\n          MM.modify (\\ms -> ms {stateSignatures = GMap idmap sigmap', stateTypeclasses = tcls'})\n\n{- Substitute the instance types into the class function definition\n\nSuppose we have the following class and instances:\n\nclass Reversible a b where\n  forward :: a -> b\n  backward :: b -> a\n\ninstance Reversible ([a],[b]) [(a,b)] where\n  ...\n\nIf we are handling the single instance above for the `forward` function:\n\n  classVars: [a, b]\n  classType: forall a b . a -> b\n  instanceParameters: forall a b . ([a], [b])\n                      forall a b . [(a, b)]\n\nand the return type should be\n\n  forall a b . ([a],[b]) -> [(a,b)]\n\nA problem here is that the instance parameters *share* qualifiers. The `a` and `b`\nin the first instance parameter are the same as those in the second. But not the\nsame as the `a` and `b` in the class.\n\n-}\nsubstituteInstanceTypes :: [(TVar, TypeU)] -> TypeU -> MorlocMonad TypeU\nsubstituteInstanceTypes (unzip -> (clsVars, instanceParameters)) clsType = do\n  -- find all qualifiers in the instance parameter list\n  let instanceQualifiers = unique $ concatMap (fst . unqualify) instanceParameters\n\n      -- rewrite the class type such that the class qualifiers appear first and\n      -- do not conflict with parameter qualifiers\n      cleanClassType = replaceQualifiers instanceQualifiers (putClassVarsFirst clsType)\n\n      -- substitute in the parameter types\n      finalType =\n        qualify instanceQualifiers $\n          substituteQualifiers cleanClassType (map (snd . unqualify) instanceParameters)\n\n  MM.sayVVV $\n    \"substituteInstanceTypes\"\n      <> \"\\n  classVars:\" <+> pretty clsVars\n      <> \"\\n  classType:\" <+> pretty clsType\n      <> \"\\n  instanceParameters:\" <+> pretty instanceParameters\n      <> \"\\n  -------------------\"\n      <> \"\\n  instanceQualifiers:\" <+> pretty instanceQualifiers\n      <> \"\\n  cleanClassType:\" <+> pretty cleanClassType\n      <> \"\\n  finalType:\" <+> pretty finalType\n\n  return finalType\n  where\n    putClassVarsFirst :: TypeU -> TypeU\n    putClassVarsFirst t =\n      let (vs, t') = unqualify t\n       in qualify (clsVars <> filter (`notElem` clsVars) vs) t'\n\n    replaceQualifiers :: [TVar] -> TypeU -> TypeU\n    replaceQualifiers vs0 t0 = f vs0 [r | r <- freshVariables, r `notElem` doNotUse] t0\n      where\n        -- qualifiers to avoid when replacing\n        doNotUse = vs0 <> (fst . unqualify) t0\n\n        f (v : vs) (r : rs) (ForallU v' t)\n          | v == v' = ForallU r . f vs rs $ substituteTVar v' (VarU r) t\n          | otherwise = ForallU v' (f (v : vs) (r : rs) t)\n        f vs rs (EffectU effs t) = EffectU effs (f vs rs t)\n        f vs rs (OptionalU t) = OptionalU (f vs rs t)\n        f _ _ t = t\n\n        freshVariables = [1 ..] >>= flip replicateM ['a' .. 'z'] |>> TV . DT.pack\n\n    substituteQualifiers :: TypeU -> [TypeU] -> TypeU\n    substituteQualifiers (ForallU v t) (r : rs) = substituteQualifiers (substituteTVar v r t) rs\n    substituteQualifiers t _ = t\n\n-- Check that all superclass constraints are satisfied for an instance declaration.\n-- For example, if `class Eq a => Ord a`, then `instance Ord Int` requires `instance Eq Int`.\ncheckSuperclassConstraints :: Int -> ClassName -> [(TVar, TypeU)] -> [Constraint] -> MorlocMonad ()\ncheckSuperclassConstraints _ _ _ [] = return ()\ncheckSuperclassConstraints i cls params constraints = do\n  classDefs <- MM.gets stateClassDefs\n  mapM_ (checkOne classDefs) constraints\n  where\n    checkOne :: Map ClassName [Constraint] -> Constraint -> MorlocMonad ()\n    checkOne classDefs (Constraint superCls superTypeArgs) = do\n      let substArgs = applyParams superTypeArgs params\n      case Map.lookup superCls classDefs of\n        Nothing ->\n          MM.throwSourcedError i $\n            \"Superclass\" <+> squotes (pretty superCls) <+> \"is not defined\"\n        Just _ -> do\n          tcls <- MM.gets stateTypeclasses\n          let methodsOfSuper = [inst | (_, inst) <- Map.toList tcls, className inst == superCls]\n              hasMatchingInstance = any (matchesSuperInstance substArgs) methodsOfSuper\n          if not hasMatchingInstance\n            then\n              MM.throwSourcedError i $\n                \"Instance\"\n                  <+> pretty cls\n                  <+> hsep (map pretty (map snd params))\n                  <+> \"requires\"\n                  <+> pretty superCls\n                  <+> hsep (map pretty substArgs)\n                  <+> \"but no such instance exists\"\n            else return ()\n\n    applyParams :: [TypeU] -> [(TVar, TypeU)] -> [TypeU]\n    applyParams ts [] = ts\n    applyParams ts ((v, r) : ps) = applyParams (map (substituteTVar v r) ts) ps\n\n    matchesSuperInstance :: [TypeU] -> Instance -> Bool\n    matchesSuperInstance _ inst = not (null (instanceTerms inst))\n"
  },
  {
    "path": "library/Morloc/Frontend/Merge.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Frontend.Merge\nDescription : Merge and unify type signatures, term types, and typeclasses\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nMorloc allows multiple implementations of the same term. When definitions\nfrom different modules are combined, their type signatures, source\nimplementations, and typeclass instances must be merged while detecting\nconflicts. This module provides the merge operations used by 'Link' and\n'Restructure' to unify these definitions.\n-}\nmodule Morloc.Frontend.Merge\n  ( mergeTermTypes\n  , weaveTermTypes\n  , mergeEType\n  , mergeTypeUs\n  , mergeTypeclasses\n  , mergeIndexedInstances\n  , unionTermTypes\n  , mergeSignatureSet\n  , mergeFirstIndexM\n  ) where\n\n-- TODO: tighten all this up, formalize these operations and follow the\n-- conventions below. Make a typeclass for these mergeable types.\n--\n-- union :: [a] -> [a] -> Either MorlocError [a]\n-- weave :: a -> [a] -> Either MorlocError [a]\n-- merge :: a -> a -> Either MorlocError a\n\nimport Morloc.Data.Doc\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\n\n-- | Merge two 'Indexed' values, keeping the index from the first.\nmergeFirstIndexM :: (Monad m) => (a -> a -> m a) -> Indexed a -> Indexed a -> m (Indexed a)\nmergeFirstIndexM f (Idx i x) (Idx _ y) = Idx i <$> f x y\n\n-- | Merge two 'TermTypes', combining general types, sources, and declarations.\nmergeTermTypes :: TermTypes -> TermTypes -> MorlocMonad TermTypes\nmergeTermTypes (TermTypes g1 cs1 es1) (TermTypes g2 cs2 es2) =\n  TermTypes\n    <$> maybeCombine mergeEType g1 g2\n    <*> pure (unique (cs1 <> cs2))\n    <*> pure (unique (es1 <> es2))\n  where\n    -- either combine terms or take the first on that is defined, or whatever\n    maybeCombine :: (Monad m) => (a -> a -> m a) -> Maybe a -> Maybe a -> m (Maybe a)\n    maybeCombine f (Just a) (Just b) = Just <$> f a b\n    maybeCombine _ (Just a) _ = return $ Just a\n    maybeCombine _ _ (Just b) = return $ Just b\n    maybeCombine _ _ _ = return Nothing\n\n{- | Insert a 'TermTypes' into a list, merging with an existing entry\nif they share an equivalent general type.\n-}\nweaveTermTypes :: TermTypes -> [TermTypes] -> [TermTypes]\nweaveTermTypes t1@(TermTypes (Just gt1) srcs1 es1) (t2@(TermTypes (Just gt2) srcs2 es2) : ts)\n  | equivalent (etype gt1) (etype gt2) =\n      TermTypes (Just gt1) (unique (srcs1 <> srcs2)) (es1 <> es2) : ts\n  | otherwise = t2 : weaveTermTypes t1 ts\nweaveTermTypes (TermTypes Nothing srcs1 es1) ((TermTypes e2 srcs2 es2) : ts2) =\n  weaveTermTypes (TermTypes e2 (srcs1 <> srcs2) (es1 <> es2)) ts2\nweaveTermTypes TermTypes {} (TermTypes {} : _) = error \"what the why?\"\nweaveTermTypes t1 [] = [t1]\n\n{- | This function defines how general types are merged. There are decisions\nencoded in this function that should be vary carefully considered.\n * Can properties simply be concatenated?\n * What if constraints are contradictory?\n * Should general type merging even be possible?\n-}\nmergeEType :: EType -> EType -> MorlocMonad EType\nmergeEType (EType t1 cs1 edoc1 labels1) (EType t2 cs2 edoc2 _labels2) =\n  EType <$> mergeTypeUs t1 t2 <*> pure (cs1 <> cs2) <*> pure edocs12 <*> pure labels1\n  where\n    edocs12 = mergeEdocs edoc1 edoc2\n\n    -- TODO: is there a real use case where we would want to merge docstrings?\n    mergeEdocs x _ = x\n\n-- merge two general types\nmergeTypeUs :: TypeU -> TypeU -> MorlocMonad TypeU\nmergeTypeUs t1 t2\n  | equivalent t1 t2 = return t1\n  | otherwise =\n      MM.throwSystemError $\n        \"Incompatible general types:\" <+> parens (pretty t1) <+> \"vs\" <+> parens (pretty t2)\n\nthrowConflictingInstancesError :: MDoc -> Instance -> Instance -> MorlocMonad a\nthrowConflictingInstancesError msg inst1 inst2\n  | inst1 == inst2 =\n      MM.throwSystemError $\n        \"Found conflict between overlapping instances for class\"\n          <+> squotes (pretty (className inst1))\n          <> \":\" <+> msg\n  | otherwise =\n      MM.throwSystemError $\n        \"Found conflict between overlapping instances for classes\"\n          <+> squotes (pretty (className inst1))\n          <+> \"and\"\n          <+> squotes (pretty (className inst2))\n          <> \":\" <+> msg\n\n{- | Merge two typeclass instances, checking for conflicting class names,\ntypes, and parameter counts.\n-}\nmergeTypeclasses :: Instance -> Instance -> MorlocMonad Instance\nmergeTypeclasses inst1@(Instance cls1 vs1 t1 ts1) inst2@(Instance cls2 vs2 t2 ts2)\n  | cls1 /= cls2 = throwConflictingInstancesError \"Mismatched class names\" inst1 inst2\n  | not (equivalent (etype t1) (etype t2)) =\n      throwConflictingInstancesError \"Conflicting typeclass term general type\" inst1 inst2\n  | length vs1 /= length vs2 =\n      throwConflictingInstancesError \"Conflicting typeclass parameter count\" inst1 inst2\n  -- here I should do reciprocal subtyping\n  | otherwise = return $ Instance cls1 vs1 t1 (unionTermTypes ts1 ts2)\n\n-- | Merge two indexed typeclass instances, keeping the left index.\nmergeIndexedInstances ::\n  Indexed Instance ->\n  Indexed Instance ->\n  MorlocMonad (Indexed Instance)\nmergeIndexedInstances = mergeFirstIndexM mergeTypeclasses\n\nthrowSignatureUnificationError :: SignatureSet -> SignatureSet -> MorlocMonad a\nthrowSignatureUnificationError s1 s2 =\n  MM.throwSystemError $\n    \"Cannot unify signatures for the polymorphic signature sets below:\"\n      <> \"\\n  s1:\" <+> pretty s1\n      <> \"\\n  s2:\" <+> pretty s2\n\n-- | Merge two 'SignatureSet' values (both must be the same variant).\nmergeSignatureSet :: SignatureSet -> SignatureSet -> MorlocMonad SignatureSet\nmergeSignatureSet s1@(Polymorphic cls1 v1 t1 ts1) s2@(Polymorphic cls2 v2 t2 ts2)\n  | cls1 == cls2 && equivalent (etype t1) (etype t2) && v1 == v2 =\n      return $ Polymorphic cls1 v1 t1 (unionTermTypes ts1 ts2)\n  | otherwise = throwSignatureUnificationError s1 s2\nmergeSignatureSet (Monomorphic ts1) (Monomorphic ts2) = Monomorphic <$> mergeTermTypes ts1 ts2\nmergeSignatureSet s1 s2 = throwSignatureUnificationError s1 s2\n\n-- | Union two lists of 'TermTypes' by weaving each element from the first list.\nunionTermTypes :: [TermTypes] -> [TermTypes] -> [TermTypes]\nunionTermTypes ts1 ts2 = foldr weaveTermTypes ts2 ts1\n"
  },
  {
    "path": "library/Morloc/Frontend/Namespace.hs",
    "content": "{- |\nModule      : Morloc.Frontend.Namespace\nDescription : Re-exports of core namespace types plus frontend-specific helpers\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nAggregates all core namespace modules ('Prim', 'Type', 'Expr', 'State') into\na single import for frontend code. Also provides expression tree traversals\n('mapExpr', 'mapExprM') and state-index copying ('copyState').\n-}\nmodule Morloc.Frontend.Namespace\n  ( module Morloc.Namespace.Prim\n  , module Morloc.Namespace.Type\n  , module Morloc.Namespace.Expr\n  , module Morloc.Namespace.State\n  , mapExpr\n  , mapExprM\n  , isGeneric\n  , copyState\n  ) where\n\nimport qualified Data.Char as DC\nimport Data.Text (Text)\nimport qualified Data.Text as DT\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport Morloc.Namespace.Type\n\n-- | Determine if a type term is generic (i.e., is the first letter lowercase?)\nisGeneric :: Text -> Bool\nisGeneric typeStr = maybe False (DC.isLower . fst) (DT.uncons typeStr)\n\n-- | Bottom-up map over the 'Expr' layer of an 'ExprI' tree.\nmapExpr :: (Expr -> Expr) -> ExprI -> ExprI\nmapExpr f = g\n  where\n    g (ExprI i (ModE v xs)) = ExprI i . f $ ModE v (map g xs)\n    g (ExprI i (AssE v e es)) = ExprI i . f $ AssE v (g e) (map g es)\n    g (ExprI i (LstE es)) = ExprI i . f $ LstE (map g es)\n    g (ExprI i (TupE es)) = ExprI i . f $ TupE (map g es)\n    g (ExprI i (AppE e es)) = ExprI i . f $ AppE (g e) (map g es)\n    g (ExprI i (NamE rs)) = ExprI i . f $ NamE [(k, g e) | (k, e) <- rs]\n    g (ExprI i (LamE vs e)) = ExprI i . f $ LamE vs (g e)\n    g (ExprI i (AnnE e ts)) = ExprI i . f $ AnnE (g e) ts\n    g (ExprI i e) = ExprI i (f e)\n\n-- | Monadic bottom-up map over the 'Expr' layer of an 'ExprI' tree.\nmapExprM :: (Monad m) => (Expr -> m Expr) -> ExprI -> m ExprI\nmapExprM f = g\n  where\n    g (ExprI i (ModE v xs)) = ExprI i <$> (mapM g xs >>= f . ModE v)\n    g (ExprI i (AssE v e es)) = ExprI i <$> ((AssE v <$> g e <*> mapM g es) >>= f)\n    g (ExprI i (LstE es)) = ExprI i <$> (mapM g es >>= f . LstE)\n    g (ExprI i (TupE es)) = ExprI i <$> (mapM g es >>= f . TupE)\n    g (ExprI i (AppE e es)) = ExprI i <$> ((AppE <$> g e <*> mapM g es) >>= f)\n    g (ExprI i (NamE rs)) = do\n      es' <- mapM (g . snd) rs\n      ExprI i <$> f (NamE (zip (map fst rs) es'))\n    g (ExprI i (LamE vs e)) = ExprI i <$> (g e >>= f . LamE vs)\n    g (ExprI i (AnnE e ts)) = ExprI i <$> ((AnnE <$> g e <*> pure ts) >>= f)\n    g (ExprI i e) = ExprI i <$> f e\n\n{- | Copy all index-keyed state entries from @oldIndex@ to @newIndex@.\nUsed when a module is re-indexed (e.g., after merging duplicate imports).\n-}\ncopyState :: Int -> Int -> MorlocMonad ()\ncopyState oldIndex newIndex = do\n  s <- MM.get\n\n  -- Could be defined more succinctly, but it is IMPERATIVE that every index\n  -- is copied. Listing all fields will ensure that an error is raised if a\n  -- new MorlocState field is added but included in this function.\n  MM.put $\n    MorlocState\n      { statePackageMeta = statePackageMeta s\n      , stateVerbosity = stateVerbosity s\n      , stateCounter = stateCounter s\n      , stateDepth = stateDepth s\n      , stateSignatures = updateGMap (stateSignatures s)\n      , stateTypeclasses = stateTypeclasses s\n      , stateConcreteTypedefs = updateGMap (stateConcreteTypedefs s)\n      , stateGeneralTypedefs = updateGMap (stateGeneralTypedefs s)\n      , stateUniversalGeneralTypedefs = stateUniversalGeneralTypedefs s\n      , stateUniversalConcreteTypedefs = stateUniversalConcreteTypedefs s\n      , stateSources = updateGMap (stateSources s)\n      , stateAnnotations = updateMap (stateAnnotations s)\n      , stateOutfile = stateOutfile s\n      , stateExports = updateList (stateExports s)\n      , stateName = updateMap (stateName s)\n      , stateTermDocs = stateTermDocs s\n      , stateManifoldConfig = updateMap (stateManifoldConfig s)\n      , stateSourceMap = updateMap (stateSourceMap s)\n      , stateSourceText = stateSourceText s\n      , stateBuildConfig = stateBuildConfig s\n      , stateModuleName = stateModuleName s\n      , stateInstall = stateInstall s\n      , stateInstallForce = stateInstallForce s\n      , stateInstallDir = stateInstallDir s\n      , stateClassDefs = stateClassDefs s\n      , stateLangRegistry = stateLangRegistry s\n      , stateExportGroups = stateExportGroups s\n      , stateManifoldLang = stateManifoldLang s\n      , stateManifoldEffects = stateManifoldEffects s\n      , stateProjectRoot = stateProjectRoot s\n      , stateEvalMode = stateEvalMode s\n      , stateModuleDoc = stateModuleDoc s\n      , stateModuleEpilogues = stateModuleEpilogues s\n      }\n  where\n    updateGMap g = case GMap.yIsX oldIndex newIndex g of\n      (Just g') -> g'\n      Nothing -> g\n\n    updateMap m = case Map.lookup oldIndex m of\n      (Just x) -> Map.insert newIndex x m\n      Nothing -> m\n\n    updateList xs = if oldIndex `elem` xs then newIndex : xs else xs\n"
  },
  {
    "path": "library/Morloc/Frontend/Parser.hs",
    "content": "{-# OPTIONS_GHC -w #-}\n{-# LANGUAGE CPP #-}\n{-# LANGUAGE MagicHash #-}\n{-# LANGUAGE BangPatterns #-}\n{-# LANGUAGE TypeSynonymInstances #-}\n{-# LANGUAGE FlexibleInstances #-}\n{-# LANGUAGE PatternGuards #-}\n{-# LANGUAGE NoStrictData #-}\n{-# LANGUAGE UnboxedTuples #-}\n{-# LANGUAGE PartialTypeSignatures #-}\n{-# LANGUAGE OverloadedStrings #-}\n\nmodule Morloc.Frontend.Parser\n  ( readProgram\n  , readType\n  , PState (..)\n  , emptyPState\n  ) where\n\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Data.Map.Strict as Map\nimport qualified Data.Set as Set\nimport qualified Data.Scientific as DS\nimport Data.List (sortBy, foldl')\nimport qualified Control.Monad.State.Strict as State\nimport Morloc.Frontend.Token\nimport Morloc.Frontend.Lexer (lexMorloc, showLexError)\nimport Morloc.Frontend.CST\nimport Morloc.Frontend.Desugar (DState(..), D, ParseError(..), showParseError, desugarProgram, desugarExpr)\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.Type\nimport Morloc.Namespace.Expr\nimport qualified Morloc.BaseTypes as BT\nimport qualified Control.Monad as Happy_Prelude\nimport qualified Data.Bool as Happy_Prelude\nimport qualified Data.Function as Happy_Prelude\nimport qualified Data.Int as Happy_Prelude\nimport qualified Data.List as Happy_Prelude\nimport qualified Data.Maybe as Happy_Prelude\nimport qualified Data.String as Happy_Prelude\nimport qualified Data.Tuple as Happy_Prelude\nimport qualified GHC.Err as Happy_Prelude\nimport qualified GHC.Num as Happy_Prelude\nimport qualified Text.Show as Happy_Prelude\nimport qualified Data.Array as Happy_Data_Array\nimport qualified Data.Bits as Bits\nimport qualified GHC.Exts as Happy_GHC_Exts\nimport Control.Applicative(Applicative(..))\nimport Control.Monad (ap)\n\n-- parser produced by Happy Version 2.1.7\n\ndata HappyAbsSyn \n        = HappyTerminal (Located)\n        | HappyErrorToken Happy_Prelude.Int\n        | HappyAbsSyn7 (([Loc CstExpr], Bool))\n        | HappyAbsSyn8 (TypeU)\n        | HappyAbsSyn9 (Loc CstExpr)\n        | HappyAbsSyn10 ([Loc CstExpr])\n        | HappyAbsSyn17 ([(Loc CstExpr, Loc CstExpr)])\n        | HappyAbsSyn18 ((Loc CstExpr, Loc CstExpr))\n        | HappyAbsSyn19 (Text)\n        | HappyAbsSyn20 ([Text])\n        | HappyAbsSyn22 (CstExport)\n        | HappyAbsSyn23 ([Located])\n        | HappyAbsSyn24 (Located)\n        | HappyAbsSyn28 (Maybe [AliasedSymbol])\n        | HappyAbsSyn29 ([AliasedSymbol])\n        | HappyAbsSyn30 (AliasedSymbol)\n        | HappyAbsSyn32 ((Located, NamType))\n        | HappyAbsSyn33 ((Text, Bool))\n        | HappyAbsSyn34 ([(Key, TypeU)])\n        | HappyAbsSyn36 ((TVar, [Either (TVar, Kind) TypeU]))\n        | HappyAbsSyn37 ([Either (TVar, Kind) TypeU])\n        | HappyAbsSyn38 ((Key, TypeU))\n        | HappyAbsSyn39 ((Located, Key, TypeU))\n        | HappyAbsSyn40 ([(Located, Key, TypeU)])\n        | HappyAbsSyn42 ((TypeU, Bool))\n        | HappyAbsSyn43 ([TypeU])\n        | HappyAbsSyn51 (CstClassHead)\n        | HappyAbsSyn52 ([Constraint])\n        | HappyAbsSyn53 ([CstSigItem])\n        | HappyAbsSyn54 (CstSigItem)\n        | HappyAbsSyn56 ([(ClassName, [TypeU])])\n        | HappyAbsSyn57 ([[Loc CstExpr]])\n        | HappyAbsSyn60 ([EVar])\n        | HappyAbsSyn61 (EVar)\n        | HappyAbsSyn63 (Maybe Text)\n        | HappyAbsSyn64 ([(Text, Maybe Text)])\n        | HappyAbsSyn65 ((Text, Maybe Text))\n        | HappyAbsSyn67 ([(Bool, Text, Located)])\n        | HappyAbsSyn68 ((Bool, Text, Located))\n        | HappyAbsSyn69 ((Text, Located))\n        | HappyAbsSyn73 ([(EVar, Loc CstExpr)])\n        | HappyAbsSyn75 ((EVar, Loc CstExpr))\n        | HappyAbsSyn88 ([(Key, Loc CstExpr)])\n        | HappyAbsSyn89 ((Key, Loc CstExpr))\n        | HappyAbsSyn92 ([CstDoStmt])\n        | HappyAbsSyn96 (CstAccessorBody)\n        | HappyAbsSyn97 (CstAccessorTail)\n        | HappyAbsSyn98 ([CstAccessorBody])\n        | HappyAbsSyn106 (([Loc CstExpr], [Text]))\n        | HappyAbsSyn116 ([EffectLabel])\n        | HappyAbsSyn117 (CstSigType)\n        | HappyAbsSyn118 ([(Pos, TypeU)])\n        | HappyAbsSyn119 ((Pos, TypeU))\n        | HappyAbsSyn124 (Constraint)\n\n{-# NOINLINE happyTokenStrings #-}\nhappyTokenStrings = [\"VLBRACE\",\"VRBRACE\",\"VSEMI\",\"'('\",\"')'\",\"'['\",\"']'\",\"'{'\",\"'}'\",\"'<'\",\"'>'\",\"','\",\"'\\\\\\\\'\",\"'_'\",\"'!'\",\"'?'\",\"'.'\",\"GDOT\",\"NSDOT\",\"LABELCOLON\",\"GDOTCHAIN\",\"'='\",\"'::'\",\"'->'\",\"'=>'\",\"'<-'\",\"'*'\",\"'-'\",\"':'\",\"'module'\",\"'import'\",\"'source'\",\"'from'\",\"'where'\",\"'as'\",\"'True'\",\"'False'\",\"'type'\",\"'record'\",\"'object'\",\"'table'\",\"'class'\",\"'instance'\",\"'infixl'\",\"'infixr'\",\"'infix'\",\"'let'\",\"'in'\",\"'do'\",\"'Null'\",\"LOWER\",\"UPPER\",\"'+'\",\"'/'\",\"OPERATOR\",\"INTEGER\",\"FLOAT\",\"STRING\",\"STRSTART\",\"STRMID\",\"STREND\",\"INTERPOPEN\",\"INTERPCLOSE\",\"INTRINSIC\",\"';'\",\"'%inline'\",\"EOF\",\"%eof\"]\n\nhappyActOffsets :: HappyAddr\nhappyActOffsets = HappyA# \"\\x48\\x01\\x00\\x00\\xf3\\x06\\x00\\x00\\x69\\x05\\x00\\x00\\xee\\xff\\xff\\xff\\x09\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xfe\\xff\\xff\\xff\\xd3\\xff\\xff\\xff\\x18\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x97\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x15\\x05\\x00\\x00\\x50\\x05\\x00\\x00\\x8c\\x00\\x00\\x00\\x9b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb0\\x06\\x00\\x00\\x69\\x05\\x00\\x00\\x4d\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\xc7\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd3\\x00\\x00\\x00\\xa8\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x3b\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb1\\x00\\x00\\x00\\xf3\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x01\\x00\\x00\\xeb\\x00\\x00\\x00\\xb2\\x00\\x00\\x00\\x6a\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\xcc\\x04\\x00\\x00\\xf3\\x06\\x00\\x00\\x2a\\x01\\x00\\x00\\x6a\\x07\\x00\\x00\\x4b\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x01\\x00\\x00\\x7d\\x01\\x00\\x00\\xb9\\x04\\x00\\x00\\xcb\\x04\\x00\\x00\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb6\\x01\\x00\\x00\\xd6\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\xb8\\x00\\x00\\x00\\x42\\x01\\x00\\x00\\x88\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x72\\x07\\x00\\x00\\xb0\\x01\\x00\\x00\\xb9\\x01\\x00\\x00\\xd3\\x01\\x00\\x00\\x10\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xe7\\x01\\x00\\x00\\x6c\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf3\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\xf1\\x01\\x00\\x00\\x47\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x4f\\x02\\x00\\x00\\x61\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x88\\x07\\x00\\x00\\x88\\x07\\x00\\x00\\x88\\x07\\x00\\x00\\x88\\x07\\x00\\x00\\xf3\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\xbf\\x02\\x00\\x00\\x69\\x05\\x00\\x00\\x56\\x02\\x00\\x00\\x5c\\x02\\x00\\x00\\xa2\\x05\\x00\\x00\\xa2\\x05\\x00\\x00\\x8b\\x00\\x00\\x00\\x8b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x32\\x02\\x00\\x00\\x0a\\x03\\x00\\x00\\x0a\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x93\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xdc\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7e\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x9a\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x64\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xca\\x00\\x00\\x00\\x7d\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc8\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x06\\x00\\x00\\xbb\\x05\\x00\\x00\\xbb\\x05\\x00\\x00\\xbb\\x05\\x00\\x00\\xf3\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xbb\\x05\\x00\\x00\\xb2\\x02\\x00\\x00\\x89\\x04\\x00\\x00\\xb8\\x02\\x00\\x00\\x72\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xda\\x02\\x00\\x00\\xe8\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xda\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc6\\x02\\x00\\x00\\xce\\x02\\x00\\x00\\xce\\x02\\x00\\x00\\xce\\x02\\x00\\x00\\xce\\x02\\x00\\x00\\x72\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xbb\\x05\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xd3\\x02\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xc3\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x03\\x00\\x00\\xac\\x01\\x00\\x00\\x25\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xcf\\x01\\x00\\x00\\xa7\\x01\\x00\\x00\\x27\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xde\\x02\\x00\\x00\\xe7\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf8\\x00\\x00\\x00\\xf8\\x00\\x00\\x00\\x88\\x07\\x00\\x00\\x88\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\xf3\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf3\\x06\\x00\\x00\\xff\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe9\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\xce\\x07\\x00\\x00\\xce\\x07\\x00\\x00\\xce\\x07\\x00\\x00\\x5b\\x01\\x00\\x00\\x88\\x07\\x00\\x00\\x1d\\x03\\x00\\x00\\x08\\x05\\x00\\x00\\xd5\\x06\\x00\\x00\\x05\\x03\\x00\\x00\\x2b\\x03\\x00\\x00\\x33\\x03\\x00\\x00\\x3c\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xad\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x30\\x03\\x00\\x00\\x63\\x03\\x00\\x00\\x6b\\x03\\x00\\x00\\x79\\x03\\x00\\x00\\x23\\x01\\x00\\x00\\xfb\\x06\\x00\\x00\\x1d\\x00\\x00\\x00\\x50\\x03\\x00\\x00\\x71\\x03\\x00\\x00\\x76\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\xe9\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb2\\x01\\x00\\x00\\x96\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x95\\x03\\x00\\x00\\x90\\x03\\x00\\x00\\xf2\\x00\\x00\\x00\\x2f\\x01\\x00\\x00\\x88\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\xe8\\x06\\x00\\x00\\x06\\x07\\x00\\x00\\xfe\\x02\\x00\\x00\\x90\\x07\\x00\\x00\\x9e\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x53\\x01\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb9\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x90\\x00\\x00\\x00\\x80\\x03\\x00\\x00\\x9c\\x03\\x00\\x00\\xd1\\x00\\x00\\x00\\x9b\\x03\\x00\\x00\\x27\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xef\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\xc8\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\xa6\\x03\\x00\\x00\\xf2\\x03\\x00\\x00\\xd6\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe2\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd6\\x03\\x00\\x00\\xd6\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf9\\x01\\x00\\x00\\xbb\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf4\\x05\\x00\\x00\\x0d\\x06\\x00\\x00\\x8b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x46\\x06\\x00\\x00\\x08\\x03\\x00\\x00\\x8b\\x00\\x00\\x00\\x67\\x01\\x00\\x00\\x5f\\x06\\x00\\x00\\x5f\\x06\\x00\\x00\\x17\\x03\\x00\\x00\\x8b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9c\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x02\\x00\\x00\\xf4\\x03\\x00\\x00\\xe2\\x03\\x00\\x00\\xe2\\x03\\x00\\x00\\xe2\\x03\\x00\\x00\\xe2\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x04\\x00\\x00\\x0f\\x04\\x00\\x00\\x15\\x04\\x00\\x00\\x98\\x00\\x00\\x00\\x0b\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0b\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5f\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5f\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5f\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x4c\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe6\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x06\\x07\\x00\\x00\\xce\\x07\\x00\\x00\\x1b\\x04\\x00\\x00\\x28\\x04\\x00\\x00\\x37\\x04\\x00\\x00\\x28\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x24\\x04\\x00\\x00\\x0a\\x04\\x00\\x00\\xbd\\x00\\x00\\x00\\x2e\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x33\\x04\\x00\\x00\\x0e\\x07\\x00\\x00\\x2c\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x4a\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x48\\x04\\x00\\x00\\x16\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xee\\x07\\x00\\x00\\x30\\x04\\x00\\x00\\x31\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x1e\\x04\\x00\\x00\\x34\\x04\\x00\\x00\\x5f\\x06\\x00\\x00\\x2c\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x03\\x00\\x00\\x52\\x04\\x00\\x00\\x4e\\x04\\x00\\x00\\x2b\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x39\\x07\\x00\\x00\\x39\\x07\\x00\\x00\\x2b\\x04\\x00\\x00\\x57\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xce\\x00\\x00\\x00\\x4d\\x04\\x00\\x00\\x4f\\x04\\x00\\x00\\xb2\\x01\\x00\\x00\\xb2\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x38\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x59\\x03\\x00\\x00\\x59\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x54\\x01\\x00\\x00\\x54\\x01\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x49\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x3e\\x02\\x00\\x00\\x6e\\x04\\x00\\x00\\x3f\\x04\\x00\\x00\\x45\\x04\\x00\\x00\\x75\\x04\\x00\\x00\\x76\\x04\\x00\\x00\\x77\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x9d\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x44\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x5a\\x04\\x00\\x00\\x0b\\x08\\x00\\x00\\x64\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x83\\x04\\x00\\x00\\xdc\\x01\\x00\\x00\\x4c\\x07\\x00\\x00\\x4c\\x07\\x00\\x00\\x73\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x68\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x78\\x04\\x00\\x00\\x9b\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x70\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x74\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4d\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x57\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x9b\\x07\\x00\\x00\\x5f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7e\\x04\\x00\\x00\\x0a\\x01\\x00\\x00\\x56\\x01\\x00\\x00\\x9b\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\xf0\\x06\\x00\\x00\\x5f\\x07\\x00\\x00\\x4f\\x03\\x00\\x00\\xaa\\x07\\x00\\x00\\x7a\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x6b\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\xee\\x00\\x00\\x00\\x9b\\x04\\x00\\x00\\x9c\\x04\\x00\\x00\\xa3\\x04\\x00\\x00\\x14\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x72\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x10\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\xd9\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x86\\x04\\x00\\x00\\x87\\x04\\x00\\x00\\x8b\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe6\\x00\\x00\\x00\\xe6\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4c\\x02\\x00\\x00\\x5f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x7c\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\xc9\\x02\\x00\\x00\\x99\\x04\\x00\\x00\\xae\\x04\\x00\\x00\\xae\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x8e\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x8f\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x8f\\x04\\x00\\x00\\x90\\x04\\x00\\x00\\x92\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\xc1\\x04\\x00\\x00\\xc2\\x04\\x00\\x00\\xc3\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xce\\x04\\x00\\x00\\x9b\\x07\\x00\\x00\\x5f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x8e\\x03\\x00\\x00\\xb3\\x04\\x00\\x00\\xd0\\x04\\x00\\x00\\x52\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xaa\\x07\\x00\\x00\\xaa\\x07\\x00\\x00\\xaa\\x07\\x00\\x00\\xaa\\x07\\x00\\x00\\x5f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x56\\x01\\x00\\x00\\x56\\x01\\x00\\x00\\x9b\\x07\\x00\\x00\\x9b\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x5f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa3\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe6\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe6\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x61\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"#\n\nhappyGotoOffsets :: HappyAddr\nhappyGotoOffsets = HappyA# \"\\x21\\x02\\x00\\x00\\x4a\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x99\\x03\\x00\\x00\\xd2\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x82\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\xd1\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x65\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\xec\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x0d\\x01\\x00\\x00\\x56\\x03\\x00\\x00\\x61\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x1e\\x08\\x00\\x00\\xa3\\x01\\x00\\x00\\x91\\x04\\x00\\x00\\x96\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x8d\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7f\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\xcf\\x04\\x00\\x00\\x80\\x05\\x00\\x00\\x72\\x04\\x00\\x00\\x93\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd4\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x82\\x04\\x00\\x00\\x85\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x8e\\x01\\x00\\x00\\xe6\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf6\\xff\\xff\\xff\\xd3\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd5\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf7\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x55\\x03\\x00\\x00\\x57\\x03\\x00\\x00\\x4a\\x01\\x00\\x00\\x71\\x01\\x00\\x00\\xd2\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xca\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xbe\\x00\\x00\\x00\\xe5\\x00\\x00\\x00\\x65\\x01\\x00\\x00\\x73\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x6d\\x03\\x00\\x00\\xa9\\x04\\x00\\x00\\xac\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x06\\x08\\x00\\x00\\xee\\x01\\x00\\x00\\x15\\x02\\x00\\x00\\x39\\x02\\x00\\x00\\x24\\x06\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x60\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x83\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x95\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xfa\\x04\\x00\\x00\\x11\\x05\\x00\\x00\\x1b\\x05\\x00\\x00\\x1c\\x05\\x00\\x00\\xaa\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x34\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x84\\x02\\x00\\x00\\xab\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xdb\\x04\\x00\\x00\\xcf\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xf6\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\xd6\\x04\\x00\\x00\\x1a\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd7\\x04\\x00\\x00\\xe5\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc4\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc4\\x04\\x00\\x00\\xc4\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x1d\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4f\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xdb\\xff\\xff\\xff\\xe4\\xff\\xff\\xff\\xf0\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xaf\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\xc4\\x04\\x00\\x00\\x51\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x21\\x05\\x00\\x00\\x0a\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2b\\x05\\x00\\x00\\x6f\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd3\\x03\\x00\\x00\\xc0\\x07\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x26\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x05\\x00\\x00\\x2d\\x05\\x00\\x00\\x34\\x05\\x00\\x00\\x39\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xea\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x35\\x08\\x00\\x00\\x3c\\x08\\x00\\x00\\xed\\x04\\x00\\x00\\xe7\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x51\\x05\\x00\\x00\\x41\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x48\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xcf\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x90\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x05\\x00\\x00\\x47\\x05\\x00\\x00\\x53\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc6\\x01\\x00\\x00\\x7e\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x05\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xfd\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x65\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x58\\x01\\x00\\x00\\x8c\\x03\\x00\\x00\\xfb\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x7f\\x01\\x00\\x00\\x33\\x05\\x00\\x00\\x37\\x05\\x00\\x00\\x72\\x05\\x00\\x00\\xb0\\x03\\x00\\x00\\xd7\\x03\\x00\\x00\\x3d\\x05\\x00\\x00\\x41\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x25\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe1\\x03\\x00\\x00\\x82\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x83\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xfb\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x46\\x04\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x43\\x08\\x00\\x00\\xd1\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\xfa\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\xd7\\xff\\xff\\xff\\x23\\x05\\x00\\x00\\x2e\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x2a\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7c\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x4a\\x08\\x00\\x00\\x51\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x69\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x89\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x30\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x05\\x00\\x00\\x6d\\x04\\x00\\x00\\x87\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x03\\x00\\x00\\xa0\\x03\\x00\\x00\\x5e\\x02\\x00\\x00\\x7f\\x02\\x00\\x00\\x4a\\x05\\x00\\x00\\x9c\\x05\\x00\\x00\\xeb\\x03\\x00\\x00\\x86\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x8e\\x05\\x00\\x00\\x92\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xbc\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x43\\x05\\x00\\x00\\x43\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x25\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x83\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x36\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9a\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9d\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xb1\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x05\\x00\\x00\\x6d\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x73\\x02\\x00\\x00\\xaa\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4d\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x8a\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x84\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x4e\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\xd9\\xff\\xff\\xff\\x59\\x05\\x00\\x00\\x5f\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5c\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x66\\x08\\x00\\x00\\x6d\\x08\\x00\\x00\\x60\\x05\\x00\\x00\\xa5\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xac\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9e\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xdf\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x61\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\xa1\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf8\\xff\\xff\\xff\\xfb\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x74\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\xba\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc7\\x05\\x00\\x00\\xd1\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7b\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\xd3\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf3\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x87\\x05\\x00\\x00\\x92\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xff\\x03\\x00\\x00\\x09\\x04\\x00\\x00\\xdc\\x02\\x00\\x00\\xe9\\x02\\x00\\x00\\x82\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x90\\x05\\x00\\x00\\x90\\x05\\x00\\x00\\x00\\x00\\x00\\x00\\x2d\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd7\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf9\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"#\n\nhappyDefActions :: HappyAddr\nhappyDefActions = HappyA# \"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf8\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe1\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x44\\xff\\xff\\xff\\x46\\xff\\xff\\xff\\x45\\xff\\xff\\xff\\x43\\xff\\xff\\xff\\x34\\xff\\xff\\xff\\x30\\xff\\xff\\xff\\x2d\\xff\\xff\\xff\\x2a\\xff\\xff\\xff\\x1d\\xff\\xff\\xff\\x1c\\xff\\xff\\xff\\x27\\xff\\xff\\xff\\x21\\xff\\xff\\xff\\x22\\xff\\xff\\xff\\x1e\\xff\\xff\\xff\\x26\\xff\\xff\\xff\\x20\\xff\\xff\\xff\\x1f\\xff\\xff\\xff\\x24\\xff\\xff\\xff\\x23\\xff\\xff\\xff\\x25\\xff\\xff\\xff\\xed\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf3\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf2\\xfe\\xff\\xff\\xf1\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1b\\xff\\xff\\xff\\xf4\\xfe\\xff\\xff\\xf0\\xfe\\xff\\xff\\xef\\xfe\\xff\\xff\\xee\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x1a\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe9\\xfe\\xff\\xff\\xe8\\xfe\\xff\\xff\\xe4\\xfe\\xff\\xff\\xe1\\xfe\\xff\\xff\\xde\\xfe\\xff\\xff\\xdc\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd4\\xfe\\xff\\xff\\xd6\\xfe\\xff\\xff\\xd2\\xfe\\xff\\xff\\xd3\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xee\\xff\\xff\\xff\\xe5\\xff\\xff\\xff\\xec\\xff\\xff\\xff\\xeb\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xea\\xff\\xff\\xff\\xe9\\xff\\xff\\xff\\xe8\\xff\\xff\\xff\\xe7\\xff\\xff\\xff\\x9f\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xf1\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xae\\xff\\xff\\xff\\xad\\xff\\xff\\xff\\xac\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xab\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf0\\xff\\xff\\xff\\xf3\\xff\\xff\\xff\\xfb\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xd7\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xcd\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xdb\\xfe\\xff\\xff\\xdd\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xfa\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2f\\xff\\xff\\xff\\x2e\\xff\\xff\\xff\\x01\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xfd\\xfe\\xff\\xff\\xfd\\xfe\\xff\\xff\\x02\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x2b\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x9d\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x10\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x13\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x0d\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x19\\xff\\xff\\xff\\xae\\xfe\\xff\\xff\\xad\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xaf\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xb0\\xfe\\xff\\xff\\xac\\xfe\\xff\\xff\\xb1\\xfe\\xff\\xff\\x29\\xff\\xff\\xff\\x2c\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf9\\xff\\xff\\xff\\xe0\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xde\\xff\\xff\\xff\\xdd\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xd8\\xff\\xff\\xff\\xf7\\xff\\xff\\xff\\xfc\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xd5\\xff\\xff\\xff\\xd4\\xff\\xff\\xff\\xd2\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xd6\\xff\\xff\\xff\\xd1\\xff\\xff\\xff\\xcd\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\xff\\xff\\xff\\x42\\xff\\xff\\xff\\x32\\xff\\xff\\xff\\x31\\xff\\xff\\xff\\x33\\xff\\xff\\xff\\x28\\xff\\xff\\xff\\x17\\xff\\xff\\xff\\x16\\xff\\xff\\xff\\x18\\xff\\xff\\xff\\x15\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x0c\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x11\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9c\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xff\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xfa\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3a\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3c\\xff\\xff\\xff\\x03\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x07\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xf4\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x09\\xff\\xff\\xff\\xf5\\xfe\\xff\\xff\\xf6\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xec\\xfe\\xff\\xff\\xe7\\xfe\\xff\\xff\\xe3\\xfe\\xff\\xff\\xe2\\xfe\\xff\\xff\\xdf\\xfe\\xff\\xff\\xe0\\xfe\\xff\\xff\\xda\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xd8\\xfe\\xff\\xff\\xe6\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd5\\xfe\\xff\\xff\\xf4\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xe6\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x73\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x7d\\xff\\xff\\xff\\x7a\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa2\\xff\\xff\\xff\\x5d\\xff\\xff\\xff\\xa5\\xff\\xff\\xff\\xa6\\xff\\xff\\xff\\xc7\\xff\\xff\\xff\\xdd\\xff\\xff\\xff\\xc6\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa2\\xff\\xff\\xff\\xf2\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xed\\xff\\xff\\xff\\xa4\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa2\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe4\\xff\\xff\\xff\\xca\\xfe\\xff\\xff\\xc8\\xfe\\xff\\xff\\xc5\\xfe\\xff\\xff\\xc2\\xfe\\xff\\xff\\xbf\\xfe\\xff\\xff\\xbd\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb5\\xfe\\xff\\xff\\xb7\\xfe\\xff\\xff\\xb3\\xfe\\xff\\xff\\xb4\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x9e\\xfe\\xff\\xff\\xa9\\xfe\\xff\\xff\\xa8\\xfe\\xff\\xff\\xaa\\xfe\\xff\\xff\\xc6\\xff\\xff\\xff\\xcc\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb5\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa2\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x79\\xff\\xff\\xff\\xd6\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xcf\\xfe\\xff\\xff\\x72\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x6b\\xff\\xff\\xff\\x68\\xff\\xff\\xff\\x63\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x62\\xff\\xff\\xff\\x61\\xff\\xff\\xff\\x60\\xff\\xff\\xff\\x6a\\xff\\xff\\xff\\x69\\xff\\xff\\xff\\xef\\xff\\xff\\xff\\xcc\\xfe\\xff\\xff\\xe5\\xfe\\xff\\xff\\xd1\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xeb\\xfe\\xff\\xff\\x0b\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0a\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf7\\xfe\\xff\\xff\\xf8\\xfe\\xff\\xff\\xfe\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xfc\\xfe\\xff\\xff\\xfb\\xfe\\xff\\xff\\xdf\\xff\\xff\\xff\\x35\\xff\\xff\\xff\\x0f\\xff\\xff\\xff\\x0e\\xff\\xff\\xff\\x12\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xd9\\xff\\xff\\xff\\xda\\xff\\xff\\xff\\xdb\\xff\\xff\\xff\\xdc\\xff\\xff\\xff\\xd7\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xf5\\xff\\xff\\xff\\xd3\\xff\\xff\\xff\\xcf\\xff\\xff\\xff\\xce\\xff\\xff\\xff\\xd0\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x14\\xff\\xff\\xff\\xf9\\xfe\\xff\\xff\\x39\\xff\\xff\\xff\\x3d\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x37\\xff\\xff\\xff\\x38\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x3b\\xff\\xff\\xff\\x3f\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x06\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x05\\xff\\xff\\xff\\x08\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xd9\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xce\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x7c\\xff\\xff\\xff\\xb2\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa2\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa1\\xff\\xff\\xff\\x5c\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc8\\xff\\xff\\xff\\xc6\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xc4\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xc2\\xff\\xff\\xff\\xba\\xff\\xff\\xff\\xca\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xa7\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb8\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xbc\\xfe\\xff\\xff\\xbe\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa8\\xff\\xff\\xff\\xa9\\xff\\xff\\xff\\xaa\\xff\\xff\\xff\\xab\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa3\\xff\\xff\\xff\\xb1\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x9c\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xcb\\xfe\\xff\\xff\\xc9\\xfe\\xff\\xff\\xc4\\xfe\\xff\\xff\\xc3\\xfe\\xff\\xff\\xc0\\xfe\\xff\\xff\\xc1\\xfe\\xff\\xff\\xbb\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xb9\\xfe\\xff\\xff\\xc7\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xb6\\xfe\\xff\\xff\\xa7\\xfe\\xff\\xff\\xe3\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xc6\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xc5\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xcb\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5b\\xff\\xff\\xff\\x55\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x59\\xff\\xff\\xff\\xb6\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xd4\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb3\\xff\\xff\\xff\\x78\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x77\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x71\\xff\\xff\\xff\\x6c\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x70\\xff\\xff\\xff\\x6e\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x65\\xff\\xff\\xff\\x64\\xff\\xff\\xff\\x66\\xff\\xff\\xff\\x67\\xff\\xff\\xff\\xd0\\xfe\\xff\\xff\\xea\\xfe\\xff\\xff\\x04\\xff\\xff\\xff\\x40\\xff\\xff\\xff\\x36\\xff\\xff\\xff\\x3e\\xff\\xff\\xff\\xf6\\xff\\xff\\xff\\x6d\\xff\\xff\\xff\\x74\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7e\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x7b\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xb7\\xff\\xff\\xff\\x97\\xff\\xff\\xff\\x93\\xff\\xff\\xff\\x90\\xff\\xff\\xff\\x8d\\xff\\xff\\xff\\x8a\\xff\\xff\\xff\\x88\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x80\\xff\\xff\\xff\\x82\\xff\\xff\\xff\\x7f\\xff\\xff\\xff\\x96\\xff\\xff\\xff\\xb8\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x9f\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5f\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4e\\xff\\xff\\xff\\x4b\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x4a\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xc3\\xff\\xff\\xff\\xbd\\xff\\xff\\xff\\xbb\\xff\\xff\\xff\\xbf\\xff\\xff\\xff\\xc1\\xff\\xff\\xff\\xb9\\xff\\xff\\xff\\xc9\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xe2\\xff\\xff\\xff\\xc6\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb2\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x9a\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xa8\\xff\\xff\\xff\\xa8\\xff\\xff\\xff\\xb0\\xff\\xff\\xff\\xaf\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\xa7\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x9b\\xff\\xff\\xff\\x9d\\xff\\xff\\xff\\xba\\xfe\\xff\\xff\\xa0\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xa2\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xa4\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x4c\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5e\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x5a\\xff\\xff\\xff\\x54\\xff\\xff\\xff\\x53\\xff\\xff\\xff\\x52\\xff\\xff\\xff\\x50\\xff\\xff\\xff\\x4f\\xff\\xff\\xff\\x51\\xff\\xff\\xff\\x56\\xff\\xff\\xff\\x58\\xff\\xff\\xff\\x57\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x98\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x83\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x87\\xff\\xff\\xff\\x89\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xb4\\xff\\xff\\xff\\x76\\xff\\xff\\xff\\x75\\xff\\xff\\xff\\x6f\\xff\\xff\\xff\\x94\\xff\\xff\\xff\\x8f\\xff\\xff\\xff\\x8e\\xff\\xff\\xff\\x8b\\xff\\xff\\xff\\x8c\\xff\\xff\\xff\\x86\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x84\\xff\\xff\\xff\\x92\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x81\\xff\\xff\\xff\\x95\\xff\\xff\\xff\\xa0\\xff\\xff\\xff\\x4d\\xff\\xff\\xff\\x48\\xff\\xff\\xff\\x47\\xff\\xff\\xff\\x49\\xff\\xff\\xff\\xbe\\xff\\xff\\xff\\xbc\\xff\\xff\\xff\\xc0\\xff\\xff\\xff\\xa6\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\xa5\\xfe\\xff\\xff\\x00\\x00\\x00\\x00\\x99\\xff\\xff\\xff\\x9e\\xff\\xff\\xff\\xa1\\xfe\\xff\\xff\\xa3\\xfe\\xff\\xff\\x91\\xff\\xff\\xff\\x00\\x00\\x00\\x00\\x85\\xff\\xff\\xff\"#\n\nhappyCheck :: HappyAddr\nhappyCheck = HappyA# \"\\xff\\xff\\xff\\xff\\x09\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x79\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x7b\\x00\\x00\\x00\\x7b\\x00\\x00\\x00\\x7a\\x00\\x00\\x00\\x7b\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x7b\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x75\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x56\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x3d\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x3d\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x51\\x00\\x00\\x00\\x52\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x5b\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x3d\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x7d\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x63\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x7c\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x52\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x75\\x00\\x00\\x00\\x78\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x78\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x6a\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x13\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x16\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x09\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x13\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x16\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x1d\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x41\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x13\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x16\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x1d\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x09\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x13\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x16\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x1d\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x41\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x13\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x16\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x09\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x13\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x16\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x41\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x41\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x07\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x11\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x05\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x3b\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x12\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x34\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x12\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x6e\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x4a\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x50\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x50\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x58\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x50\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x53\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x58\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x6b\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x6b\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x6b\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x6e\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x70\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\"#\n\nhappyTable :: HappyAddr\nhappyTable = HappyA# \"\\x00\\x00\\x00\\x00\\x4a\\x02\\x00\\x00\\x4a\\x02\\x00\\x00\\xa7\\x00\\x00\\x00\\x4a\\x02\\x00\\x00\\xf3\\x01\\x00\\x00\\xf4\\x01\\x00\\x00\\xff\\x01\\x00\\x00\\x72\\x02\\x00\\x00\\x63\\x00\\x00\\x00\\x4a\\x02\\x00\\x00\\x64\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x10\\x01\\x00\\x00\\xf7\\x01\\x00\\x00\\x49\\x01\\x00\\x00\\x42\\x01\\x00\\x00\\x49\\x00\\x00\\x00\\x4a\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x2c\\x02\\x00\\x00\\x4c\\x00\\x00\\x00\\xf7\\x01\\x00\\x00\\xff\\xff\\xff\\xff\\x48\\x01\\x00\\x00\\x42\\x01\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\xa0\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x5d\\x01\\x00\\x00\\x4a\\x01\\x00\\x00\\x4b\\x00\\x00\\x00\\xf9\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x41\\x01\\x00\\x00\\x42\\x01\\x00\\x00\\xe9\\x01\\x00\\x00\\x07\\x00\\x00\\x00\\x56\\x01\\x00\\x00\\x8c\\x02\\x00\\x00\\x4c\\x00\\x00\\x00\\x9e\\x01\\x00\\x00\\x55\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x9e\\x01\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\xa8\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x17\\x01\\x00\\x00\\x0d\\x01\\x00\\x00\\x4f\\x00\\x00\\x00\\x51\\x00\\x00\\x00\\xf8\\x01\\x00\\x00\\xf9\\x01\\x00\\x00\\x52\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x0d\\x01\\x00\\x00\\xfa\\x01\\x00\\x00\\x9f\\x01\\x00\\x00\\x51\\x00\\x00\\x00\\x18\\x01\\x00\\x00\\x74\\x02\\x00\\x00\\x52\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x2d\\x02\\x00\\x00\\xfa\\x01\\x00\\x00\\x11\\x01\\x00\\x00\\x43\\x01\\x00\\x00\\x50\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x51\\x00\\x00\\x00\\xaa\\x00\\x00\\x00\\xf5\\x01\\x00\\x00\\x52\\x00\\x00\\x00\\xf5\\x01\\x00\\x00\\x43\\x01\\x00\\x00\\x82\\x00\\x00\\x00\\x2e\\x02\\x00\\x00\\x82\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x5e\\x01\\x00\\x00\\x11\\x01\\x00\\x00\\x4b\\x00\\x00\\x00\\x43\\x01\\x00\\x00\\xa0\\x01\\x00\\x00\\x62\\x00\\x00\\x00\\xea\\x01\\x00\\x00\\xa0\\x01\\x00\\x00\\xfa\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x57\\x01\\x00\\x00\\x8d\\x02\\x00\\x00\\x4c\\x00\\x00\\x00\\x0e\\x01\\x00\\x00\\x0f\\x01\\x00\\x00\\x43\\x01\\x00\\x00\\xeb\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x30\\x01\\x00\\x00\\xfc\\x01\\x00\\x00\\x9c\\x01\\x00\\x00\\xbf\\x00\\x00\\x00\\xec\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x4d\\x02\\x00\\x00\\x53\\x00\\x00\\x00\\x4e\\x02\\x00\\x00\\x90\\x02\\x00\\x00\\x4b\\x02\\x00\\x00\\x4c\\x02\\x00\\x00\\xaf\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x8f\\x02\\x00\\x00\\x53\\x00\\x00\\x00\\x38\\x01\\x00\\x00\\xa1\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x83\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x83\\x00\\x00\\x00\\x51\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x84\\x00\\x00\\x00\\x52\\x00\\x00\\x00\\x84\\x00\\x00\\x00\\x31\\x01\\x00\\x00\\x8f\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xfd\\x00\\x00\\x00\\xb0\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xa8\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xd6\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\xaf\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x7f\\x00\\x00\\x00\\x80\\x00\\x00\\x00\\xa8\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xb1\\x00\\x00\\x00\\xb2\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xfe\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\x30\\x01\\x00\\x00\\xd7\\x00\\x00\\x00\\x8c\\x00\\x00\\x00\\x90\\x00\\x00\\x00\\x9e\\x01\\x00\\x00\\xf0\\x01\\x00\\x00\\xa9\\x01\\x00\\x00\\xaa\\x01\\x00\\x00\\x39\\x01\\x00\\x00\\x53\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xb3\\x00\\x00\\x00\\x06\\x01\\x00\\x00\\xb1\\x00\\x00\\x00\\xb2\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x89\\x00\\x00\\x00\\xc2\\x00\\x00\\x00\\xa9\\x01\\x00\\x00\\xaa\\x01\\x00\\x00\\x9e\\x01\\x00\\x00\\xc7\\x01\\x00\\x00\\x7d\\x00\\x00\\x00\\xa2\\x01\\x00\\x00\\xc3\\x00\\x00\\x00\\x78\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x7e\\x00\\x00\\x00\\xac\\x01\\x00\\x00\\x2c\\x02\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x32\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\xa8\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xa0\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\xe9\\x01\\x00\\x00\\xa3\\x01\\x00\\x00\\xc9\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xd9\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x7f\\x00\\x00\\x00\\x80\\x00\\x00\\x00\\xa0\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x2d\\x02\\x00\\x00\\x16\\x00\\x00\\x00\\xb9\\x01\\x00\\x00\\xca\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\xde\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\xdf\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xe9\\x01\\x00\\x00\\x62\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x61\\x02\\x00\\x00\\x62\\x02\\x00\\x00\\xd9\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x6f\\x02\\x00\\x00\\xba\\x01\\x00\\x00\\x27\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\xa3\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x29\\x01\\x00\\x00\\xda\\x00\\x00\\x00\\xdb\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x70\\x02\\x00\\x00\\x75\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x5b\\x02\\x00\\x00\\x5c\\x02\\x00\\x00\\x48\\x00\\x00\\x00\\xb7\\x01\\x00\\x00\\x8c\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x79\\x00\\x00\\x00\\x7a\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x2a\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x8d\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x6b\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\xb8\\x01\\x00\\x00\\x07\\x00\\x00\\x00\\x40\\x01\\x00\\x00\\x68\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\xae\\x01\\x00\\x00\\x6d\\x02\\x00\\x00\\x8c\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x01\\x01\\x00\\x00\\x02\\x01\\x00\\x00\\x27\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x41\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x69\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x83\\x01\\x00\\x00\\xff\\xff\\xff\\xff\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x6e\\x02\\x00\\x00\\xa3\\x00\\x00\\x00\\xa4\\x00\\x00\\x00\\xa5\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xd9\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x02\\x01\\x00\\x00\\xa4\\x00\\x00\\x00\\x03\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x04\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xd7\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\xd8\\x00\\x00\\x00\\x7b\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x89\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x7c\\x00\\x00\\x00\\xe7\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x79\\x00\\x00\\x00\\x7a\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\xd9\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x54\\x01\\x00\\x00\\x5b\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x61\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x0b\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x62\\x01\\x00\\x00\\x55\\x01\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x86\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x7e\\x00\\x00\\x00\\xe6\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x85\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xf9\\x00\\x00\\x00\\xbf\\x01\\x00\\x00\\xc0\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xc1\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xf7\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x20\\x02\\x00\\x00\\x98\\x01\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x99\\x01\\x00\\x00\\xef\\x00\\x00\\x00\\xf0\\x00\\x00\\x00\\x8c\\x01\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x8d\\x01\\x00\\x00\\x7c\\x01\\x00\\x00\\x58\\x00\\x00\\x00\\xe2\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xf6\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\xc5\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x3d\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x3e\\x01\\x00\\x00\\xe2\\x01\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\xe3\\x01\\x00\\x00\\x04\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xbc\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x3d\\x01\\x00\\x00\\xd3\\x01\\x00\\x00\\x96\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xd4\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x87\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\xd8\\x00\\x00\\x00\\x36\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xd2\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x37\\x02\\x00\\x00\\xd3\\x00\\x00\\x00\\xf5\\x00\\x00\\x00\\x27\\x02\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x28\\x02\\x00\\x00\\x4a\\x02\\x00\\x00\\xee\\x00\\x00\\x00\\xbb\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xed\\x00\\x00\\x00\\x7b\\x02\\x00\\x00\\x8d\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x7c\\x02\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x3d\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\xf6\\x01\\x00\\x00\\x94\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xc4\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x8d\\x01\\x00\\x00\\xf2\\x00\\x00\\x00\\xf3\\x00\\x00\\x00\\xc5\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xba\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x2d\\x01\\x00\\x00\\xa4\\x00\\x00\\x00\\xa5\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xc1\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xc7\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\xe2\\x00\\x00\\x00\\xc8\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xe1\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\xaa\\x00\\x00\\x00\\xab\\x00\\x00\\x00\\xac\\x00\\x00\\x00\\xad\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xa3\\x01\\x00\\x00\\xa4\\x00\\x00\\x00\\xa5\\x00\\x00\\x00\\xb8\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xe5\\x01\\x00\\x00\\xe6\\x01\\x00\\x00\\xe7\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xcb\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xd2\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\xc6\\x00\\x00\\x00\\xd3\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xb8\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x6a\\x01\\x00\\x00\\xab\\x00\\x00\\x00\\xac\\x00\\x00\\x00\\xad\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x68\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xcf\\x01\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xc0\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x46\\x02\\x00\\x00\\x16\\x00\\x00\\x00\\xb3\\x00\\x00\\x00\\x47\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x3d\\x02\\x00\\x00\\x19\\x00\\x00\\x00\\x3d\\x01\\x00\\x00\\x3e\\x02\\x00\\x00\\x96\\x01\\x00\\x00\\x75\\x01\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xce\\x01\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x67\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x28\\x02\\x00\\x00\\x29\\x02\\x00\\x00\\x2a\\x02\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x74\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x70\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\xe4\\x00\\x00\\x00\\xe5\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xa8\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x77\\x02\\x00\\x00\\x14\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x8c\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x65\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x76\\x02\\x00\\x00\\x14\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x5c\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x51\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\xcd\\x00\\x00\\x00\\xce\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x50\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x58\\x01\\x00\\x00\\x59\\x01\\x00\\x00\\x52\\x01\\x00\\x00\\x53\\x01\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xb2\\x01\\x00\\x00\\x6c\\x00\\x00\\x00\\x4c\\x01\\x00\\x00\\x64\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x86\\x01\\x00\\x00\\x38\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x3d\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x37\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x80\\x01\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x36\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x03\\x02\\x00\\x00\\x59\\x01\\x00\\x00\\xd7\\x01\\x00\\x00\\xf0\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x62\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x3c\\x02\\x00\\x00\\x3d\\x02\\x00\\x00\\x34\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xa8\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x2d\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x0c\\x02\\x00\\x00\\x0d\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x2c\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x09\\x02\\x00\\x00\\x0a\\x02\\x00\\x00\\x57\\x02\\x00\\x00\\x58\\x02\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x86\\x01\\x00\\x00\\x2b\\x01\\x00\\x00\\xac\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x68\\x02\\x00\\x00\\x6c\\x00\\x00\\x00\\x16\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x15\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x14\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x8a\\x02\\x00\\x00\\x8b\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xbd\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x7f\\x02\\x00\\x00\\xf0\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x8a\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x89\\x00\\x00\\x00\\x8a\\x00\\x00\\x00\\xbb\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xbc\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xa5\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\xd3\\x00\\x00\\x00\\xd4\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xaf\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\xe9\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xe8\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xcf\\x00\\x00\\x00\\xd0\\x00\\x00\\x00\\x95\\x01\\x00\\x00\\x88\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x59\\x01\\x00\\x00\\x09\\x00\\x00\\x00\\xa8\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xa1\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x93\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\x27\\x01\\x00\\x00\\x09\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x8e\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\xa5\\x01\\x00\\x00\\xa6\\x01\\x00\\x00\\x95\\x01\\x00\\x00\\x3c\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x81\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x76\\x01\\x00\\x00\\xad\\x00\\x00\\x00\\x92\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x7b\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xb3\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\xd1\\x01\\x00\\x00\\x3c\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x7a\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\xd0\\x01\\x00\\x00\\x3c\\x00\\x00\\x00\\xc9\\x01\\x00\\x00\\xca\\x01\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x0d\\x02\\x00\\x00\\x3c\\x00\\x00\\x00\\x79\\x01\\x00\\x00\\x80\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x58\\x02\\x00\\x00\\xe7\\x01\\x00\\x00\\x78\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xff\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x02\\x02\\x00\\x00\\x16\\x00\\x00\\x00\\x79\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xfe\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\x82\\x02\\x00\\x00\\x2a\\x02\\x00\\x00\\x78\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x05\\x02\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xfd\\x01\\x00\\x00\\xf3\\x01\\x00\\x00\\xf2\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xef\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\xee\\x01\\x00\\x00\\x16\\x00\\x00\\x00\\xe5\\x01\\x00\\x00\\x30\\x01\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\xdc\\x01\\x00\\x00\\x19\\x00\\x00\\x00\\xde\\x01\\x00\\x00\\xdd\\x01\\x00\\x00\\xd5\\x01\\x00\\x00\\xdb\\x01\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xd6\\x01\\x00\\x00\\xcc\\x01\\x00\\x00\\xc9\\x01\\x00\\x00\\x04\\x02\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\xc6\\x01\\x00\\x00\\xdb\\x01\\x00\\x00\\xc5\\x01\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x40\\x02\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x3b\\x02\\x00\\x00\\x16\\x00\\x00\\x00\\x30\\x01\\x00\\x00\\x34\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x33\\x02\\x00\\x00\\x19\\x00\\x00\\x00\\x32\\x02\\x00\\x00\\x31\\x02\\x00\\x00\\x30\\x02\\x00\\x00\\x26\\x02\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x03\\x02\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x22\\x02\\x00\\x00\\x21\\x02\\x00\\x00\\x0f\\x02\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x0b\\x02\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x71\\x02\\x00\\x00\\x65\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\xb4\\x00\\x00\\x00\\xb5\\x00\\x00\\x00\\xb6\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\xb7\\x00\\x00\\x00\\x63\\x02\\x00\\x00\\x5f\\x02\\x00\\x00\\x5e\\x02\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x5d\\x02\\x00\\x00\\x52\\x02\\x00\\x00\\x51\\x02\\x00\\x00\\xd8\\x01\\x00\\x00\\x0b\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x50\\x02\\x00\\x00\\xcc\\x01\\x00\\x00\\x45\\x02\\x00\\x00\\x0d\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\xc9\\x01\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x7e\\x02\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x40\\x02\\x00\\x00\\x89\\x02\\x00\\x00\\x88\\x02\\x00\\x00\\x19\\x00\\x00\\x00\\x87\\x02\\x00\\x00\\x86\\x02\\x00\\x00\\x85\\x02\\x00\\x00\\x84\\x02\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x82\\x02\\x00\\x00\\x56\\x00\\x00\\x00\\xa8\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x7d\\x02\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x9c\\x00\\x00\\x00\\xa1\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x87\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x5b\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x84\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x80\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x0b\\x01\\x00\\x00\\x59\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x5b\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x06\\x01\\x00\\x00\\x63\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x09\\x01\\x00\\x00\\x62\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xce\\x00\\x00\\x00\\xf7\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\xcb\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x6e\\x01\\x00\\x00\\x57\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x70\\x01\\x00\\x00\\xf3\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x63\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x5b\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x92\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x6d\\x01\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x3c\\x01\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x95\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x6c\\x01\\x00\\x00\\x6b\\x01\\x00\\x00\\x29\\x00\\x00\\x00\\x63\\x00\\x00\\x00\\x66\\x01\\x00\\x00\\x6f\\x00\\x00\\x00\\x63\\x01\\x00\\x00\\x5f\\x01\\x00\\x00\\x96\\x00\\x00\\x00\\x97\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x5e\\x01\\x00\\x00\\x34\\x01\\x00\\x00\\x2e\\x01\\x00\\x00\\x44\\x00\\x00\\x00\\x32\\x01\\x00\\x00\\x45\\x00\\x00\\x00\\x12\\x01\\x00\\x00\\x2d\\x00\\x00\\x00\\xc3\\x01\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\xc2\\x01\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xc1\\x01\\x00\\x00\\xbd\\x01\\x00\\x00\\xb5\\x01\\x00\\x00\\x21\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x8f\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\xb0\\x01\\x00\\x00\\xaf\\x01\\x00\\x00\\xa1\\x00\\x00\\x00\\xaa\\x01\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x9c\\x01\\x00\\x00\\x28\\x00\\x00\\x00\\x9a\\x01\\x00\\x00\\x1e\\x02\\x00\\x00\\x29\\x00\\x00\\x00\\x10\\x02\\x00\\x00\\x11\\x02\\x00\\x00\\x12\\x02\\x00\\x00\\x13\\x02\\x00\\x00\\x14\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x2a\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x93\\x01\\x00\\x00\\x22\\x00\\x00\\x00\\x99\\x01\\x00\\x00\\x23\\x00\\x00\\x00\\x8e\\x01\\x00\\x00\\x84\\x01\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x83\\x01\\x00\\x00\\x28\\x00\\x00\\x00\\xa1\\x00\\x00\\x00\\x7e\\x01\\x00\\x00\\x29\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x7c\\x01\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x7d\\x01\\x00\\x00\\x2a\\x00\\x00\\x00\\x75\\x01\\x00\\x00\\x06\\x02\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x0f\\x02\\x00\\x00\\x35\\x00\\x00\\x00\\x10\\x02\\x00\\x00\\x11\\x02\\x00\\x00\\x12\\x02\\x00\\x00\\x13\\x02\\x00\\x00\\x14\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x93\\x01\\x00\\x00\\x2d\\x00\\x00\\x00\\x12\\x01\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\xe3\\x01\\x00\\x00\\xf0\\x01\\x00\\x00\\xd9\\x01\\x00\\x00\\xc7\\x01\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\xde\\x01\\x00\\x00\\x21\\x00\\x00\\x00\\x41\\x02\\x00\\x00\\x22\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x40\\x02\\x00\\x00\\x6f\\x00\\x00\\x00\\x37\\x02\\x00\\x00\\x34\\x02\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x2e\\x02\\x00\\x00\\x28\\x00\\x00\\x00\\x22\\x02\\x00\\x00\\x93\\x01\\x00\\x00\\x29\\x00\\x00\\x00\\xcd\\x01\\x00\\x00\\x1a\\x01\\x00\\x00\\x1b\\x01\\x00\\x00\\x1c\\x01\\x00\\x00\\x1d\\x01\\x00\\x00\\x1e\\x01\\x00\\x00\\x2a\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x07\\x02\\x00\\x00\\x22\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x84\\x01\\x00\\x00\\x6b\\x02\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x66\\x02\\x00\\x00\\x28\\x00\\x00\\x00\\x65\\x02\\x00\\x00\\x63\\x02\\x00\\x00\\x29\\x00\\x00\\x00\\xdd\\x00\\x00\\x00\\x5f\\x02\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\xde\\x00\\x00\\x00\\x53\\x02\\x00\\x00\\x2a\\x00\\x00\\x00\\x59\\x02\\x00\\x00\\x47\\x02\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x52\\x02\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x43\\x02\\x00\\x00\\x35\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x42\\x02\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x80\\x02\\x00\\x00\\x8d\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x6b\\x02\\x00\\x00\\x22\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\xcc\\x01\\x00\\x00\\x1a\\x01\\x00\\x00\\x1b\\x01\\x00\\x00\\x1c\\x01\\x00\\x00\\x1d\\x01\\x00\\x00\\x1e\\x01\\x00\\x00\\x2a\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\xdd\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\xde\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xe5\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\xf0\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\xdd\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\xde\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\xb9\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x9e\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x9f\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\xb5\\x01\\x00\\x00\\x3f\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x6b\\x02\\x00\\x00\\x3f\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x20\\x01\\x00\\x00\\x41\\x00\\x00\\x00\\x21\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x22\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x3b\\x01\\x00\\x00\\x3e\\x00\\x00\\x00\\x23\\x01\\x00\\x00\\x3f\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x24\\x01\\x00\\x00\\x25\\x01\\x00\\x00\\x3e\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x26\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x27\\x01\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x20\\x01\\x00\\x00\\x44\\x00\\x00\\x00\\x21\\x01\\x00\\x00\\x45\\x00\\x00\\x00\\xed\\x01\\x00\\x00\\x43\\x00\\x00\\x00\\x22\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x23\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x20\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x21\\x01\\x00\\x00\\x17\\x02\\x00\\x00\\x41\\x00\\x00\\x00\\x18\\x02\\x00\\x00\\x22\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x19\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x23\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x20\\x01\\x00\\x00\\x1a\\x02\\x00\\x00\\x21\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x22\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x23\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x24\\x01\\x00\\x00\\x25\\x01\\x00\\x00\\x3e\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x26\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x27\\x01\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\xfc\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x24\\x01\\x00\\x00\\x25\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x1b\\x02\\x00\\x00\\x1c\\x02\\x00\\x00\\x26\\x01\\x00\\x00\\x41\\x00\\x00\\x00\\x27\\x01\\x00\\x00\\x1d\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x1e\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x24\\x01\\x00\\x00\\x25\\x01\\x00\\x00\\x3e\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x26\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x27\\x01\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x20\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x21\\x01\\x00\\x00\\x44\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x23\\x01\\x00\\x00\\x3f\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x17\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x18\\x02\\x00\\x00\\x44\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x19\\x02\\x00\\x00\\x17\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x18\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1a\\x02\\x00\\x00\\x4c\\x01\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1a\\x02\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x24\\x01\\x00\\x00\\x25\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x26\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x27\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x01\\x00\\x00\\x44\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x1b\\x02\\x00\\x00\\x1c\\x02\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1d\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x1b\\x02\\x00\\x00\\x1c\\x02\\x00\\x00\\x46\\x01\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x1d\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x08\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x47\\x01\\x00\\x00\\x72\\x01\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x09\\x01\\x00\\x00\\x90\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x73\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x91\\x01\\x00\\x00\\xe0\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x48\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\xe1\\x01\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x93\\x00\\x00\\x00\\x94\\x00\\x00\\x00\\x24\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x55\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x25\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x96\\x00\\x00\\x00\\x56\\x02\\x00\\x00\\x18\\x01\\x00\\x00\\x19\\x01\\x00\\x00\\x1a\\x01\\x00\\x00\\x1b\\x01\\x00\\x00\\x1c\\x01\\x00\\x00\\x1d\\x01\\x00\\x00\\x1e\\x01\\x00\\x00\\x9a\\x00\\x00\\x00\\x9b\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x98\\x00\\x00\\x00\\x99\\x00\\x00\\x00\\x9a\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\xbd\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x86\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x4d\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x4e\\x01\\x00\\x00\\x4d\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x39\\x02\\x00\\x00\\x4d\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x92\\x02\\x00\\x00\\xb3\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xb2\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xeb\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xea\\x01\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x73\\x02\\x00\\x00\\x19\\x01\\x00\\x00\\x1a\\x01\\x00\\x00\\x1b\\x01\\x00\\x00\\x1c\\x01\\x00\\x00\\x1d\\x01\\x00\\x00\\x1e\\x01\\x00\\x00\\x71\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x69\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x68\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x48\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x8e\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x75\\x02\\x00\\x00\\x37\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\xd7\\x01\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x38\\x02\\x00\\x00\\x1b\\x01\\x00\\x00\\x1c\\x01\\x00\\x00\\x1d\\x01\\x00\\x00\\x1e\\x01\\x00\\x00\\x7f\\x02\\x00\\x00\\x39\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x91\\x02\\x00\\x00\\x12\\x02\\x00\\x00\\x13\\x02\\x00\\x00\\x14\\x02\\x00\\x00\\x15\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"#\n\nhappyReduceArr = Happy_Data_Array.array (3, 355) [\n        (3 , happyReduce_3),\n        (4 , happyReduce_4),\n        (5 , happyReduce_5),\n        (6 , happyReduce_6),\n        (7 , happyReduce_7),\n        (8 , happyReduce_8),\n        (9 , happyReduce_9),\n        (10 , happyReduce_10),\n        (11 , happyReduce_11),\n        (12 , happyReduce_12),\n        (13 , happyReduce_13),\n        (14 , happyReduce_14),\n        (15 , happyReduce_15),\n        (16 , happyReduce_16),\n        (17 , happyReduce_17),\n        (18 , happyReduce_18),\n        (19 , happyReduce_19),\n        (20 , happyReduce_20),\n        (21 , happyReduce_21),\n        (22 , happyReduce_22),\n        (23 , happyReduce_23),\n        (24 , happyReduce_24),\n        (25 , happyReduce_25),\n        (26 , happyReduce_26),\n        (27 , happyReduce_27),\n        (28 , happyReduce_28),\n        (29 , happyReduce_29),\n        (30 , happyReduce_30),\n        (31 , happyReduce_31),\n        (32 , happyReduce_32),\n        (33 , happyReduce_33),\n        (34 , happyReduce_34),\n        (35 , happyReduce_35),\n        (36 , happyReduce_36),\n        (37 , happyReduce_37),\n        (38 , happyReduce_38),\n        (39 , happyReduce_39),\n        (40 , happyReduce_40),\n        (41 , happyReduce_41),\n        (42 , happyReduce_42),\n        (43 , happyReduce_43),\n        (44 , happyReduce_44),\n        (45 , happyReduce_45),\n        (46 , happyReduce_46),\n        (47 , happyReduce_47),\n        (48 , happyReduce_48),\n        (49 , happyReduce_49),\n        (50 , happyReduce_50),\n        (51 , happyReduce_51),\n        (52 , happyReduce_52),\n        (53 , happyReduce_53),\n        (54 , happyReduce_54),\n        (55 , happyReduce_55),\n        (56 , happyReduce_56),\n        (57 , happyReduce_57),\n        (58 , happyReduce_58),\n        (59 , happyReduce_59),\n        (60 , happyReduce_60),\n        (61 , happyReduce_61),\n        (62 , happyReduce_62),\n        (63 , happyReduce_63),\n        (64 , happyReduce_64),\n        (65 , happyReduce_65),\n        (66 , happyReduce_66),\n        (67 , happyReduce_67),\n        (68 , happyReduce_68),\n        (69 , happyReduce_69),\n        (70 , happyReduce_70),\n        (71 , happyReduce_71),\n        (72 , happyReduce_72),\n        (73 , happyReduce_73),\n        (74 , happyReduce_74),\n        (75 , happyReduce_75),\n        (76 , happyReduce_76),\n        (77 , happyReduce_77),\n        (78 , happyReduce_78),\n        (79 , happyReduce_79),\n        (80 , happyReduce_80),\n        (81 , happyReduce_81),\n        (82 , happyReduce_82),\n        (83 , happyReduce_83),\n        (84 , happyReduce_84),\n        (85 , happyReduce_85),\n        (86 , happyReduce_86),\n        (87 , happyReduce_87),\n        (88 , happyReduce_88),\n        (89 , happyReduce_89),\n        (90 , happyReduce_90),\n        (91 , happyReduce_91),\n        (92 , happyReduce_92),\n        (93 , happyReduce_93),\n        (94 , happyReduce_94),\n        (95 , happyReduce_95),\n        (96 , happyReduce_96),\n        (97 , happyReduce_97),\n        (98 , happyReduce_98),\n        (99 , happyReduce_99),\n        (100 , happyReduce_100),\n        (101 , happyReduce_101),\n        (102 , happyReduce_102),\n        (103 , happyReduce_103),\n        (104 , happyReduce_104),\n        (105 , happyReduce_105),\n        (106 , happyReduce_106),\n        (107 , happyReduce_107),\n        (108 , happyReduce_108),\n        (109 , happyReduce_109),\n        (110 , happyReduce_110),\n        (111 , happyReduce_111),\n        (112 , happyReduce_112),\n        (113 , happyReduce_113),\n        (114 , happyReduce_114),\n        (115 , happyReduce_115),\n        (116 , happyReduce_116),\n        (117 , happyReduce_117),\n        (118 , happyReduce_118),\n        (119 , happyReduce_119),\n        (120 , happyReduce_120),\n        (121 , happyReduce_121),\n        (122 , happyReduce_122),\n        (123 , happyReduce_123),\n        (124 , happyReduce_124),\n        (125 , happyReduce_125),\n        (126 , happyReduce_126),\n        (127 , happyReduce_127),\n        (128 , happyReduce_128),\n        (129 , happyReduce_129),\n        (130 , happyReduce_130),\n        (131 , happyReduce_131),\n        (132 , happyReduce_132),\n        (133 , happyReduce_133),\n        (134 , happyReduce_134),\n        (135 , happyReduce_135),\n        (136 , happyReduce_136),\n        (137 , happyReduce_137),\n        (138 , happyReduce_138),\n        (139 , happyReduce_139),\n        (140 , happyReduce_140),\n        (141 , happyReduce_141),\n        (142 , happyReduce_142),\n        (143 , happyReduce_143),\n        (144 , happyReduce_144),\n        (145 , happyReduce_145),\n        (146 , happyReduce_146),\n        (147 , happyReduce_147),\n        (148 , happyReduce_148),\n        (149 , happyReduce_149),\n        (150 , happyReduce_150),\n        (151 , happyReduce_151),\n        (152 , happyReduce_152),\n        (153 , happyReduce_153),\n        (154 , happyReduce_154),\n        (155 , happyReduce_155),\n        (156 , happyReduce_156),\n        (157 , happyReduce_157),\n        (158 , happyReduce_158),\n        (159 , happyReduce_159),\n        (160 , happyReduce_160),\n        (161 , happyReduce_161),\n        (162 , happyReduce_162),\n        (163 , happyReduce_163),\n        (164 , happyReduce_164),\n        (165 , happyReduce_165),\n        (166 , happyReduce_166),\n        (167 , happyReduce_167),\n        (168 , happyReduce_168),\n        (169 , happyReduce_169),\n        (170 , happyReduce_170),\n        (171 , happyReduce_171),\n        (172 , happyReduce_172),\n        (173 , happyReduce_173),\n        (174 , happyReduce_174),\n        (175 , happyReduce_175),\n        (176 , happyReduce_176),\n        (177 , happyReduce_177),\n        (178 , happyReduce_178),\n        (179 , happyReduce_179),\n        (180 , happyReduce_180),\n        (181 , happyReduce_181),\n        (182 , happyReduce_182),\n        (183 , happyReduce_183),\n        (184 , happyReduce_184),\n        (185 , happyReduce_185),\n        (186 , happyReduce_186),\n        (187 , happyReduce_187),\n        (188 , happyReduce_188),\n        (189 , happyReduce_189),\n        (190 , happyReduce_190),\n        (191 , happyReduce_191),\n        (192 , happyReduce_192),\n        (193 , happyReduce_193),\n        (194 , happyReduce_194),\n        (195 , happyReduce_195),\n        (196 , happyReduce_196),\n        (197 , happyReduce_197),\n        (198 , happyReduce_198),\n        (199 , happyReduce_199),\n        (200 , happyReduce_200),\n        (201 , happyReduce_201),\n        (202 , happyReduce_202),\n        (203 , happyReduce_203),\n        (204 , happyReduce_204),\n        (205 , happyReduce_205),\n        (206 , happyReduce_206),\n        (207 , happyReduce_207),\n        (208 , happyReduce_208),\n        (209 , happyReduce_209),\n        (210 , happyReduce_210),\n        (211 , happyReduce_211),\n        (212 , happyReduce_212),\n        (213 , happyReduce_213),\n        (214 , happyReduce_214),\n        (215 , happyReduce_215),\n        (216 , happyReduce_216),\n        (217 , happyReduce_217),\n        (218 , happyReduce_218),\n        (219 , happyReduce_219),\n        (220 , happyReduce_220),\n        (221 , happyReduce_221),\n        (222 , happyReduce_222),\n        (223 , happyReduce_223),\n        (224 , happyReduce_224),\n        (225 , happyReduce_225),\n        (226 , happyReduce_226),\n        (227 , happyReduce_227),\n        (228 , happyReduce_228),\n        (229 , happyReduce_229),\n        (230 , happyReduce_230),\n        (231 , happyReduce_231),\n        (232 , happyReduce_232),\n        (233 , happyReduce_233),\n        (234 , happyReduce_234),\n        (235 , happyReduce_235),\n        (236 , happyReduce_236),\n        (237 , happyReduce_237),\n        (238 , happyReduce_238),\n        (239 , happyReduce_239),\n        (240 , happyReduce_240),\n        (241 , happyReduce_241),\n        (242 , happyReduce_242),\n        (243 , happyReduce_243),\n        (244 , happyReduce_244),\n        (245 , happyReduce_245),\n        (246 , happyReduce_246),\n        (247 , happyReduce_247),\n        (248 , happyReduce_248),\n        (249 , happyReduce_249),\n        (250 , happyReduce_250),\n        (251 , happyReduce_251),\n        (252 , happyReduce_252),\n        (253 , happyReduce_253),\n        (254 , happyReduce_254),\n        (255 , happyReduce_255),\n        (256 , happyReduce_256),\n        (257 , happyReduce_257),\n        (258 , happyReduce_258),\n        (259 , happyReduce_259),\n        (260 , happyReduce_260),\n        (261 , happyReduce_261),\n        (262 , happyReduce_262),\n        (263 , happyReduce_263),\n        (264 , happyReduce_264),\n        (265 , happyReduce_265),\n        (266 , happyReduce_266),\n        (267 , happyReduce_267),\n        (268 , happyReduce_268),\n        (269 , happyReduce_269),\n        (270 , happyReduce_270),\n        (271 , happyReduce_271),\n        (272 , happyReduce_272),\n        (273 , happyReduce_273),\n        (274 , happyReduce_274),\n        (275 , happyReduce_275),\n        (276 , happyReduce_276),\n        (277 , happyReduce_277),\n        (278 , happyReduce_278),\n        (279 , happyReduce_279),\n        (280 , happyReduce_280),\n        (281 , happyReduce_281),\n        (282 , happyReduce_282),\n        (283 , happyReduce_283),\n        (284 , happyReduce_284),\n        (285 , happyReduce_285),\n        (286 , happyReduce_286),\n        (287 , happyReduce_287),\n        (288 , happyReduce_288),\n        (289 , happyReduce_289),\n        (290 , happyReduce_290),\n        (291 , happyReduce_291),\n        (292 , happyReduce_292),\n        (293 , happyReduce_293),\n        (294 , happyReduce_294),\n        (295 , happyReduce_295),\n        (296 , happyReduce_296),\n        (297 , happyReduce_297),\n        (298 , happyReduce_298),\n        (299 , happyReduce_299),\n        (300 , happyReduce_300),\n        (301 , happyReduce_301),\n        (302 , happyReduce_302),\n        (303 , happyReduce_303),\n        (304 , happyReduce_304),\n        (305 , happyReduce_305),\n        (306 , happyReduce_306),\n        (307 , happyReduce_307),\n        (308 , happyReduce_308),\n        (309 , happyReduce_309),\n        (310 , happyReduce_310),\n        (311 , happyReduce_311),\n        (312 , happyReduce_312),\n        (313 , happyReduce_313),\n        (314 , happyReduce_314),\n        (315 , happyReduce_315),\n        (316 , happyReduce_316),\n        (317 , happyReduce_317),\n        (318 , happyReduce_318),\n        (319 , happyReduce_319),\n        (320 , happyReduce_320),\n        (321 , happyReduce_321),\n        (322 , happyReduce_322),\n        (323 , happyReduce_323),\n        (324 , happyReduce_324),\n        (325 , happyReduce_325),\n        (326 , happyReduce_326),\n        (327 , happyReduce_327),\n        (328 , happyReduce_328),\n        (329 , happyReduce_329),\n        (330 , happyReduce_330),\n        (331 , happyReduce_331),\n        (332 , happyReduce_332),\n        (333 , happyReduce_333),\n        (334 , happyReduce_334),\n        (335 , happyReduce_335),\n        (336 , happyReduce_336),\n        (337 , happyReduce_337),\n        (338 , happyReduce_338),\n        (339 , happyReduce_339),\n        (340 , happyReduce_340),\n        (341 , happyReduce_341),\n        (342 , happyReduce_342),\n        (343 , happyReduce_343),\n        (344 , happyReduce_344),\n        (345 , happyReduce_345),\n        (346 , happyReduce_346),\n        (347 , happyReduce_347),\n        (348 , happyReduce_348),\n        (349 , happyReduce_349),\n        (350 , happyReduce_350),\n        (351 , happyReduce_351),\n        (352 , happyReduce_352),\n        (353 , happyReduce_353),\n        (354 , happyReduce_354),\n        (355 , happyReduce_355)\n        ]\n\nhappyRuleArr :: HappyAddr\nhappyRuleArr = HappyA# \"\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x08\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x0a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x0c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x0d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x0e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x0f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x11\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x12\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x13\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x14\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x15\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x16\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x17\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x18\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x19\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x1a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1b\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x1d\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x1e\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x1f\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x20\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x21\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x22\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x23\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x24\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x25\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x26\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x27\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x28\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x29\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x2b\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x2c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x2e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x2f\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x30\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x31\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x32\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x33\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x35\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x36\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x37\\x00\\x00\\x00\\x07\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x38\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x39\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3b\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3c\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3d\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x3d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x3f\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x06\\x00\\x00\\x00\\x41\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x42\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x43\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x44\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x45\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x46\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x47\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x48\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x49\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x4e\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x4f\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x50\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x51\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x51\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x52\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x53\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x54\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x55\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x56\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x56\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x57\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x58\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x59\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x5a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x5b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x5b\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x5c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x5d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x5e\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x5f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x60\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x61\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x62\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x63\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x63\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x64\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x65\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x66\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x67\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x68\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x69\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6b\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6d\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6e\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6e\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x6f\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x70\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x71\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x72\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x73\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x74\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x75\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x76\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x77\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x78\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x78\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x78\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x79\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x79\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x7a\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x7a\\x00\\x00\\x00\\x03\\x00\\x00\\x00\\x7b\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x7c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x7c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x7d\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x7d\\x00\\x00\\x00\\x02\\x00\\x00\\x00\"#\n\nhappyCatchStates :: [Happy_Prelude.Int]\nhappyCatchStates = []\n\nhappy_n_terms = 70 :: Happy_Prelude.Int\nhappy_n_nonterms = 126 :: Happy_Prelude.Int\n\nhappy_n_starts = 3 :: Happy_Prelude.Int\n\nhappyReduce_3 = happySpecReduce_2  0# happyReduction_3\nhappyReduction_3 _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn7\n                 ((happy_var_1, False)\n        )\nhappyReduction_3 _ _  = notHappyAtAll \n\nhappyReduce_4 = happySpecReduce_2  0# happyReduction_4\nhappyReduction_4 _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn7\n                 ((happy_var_1, True)\n        )\nhappyReduction_4 _ _  = notHappyAtAll \n\nhappyReduce_5 = happySpecReduce_2  1# happyReduction_5\nhappyReduction_5 _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_5 _ _  = notHappyAtAll \n\nhappyReduce_6 = happySpecReduce_2  2# happyReduction_6\nhappyReduction_6 _\n        (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_6 _ _  = notHappyAtAll \n\nhappyReduce_7 = happySpecReduce_1  3# happyReduction_7\nhappyReduction_7 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_7 _  = notHappyAtAll \n\nhappyReduce_8 = happySpecReduce_2  3# happyReduction_8\nhappyReduction_8 (HappyAbsSyn9  happy_var_2)\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ [happy_var_2]\n        )\nhappyReduction_8 _ _  = notHappyAtAll \n\nhappyReduce_9 = happyReduce 6# 4# happyReduction_9\nhappyReduction_9 ((HappyAbsSyn10  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn22  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn19  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CModE (Just happy_var_2) happy_var_4 happy_var_6)\n        ) `HappyStk` happyRest\n\nhappyReduce_10 = happyReduce 5# 4# happyReduction_10\nhappyReduction_10 ((HappyAbsSyn10  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn22  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CModE Nothing happy_var_3 happy_var_5)\n        ) `HappyStk` happyRest\n\nhappyReduce_11 = happySpecReduce_3  5# happyReduction_11\nhappyReduction_11 _\n        (HappyAbsSyn10  happy_var_2)\n        _\n         =  HappyAbsSyn10\n                 (happy_var_2\n        )\nhappyReduction_11 _ _ _  = notHappyAtAll \n\nhappyReduce_12 = happySpecReduce_2  5# happyReduction_12\nhappyReduction_12 _\n        _\n         =  HappyAbsSyn10\n                 ([]\n        )\n\nhappyReduce_13 = happySpecReduce_3  5# happyReduction_13\nhappyReduction_13 _\n        (HappyAbsSyn10  happy_var_2)\n        _\n         =  HappyAbsSyn10\n                 (happy_var_2\n        )\nhappyReduction_13 _ _ _  = notHappyAtAll \n\nhappyReduce_14 = happySpecReduce_2  5# happyReduction_14\nhappyReduction_14 _\n        _\n         =  HappyAbsSyn10\n                 ([]\n        )\n\nhappyReduce_15 = happySpecReduce_1  6# happyReduction_15\nhappyReduction_15 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_15 _  = notHappyAtAll \n\nhappyReduce_16 = happySpecReduce_3  6# happyReduction_16\nhappyReduction_16 (HappyAbsSyn10  happy_var_3)\n        _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ happy_var_3\n        )\nhappyReduction_16 _ _ _  = notHappyAtAll \n\nhappyReduce_17 = happySpecReduce_1  7# happyReduction_17\nhappyReduction_17 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_17 _  = notHappyAtAll \n\nhappyReduce_18 = happySpecReduce_3  7# happyReduction_18\nhappyReduction_18 (HappyAbsSyn10  happy_var_3)\n        _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ happy_var_3\n        )\nhappyReduction_18 _ _ _  = notHappyAtAll \n\nhappyReduce_19 = happySpecReduce_1  8# happyReduction_19\nhappyReduction_19 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_19 _  = notHappyAtAll \n\nhappyReduce_20 = happySpecReduce_1  8# happyReduction_20\nhappyReduction_20 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_20 _  = notHappyAtAll \n\nhappyReduce_21 = happySpecReduce_1  8# happyReduction_21\nhappyReduction_21 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_21 _  = notHappyAtAll \n\nhappyReduce_22 = happySpecReduce_1  8# happyReduction_22\nhappyReduction_22 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_22 _  = notHappyAtAll \n\nhappyReduce_23 = happySpecReduce_1  8# happyReduction_23\nhappyReduction_23 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_23 _  = notHappyAtAll \n\nhappyReduce_24 = happySpecReduce_1  8# happyReduction_24\nhappyReduction_24 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_24 _  = notHappyAtAll \n\nhappyReduce_25 = happySpecReduce_2  8# happyReduction_25\nhappyReduction_25 (HappyAbsSyn10  happy_var_2)\n        _\n         =  HappyAbsSyn10\n                 (map (\\(Loc sp e) -> Loc sp (CInlineE (Loc sp e))) happy_var_2\n        )\nhappyReduction_25 _ _  = notHappyAtAll \n\nhappyReduce_26 = happySpecReduce_1  8# happyReduction_26\nhappyReduction_26 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_26 _  = notHappyAtAll \n\nhappyReduce_27 = happySpecReduce_3  9# happyReduction_27\nhappyReduction_27 (HappyAbsSyn117  happy_var_3)\n        _\n        (HappyAbsSyn24  happy_var_1)\n         =  HappyAbsSyn10\n                 ([at happy_var_1 (CSigE (toEVar happy_var_1) happy_var_3)]\n        )\nhappyReduction_27 _ _ _  = notHappyAtAll \n\nhappyReduce_28 = happyReduce 5# 9# happyReduction_28\nhappyReduction_28 ((HappyAbsSyn10  happy_var_5) `HappyStk`\n        (HappyAbsSyn9  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn20  happy_var_2) `HappyStk`\n        (HappyAbsSyn24  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 ([at happy_var_1 (CAssE (toEVar happy_var_1) happy_var_2 happy_var_4 happy_var_5)]\n        ) `HappyStk` happyRest\n\nhappyReduce_29 = happyReduce 6# 9# happyReduction_29\nhappyReduction_29 ((HappyAbsSyn10  happy_var_6) `HappyStk`\n        (HappyAbsSyn9  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn17  happy_var_3) `HappyStk`\n        (HappyAbsSyn20  happy_var_2) `HappyStk`\n        (HappyAbsSyn24  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 ([at happy_var_1 (CGuardedAssE (toEVar happy_var_1) happy_var_2 happy_var_3 happy_var_5 happy_var_6)]\n        ) `HappyStk` happyRest\n\nhappyReduce_30 = happySpecReduce_1  10# happyReduction_30\nhappyReduction_30 (HappyAbsSyn18  happy_var_1)\n         =  HappyAbsSyn17\n                 ([happy_var_1]\n        )\nhappyReduction_30 _  = notHappyAtAll \n\nhappyReduce_31 = happySpecReduce_2  10# happyReduction_31\nhappyReduction_31 (HappyAbsSyn18  happy_var_2)\n        (HappyAbsSyn17  happy_var_1)\n         =  HappyAbsSyn17\n                 (happy_var_1 ++ [happy_var_2]\n        )\nhappyReduction_31 _ _  = notHappyAtAll \n\nhappyReduce_32 = happyReduce 4# 11# happyReduction_32\nhappyReduction_32 ((HappyAbsSyn9  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn9  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn18\n                 ((happy_var_2, happy_var_4)\n        ) `HappyStk` happyRest\n\nhappyReduce_33 = happySpecReduce_1  12# happyReduction_33\nhappyReduction_33 (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn19\n                 (T.intercalate \".\" happy_var_1\n        )\nhappyReduction_33 _  = notHappyAtAll \n\nhappyReduce_34 = happySpecReduce_1  13# happyReduction_34\nhappyReduction_34 (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn20\n                 ([happy_var_1]\n        )\nhappyReduction_34 _  = notHappyAtAll \n\nhappyReduce_35 = happySpecReduce_3  13# happyReduction_35\nhappyReduction_35 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn20\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_35 _ _ _  = notHappyAtAll \n\nhappyReduce_36 = happySpecReduce_3  13# happyReduction_36\nhappyReduction_36 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn20\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_36 _ _ _  = notHappyAtAll \n\nhappyReduce_37 = happySpecReduce_3  13# happyReduction_37\nhappyReduction_37 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn20\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_37 _ _ _  = notHappyAtAll \n\nhappyReduce_38 = happySpecReduce_3  13# happyReduction_38\nhappyReduction_38 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn20\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_38 _ _ _  = notHappyAtAll \n\nhappyReduce_39 = happySpecReduce_1  14# happyReduction_39\nhappyReduction_39 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn19\n                 (getName happy_var_1\n        )\nhappyReduction_39 _  = notHappyAtAll \n\nhappyReduce_40 = happySpecReduce_3  14# happyReduction_40\nhappyReduction_40 (HappyTerminal happy_var_3)\n        _\n        (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn19\n                 (happy_var_1 <> \"-\" <> getName happy_var_3\n        )\nhappyReduction_40 _ _ _  = notHappyAtAll \n\nhappyReduce_41 = happySpecReduce_1  15# happyReduction_41\nhappyReduction_41 _\n         =  HappyAbsSyn22\n                 (CstExportAll\n        )\n\nhappyReduce_42 = happySpecReduce_1  15# happyReduction_42\nhappyReduction_42 (HappyAbsSyn23  happy_var_1)\n         =  HappyAbsSyn22\n                 (CstExportMany happy_var_1\n        )\nhappyReduction_42 _  = notHappyAtAll \n\nhappyReduce_43 = happySpecReduce_1  16# happyReduction_43\nhappyReduction_43 (HappyAbsSyn24  happy_var_1)\n         =  HappyAbsSyn23\n                 ([happy_var_1]\n        )\nhappyReduction_43 _  = notHappyAtAll \n\nhappyReduce_44 = happySpecReduce_3  16# happyReduction_44\nhappyReduction_44 (HappyAbsSyn24  happy_var_3)\n        _\n        (HappyAbsSyn23  happy_var_1)\n         =  HappyAbsSyn23\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_44 _ _ _  = notHappyAtAll \n\nhappyReduce_45 = happySpecReduce_1  17# happyReduction_45\nhappyReduction_45 (HappyAbsSyn24  happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_45 _  = notHappyAtAll \n\nhappyReduce_46 = happySpecReduce_1  18# happyReduction_46\nhappyReduction_46 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_46 _  = notHappyAtAll \n\nhappyReduce_47 = happySpecReduce_3  18# happyReduction_47\nhappyReduction_47 _\n        (HappyAbsSyn24  happy_var_2)\n        _\n         =  HappyAbsSyn24\n                 (happy_var_2\n        )\nhappyReduction_47 _ _ _  = notHappyAtAll \n\nhappyReduce_48 = happySpecReduce_3  18# happyReduction_48\nhappyReduction_48 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn24\n                 (happy_var_2\n        )\nhappyReduction_48 _ _ _  = notHappyAtAll \n\nhappyReduce_49 = happySpecReduce_3  18# happyReduction_49\nhappyReduction_49 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn24\n                 (happy_var_2\n        )\nhappyReduction_49 _ _ _  = notHappyAtAll \n\nhappyReduce_50 = happySpecReduce_1  18# happyReduction_50\nhappyReduction_50 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_50 _  = notHappyAtAll \n\nhappyReduce_51 = happySpecReduce_3  19# happyReduction_51\nhappyReduction_51 (HappyAbsSyn28  happy_var_3)\n        (HappyAbsSyn19  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CImpE (Import (MV happy_var_2) happy_var_3 [] Nothing))\n        )\nhappyReduction_51 _ _ _  = notHappyAtAll \n\nhappyReduce_52 = happyReduce 5# 19# happyReduction_52\nhappyReduction_52 ((HappyAbsSyn28  happy_var_5) `HappyStk`\n        (HappyTerminal happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn19  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CImpE (Import (MV happy_var_2) happy_var_5 [] (Just (EV (getName happy_var_4)))))\n        ) `HappyStk` happyRest\n\nhappyReduce_53 = happyReduce 4# 19# happyReduction_53\nhappyReduction_53 ((HappyAbsSyn28  happy_var_4) `HappyStk`\n        (HappyAbsSyn19  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CImpE (Import (MV (\".\" <> happy_var_3)) happy_var_4 [] Nothing))\n        ) `HappyStk` happyRest\n\nhappyReduce_54 = happyReduce 6# 19# happyReduction_54\nhappyReduction_54 ((HappyAbsSyn28  happy_var_6) `HappyStk`\n        (HappyTerminal happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn19  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CImpE (Import (MV (\".\" <> happy_var_3)) happy_var_6 [] (Just (EV (getName happy_var_5)))))\n        ) `HappyStk` happyRest\n\nhappyReduce_55 = happySpecReduce_3  20# happyReduction_55\nhappyReduction_55 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn19\n                 (happy_var_1 <> \"/\" <> happy_var_3\n        )\nhappyReduction_55 _ _ _  = notHappyAtAll \n\nhappyReduce_56 = happySpecReduce_1  20# happyReduction_56\nhappyReduction_56 (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn19\n                 (happy_var_1\n        )\nhappyReduction_56 _  = notHappyAtAll \n\nhappyReduce_57 = happySpecReduce_0  21# happyReduction_57\nhappyReduction_57  =  HappyAbsSyn28\n                 (Nothing\n        )\n\nhappyReduce_58 = happySpecReduce_3  21# happyReduction_58\nhappyReduction_58 _\n        (HappyAbsSyn29  happy_var_2)\n        _\n         =  HappyAbsSyn28\n                 (Just happy_var_2\n        )\nhappyReduction_58 _ _ _  = notHappyAtAll \n\nhappyReduce_59 = happySpecReduce_1  22# happyReduction_59\nhappyReduction_59 (HappyAbsSyn30  happy_var_1)\n         =  HappyAbsSyn29\n                 ([happy_var_1]\n        )\nhappyReduction_59 _  = notHappyAtAll \n\nhappyReduce_60 = happySpecReduce_3  22# happyReduction_60\nhappyReduction_60 (HappyAbsSyn30  happy_var_3)\n        _\n        (HappyAbsSyn29  happy_var_1)\n         =  HappyAbsSyn29\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_60 _ _ _  = notHappyAtAll \n\nhappyReduce_61 = happySpecReduce_1  23# happyReduction_61\nhappyReduction_61 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn30\n                 (AliasedTerm (EV (getName happy_var_1)) (EV (getName happy_var_1))\n        )\nhappyReduction_61 _  = notHappyAtAll \n\nhappyReduce_62 = happySpecReduce_3  23# happyReduction_62\nhappyReduction_62 (HappyTerminal happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn30\n                 (AliasedTerm (EV (getName happy_var_1)) (EV (getName happy_var_3))\n        )\nhappyReduction_62 _ _ _  = notHappyAtAll \n\nhappyReduce_63 = happyReduce 5# 23# happyReduction_63\nhappyReduction_63 ((HappyTerminal happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn24  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn30\n                 (AliasedTerm (EV (getOp happy_var_2)) (EV (getName happy_var_5))\n        ) `HappyStk` happyRest\n\nhappyReduce_64 = happySpecReduce_3  23# happyReduction_64\nhappyReduction_64 _\n        (HappyAbsSyn24  happy_var_2)\n        _\n         =  HappyAbsSyn30\n                 (AliasedTerm (EV (getOp happy_var_2)) (EV (getOp happy_var_2))\n        )\nhappyReduction_64 _ _ _  = notHappyAtAll \n\nhappyReduce_65 = happyReduce 5# 23# happyReduction_65\nhappyReduction_65 ((HappyTerminal happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn30\n                 (AliasedTerm (EV \"-\") (EV (getName happy_var_5))\n        ) `HappyStk` happyRest\n\nhappyReduce_66 = happySpecReduce_3  23# happyReduction_66\nhappyReduction_66 _\n        _\n        _\n         =  HappyAbsSyn30\n                 (AliasedTerm (EV \"-\") (EV \"-\")\n        )\n\nhappyReduce_67 = happyReduce 5# 23# happyReduction_67\nhappyReduction_67 ((HappyTerminal happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn30\n                 (AliasedTerm (EV \".\") (EV (getName happy_var_5))\n        ) `HappyStk` happyRest\n\nhappyReduce_68 = happySpecReduce_3  23# happyReduction_68\nhappyReduction_68 _\n        _\n        _\n         =  HappyAbsSyn30\n                 (AliasedTerm (EV \".\") (EV \".\")\n        )\n\nhappyReduce_69 = happySpecReduce_1  23# happyReduction_69\nhappyReduction_69 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn30\n                 (AliasedType (TV (getName happy_var_1)) (TV (getName happy_var_1))\n        )\nhappyReduction_69 _  = notHappyAtAll \n\nhappyReduce_70 = happySpecReduce_3  23# happyReduction_70\nhappyReduction_70 (HappyTerminal happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn30\n                 (AliasedType (TV (getName happy_var_1)) (TV (getName happy_var_3))\n        )\nhappyReduction_70 _ _ _  = notHappyAtAll \n\nhappyReduce_71 = happyReduce 6# 24# happyReduction_71\nhappyReduction_71 ((HappyAbsSyn42  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn36  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CTypE (CstTypeAlias (Just happy_var_2) happy_var_4 happy_var_6))\n        ) `HappyStk` happyRest\n\nhappyReduce_72 = happyReduce 6# 24# happyReduction_72\nhappyReduction_72 ((HappyAbsSyn42  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn36  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CTypE (CstTypeAlias (Just happy_var_2) happy_var_4 happy_var_6))\n        ) `HappyStk` happyRest\n\nhappyReduce_73 = happyReduce 5# 24# happyReduction_73\nhappyReduction_73 ((HappyAbsSyn8  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn37  happy_var_3) `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CTypE (CstTypeAlias Nothing (TV (getName happy_var_2), happy_var_3) (happy_var_5, False)))\n        ) `HappyStk` happyRest\n\nhappyReduce_74 = happySpecReduce_3  24# happyReduction_74\nhappyReduction_74 (HappyAbsSyn37  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CTypE (CstTypeAliasForward (TV (getName happy_var_2), happy_var_3)))\n        )\nhappyReduction_74 _ _ _  = notHappyAtAll \n\nhappyReduce_75 = happyReduce 7# 24# happyReduction_75\nhappyReduction_75 ((HappyAbsSyn8  happy_var_7) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn37  happy_var_4) `HappyStk`\n        (HappyTerminal happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CTypE (CstTypeAlias Nothing (TV (getName happy_var_3), happy_var_4) (happy_var_7, False)))\n        ) `HappyStk` happyRest\n\nhappyReduce_76 = happyReduce 5# 24# happyReduction_76\nhappyReduction_76 (_ `HappyStk`\n        (HappyAbsSyn37  happy_var_4) `HappyStk`\n        (HappyTerminal happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CTypE (CstTypeAliasForward (TV (getName happy_var_3), happy_var_4)))\n        ) `HappyStk` happyRest\n\nhappyReduce_77 = happyReduce 6# 24# happyReduction_77\nhappyReduction_77 (_ `HappyStk`\n        (HappyAbsSyn40  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn36  happy_var_2) `HappyStk`\n        (HappyAbsSyn32  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at (fst happy_var_1) (CTypE (CstNamTypeWhere (snd happy_var_1) happy_var_2 happy_var_5))\n        ) `HappyStk` happyRest\n\nhappyReduce_78 = happyReduce 5# 24# happyReduction_78\nhappyReduction_78 ((HappyAbsSyn34  happy_var_5) `HappyStk`\n        (HappyAbsSyn33  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn36  happy_var_2) `HappyStk`\n        (HappyAbsSyn32  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at (fst happy_var_1) (CTypE (CstNamTypeLegacy Nothing (snd happy_var_1) happy_var_2 happy_var_4 happy_var_5))\n        ) `HappyStk` happyRest\n\nhappyReduce_79 = happyReduce 7# 24# happyReduction_79\nhappyReduction_79 ((HappyAbsSyn34  happy_var_7) `HappyStk`\n        (HappyAbsSyn33  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn36  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        (HappyAbsSyn32  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at (fst happy_var_1) (CTypE (CstNamTypeLegacy (Just happy_var_2) (snd happy_var_1) happy_var_4 happy_var_6 happy_var_7))\n        ) `HappyStk` happyRest\n\nhappyReduce_80 = happyReduce 7# 24# happyReduction_80\nhappyReduction_80 ((HappyAbsSyn34  happy_var_7) `HappyStk`\n        (HappyAbsSyn33  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn36  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        (HappyAbsSyn32  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at (fst happy_var_1) (CTypE (CstNamTypeLegacy (Just happy_var_2) (snd happy_var_1) happy_var_4 happy_var_6 happy_var_7))\n        ) `HappyStk` happyRest\n\nhappyReduce_81 = happySpecReduce_1  25# happyReduction_81\nhappyReduction_81 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn32\n                 ((happy_var_1, NamRecord)\n        )\nhappyReduction_81 _  = notHappyAtAll \n\nhappyReduce_82 = happySpecReduce_1  25# happyReduction_82\nhappyReduction_82 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn32\n                 ((happy_var_1, NamObject)\n        )\nhappyReduction_82 _  = notHappyAtAll \n\nhappyReduce_83 = happySpecReduce_1  25# happyReduction_83\nhappyReduction_83 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn32\n                 ((happy_var_1, NamTable)\n        )\nhappyReduction_83 _  = notHappyAtAll \n\nhappyReduce_84 = happySpecReduce_1  26# happyReduction_84\nhappyReduction_84 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn33\n                 ((getString happy_var_1, True)\n        )\nhappyReduction_84 _  = notHappyAtAll \n\nhappyReduce_85 = happySpecReduce_1  26# happyReduction_85\nhappyReduction_85 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn33\n                 ((getName happy_var_1, False)\n        )\nhappyReduction_85 _  = notHappyAtAll \n\nhappyReduce_86 = happySpecReduce_1  26# happyReduction_86\nhappyReduction_86 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn33\n                 ((getName happy_var_1, False)\n        )\nhappyReduction_86 _  = notHappyAtAll \n\nhappyReduce_87 = happySpecReduce_0  27# happyReduction_87\nhappyReduction_87  =  HappyAbsSyn34\n                 ([]\n        )\n\nhappyReduce_88 = happySpecReduce_3  27# happyReduction_88\nhappyReduction_88 _\n        (HappyAbsSyn34  happy_var_2)\n        _\n         =  HappyAbsSyn34\n                 (happy_var_2\n        )\nhappyReduction_88 _ _ _  = notHappyAtAll \n\nhappyReduce_89 = happySpecReduce_1  28# happyReduction_89\nhappyReduction_89 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_89 _  = notHappyAtAll \n\nhappyReduce_90 = happySpecReduce_1  28# happyReduction_90\nhappyReduction_90 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_90 _  = notHappyAtAll \n\nhappyReduce_91 = happySpecReduce_2  29# happyReduction_91\nhappyReduction_91 (HappyAbsSyn37  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn36\n                 ((TV (getName happy_var_1), happy_var_2)\n        )\nhappyReduction_91 _ _  = notHappyAtAll \n\nhappyReduce_92 = happyReduce 4# 29# happyReduction_92\nhappyReduction_92 (_ `HappyStk`\n        (HappyAbsSyn37  happy_var_3) `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn36\n                 ((TV (getName happy_var_2), happy_var_3)\n        ) `HappyStk` happyRest\n\nhappyReduce_93 = happySpecReduce_0  30# happyReduction_93\nhappyReduction_93  =  HappyAbsSyn37\n                 ([]\n        )\n\nhappyReduce_94 = happySpecReduce_2  30# happyReduction_94\nhappyReduction_94 (HappyTerminal happy_var_2)\n        (HappyAbsSyn37  happy_var_1)\n         =  HappyAbsSyn37\n                 (happy_var_1 ++ [Left (TV (getName happy_var_2), KindType)]\n        )\nhappyReduction_94 _ _  = notHappyAtAll \n\nhappyReduce_95 = happyReduce 6# 30# happyReduction_95\nhappyReduction_95 (_ `HappyStk`\n        (HappyTerminal happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn37  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn37\n                 (happy_var_1 ++ [Left (TV (getName happy_var_3), parseKind (getName happy_var_5))]\n        ) `HappyStk` happyRest\n\nhappyReduce_96 = happyReduce 4# 30# happyReduction_96\nhappyReduction_96 (_ `HappyStk`\n        (HappyAbsSyn8  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn37  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn37\n                 (happy_var_1 ++ [Right happy_var_3]\n        ) `HappyStk` happyRest\n\nhappyReduce_97 = happySpecReduce_3  31# happyReduction_97\nhappyReduction_97 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn38\n                 ((Key (getName happy_var_1), happy_var_3)\n        )\nhappyReduction_97 _ _ _  = notHappyAtAll \n\nhappyReduce_98 = happySpecReduce_3  32# happyReduction_98\nhappyReduction_98 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn39\n                 ((happy_var_1, Key (getName happy_var_1), happy_var_3)\n        )\nhappyReduction_98 _ _ _  = notHappyAtAll \n\nhappyReduce_99 = happySpecReduce_1  33# happyReduction_99\nhappyReduction_99 (HappyAbsSyn39  happy_var_1)\n         =  HappyAbsSyn40\n                 ([happy_var_1]\n        )\nhappyReduction_99 _  = notHappyAtAll \n\nhappyReduce_100 = happySpecReduce_3  33# happyReduction_100\nhappyReduction_100 (HappyAbsSyn39  happy_var_3)\n        _\n        (HappyAbsSyn40  happy_var_1)\n         =  HappyAbsSyn40\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_100 _ _ _  = notHappyAtAll \n\nhappyReduce_101 = happySpecReduce_1  34# happyReduction_101\nhappyReduction_101 (HappyAbsSyn38  happy_var_1)\n         =  HappyAbsSyn34\n                 ([happy_var_1]\n        )\nhappyReduction_101 _  = notHappyAtAll \n\nhappyReduce_102 = happySpecReduce_3  34# happyReduction_102\nhappyReduction_102 (HappyAbsSyn38  happy_var_3)\n        _\n        (HappyAbsSyn34  happy_var_1)\n         =  HappyAbsSyn34\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_102 _ _ _  = notHappyAtAll \n\nhappyReduce_103 = happySpecReduce_2  35# happyReduction_103\nhappyReduction_103 (HappyAbsSyn43  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn42\n                 ((case happy_var_2 of { [] -> VarU (TV (getString happy_var_1)); ts -> AppU (VarU (TV (getString happy_var_1))) ts }, True)\n        )\nhappyReduction_103 _ _  = notHappyAtAll \n\nhappyReduce_104 = happySpecReduce_1  35# happyReduction_104\nhappyReduction_104 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn42\n                 ((happy_var_1, False)\n        )\nhappyReduction_104 _  = notHappyAtAll \n\nhappyReduce_105 = happySpecReduce_0  36# happyReduction_105\nhappyReduction_105  =  HappyAbsSyn43\n                 ([]\n        )\n\nhappyReduce_106 = happySpecReduce_2  36# happyReduction_106\nhappyReduction_106 (HappyAbsSyn8  happy_var_2)\n        (HappyAbsSyn43  happy_var_1)\n         =  HappyAbsSyn43\n                 (happy_var_1 ++ [happy_var_2]\n        )\nhappyReduction_106 _ _  = notHappyAtAll \n\nhappyReduce_107 = happySpecReduce_3  37# happyReduction_107\nhappyReduction_107 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (case happy_var_3 of { FunU args ret -> FunU (happy_var_1 : args) ret; t -> FunU [happy_var_1] t }\n        )\nhappyReduction_107 _ _ _  = notHappyAtAll \n\nhappyReduce_108 = happySpecReduce_1  37# happyReduction_108\nhappyReduction_108 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_108 _  = notHappyAtAll \n\nhappyReduce_109 = happySpecReduce_3  38# happyReduction_109\nhappyReduction_109 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (ExistU (TV (getName happy_var_2)) ([], Open) ([], Open)\n        )\nhappyReduction_109 _ _ _  = notHappyAtAll \n\nhappyReduce_110 = happyReduce 4# 38# happyReduction_110\nhappyReduction_110 ((HappyAbsSyn8  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn116  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn8\n                 (EffectU (EffectSet (Set.fromList happy_var_2)) happy_var_4\n        ) `HappyStk` happyRest\n\nhappyReduce_111 = happySpecReduce_1  38# happyReduction_111\nhappyReduction_111 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_111 _  = notHappyAtAll \n\nhappyReduce_112 = happySpecReduce_3  39# happyReduction_112\nhappyReduction_112 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatAddU happy_var_1 happy_var_3\n        )\nhappyReduction_112 _ _ _  = notHappyAtAll \n\nhappyReduce_113 = happySpecReduce_3  39# happyReduction_113\nhappyReduction_113 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatSubU happy_var_1 happy_var_3\n        )\nhappyReduction_113 _ _ _  = notHappyAtAll \n\nhappyReduce_114 = happySpecReduce_1  39# happyReduction_114\nhappyReduction_114 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_114 _  = notHappyAtAll \n\nhappyReduce_115 = happySpecReduce_3  40# happyReduction_115\nhappyReduction_115 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatMulU happy_var_1 happy_var_3\n        )\nhappyReduction_115 _ _ _  = notHappyAtAll \n\nhappyReduce_116 = happySpecReduce_3  40# happyReduction_116\nhappyReduction_116 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatDivU happy_var_1 happy_var_3\n        )\nhappyReduction_116 _ _ _  = notHappyAtAll \n\nhappyReduce_117 = happySpecReduce_1  40# happyReduction_117\nhappyReduction_117 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_117 _  = notHappyAtAll \n\nhappyReduce_118 = happySpecReduce_2  41# happyReduction_118\nhappyReduction_118 (HappyAbsSyn8  happy_var_2)\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (applyType happy_var_1 happy_var_2\n        )\nhappyReduction_118 _ _  = notHappyAtAll \n\nhappyReduce_119 = happySpecReduce_1  41# happyReduction_119\nhappyReduction_119 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_119 _  = notHappyAtAll \n\nhappyReduce_120 = happySpecReduce_2  42# happyReduction_120\nhappyReduction_120 _\n        _\n         =  HappyAbsSyn8\n                 (BT.unitU\n        )\n\nhappyReduce_121 = happySpecReduce_3  42# happyReduction_121\nhappyReduction_121 _\n        (HappyAbsSyn8  happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (happy_var_2\n        )\nhappyReduction_121 _ _ _  = notHappyAtAll \n\nhappyReduce_122 = happyReduce 5# 42# happyReduction_122\nhappyReduction_122 (_ `HappyStk`\n        (HappyAbsSyn43  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn8  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn8\n                 (BT.tupleU (happy_var_2 : happy_var_4)\n        ) `HappyStk` happyRest\n\nhappyReduce_123 = happySpecReduce_3  42# happyReduction_123\nhappyReduction_123 _\n        (HappyAbsSyn8  happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (BT.listU happy_var_2\n        )\nhappyReduction_123 _ _ _  = notHappyAtAll \n\nhappyReduce_124 = happySpecReduce_2  42# happyReduction_124\nhappyReduction_124 (HappyAbsSyn8  happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (OptionalU happy_var_2\n        )\nhappyReduction_124 _ _  = notHappyAtAll \n\nhappyReduce_125 = happySpecReduce_1  42# happyReduction_125\nhappyReduction_125 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (VarU (TV (getName happy_var_1))\n        )\nhappyReduction_125 _  = notHappyAtAll \n\nhappyReduce_126 = happySpecReduce_3  42# happyReduction_126\nhappyReduction_126 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (LabeledU (TV (getName happy_var_1)) happy_var_3\n        )\nhappyReduction_126 _ _ _  = notHappyAtAll \n\nhappyReduce_127 = happySpecReduce_1  42# happyReduction_127\nhappyReduction_127 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (VarU (TV (getName happy_var_1))\n        )\nhappyReduction_127 _  = notHappyAtAll \n\nhappyReduce_128 = happySpecReduce_1  42# happyReduction_128\nhappyReduction_128 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (NatLitU (getInt happy_var_1)\n        )\nhappyReduction_128 _  = notHappyAtAll \n\nhappyReduce_129 = happyReduce 6# 43# happyReduction_129\nhappyReduction_129 (_ `HappyStk`\n        (HappyAbsSyn53  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn51  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CClsE happy_var_2 happy_var_5)\n        ) `HappyStk` happyRest\n\nhappyReduce_130 = happySpecReduce_2  43# happyReduction_130\nhappyReduction_130 (HappyAbsSyn51  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CClsE happy_var_2 [])\n        )\nhappyReduction_130 _ _  = notHappyAtAll \n\nhappyReduce_131 = happySpecReduce_3  44# happyReduction_131\nhappyReduction_131 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn51\n                 (CCHConstrained happy_var_1 happy_var_3\n        )\nhappyReduction_131 _ _ _  = notHappyAtAll \n\nhappyReduce_132 = happyReduce 5# 44# happyReduction_132\nhappyReduction_132 ((HappyAbsSyn8  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn52  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn51\n                 (CCHMultiConstrained happy_var_2 happy_var_5\n        ) `HappyStk` happyRest\n\nhappyReduce_133 = happySpecReduce_1  44# happyReduction_133\nhappyReduction_133 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn51\n                 (CCHSimple happy_var_1\n        )\nhappyReduction_133 _  = notHappyAtAll \n\nhappyReduce_134 = happySpecReduce_1  45# happyReduction_134\nhappyReduction_134 (HappyAbsSyn124  happy_var_1)\n         =  HappyAbsSyn52\n                 ([happy_var_1]\n        )\nhappyReduction_134 _  = notHappyAtAll \n\nhappyReduce_135 = happySpecReduce_3  45# happyReduction_135\nhappyReduction_135 (HappyAbsSyn124  happy_var_3)\n        _\n        (HappyAbsSyn52  happy_var_1)\n         =  HappyAbsSyn52\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_135 _ _ _  = notHappyAtAll \n\nhappyReduce_136 = happySpecReduce_1  46# happyReduction_136\nhappyReduction_136 (HappyAbsSyn54  happy_var_1)\n         =  HappyAbsSyn53\n                 ([happy_var_1]\n        )\nhappyReduction_136 _  = notHappyAtAll \n\nhappyReduce_137 = happySpecReduce_3  46# happyReduction_137\nhappyReduction_137 (HappyAbsSyn54  happy_var_3)\n        _\n        (HappyAbsSyn53  happy_var_1)\n         =  HappyAbsSyn53\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_137 _ _ _  = notHappyAtAll \n\nhappyReduce_138 = happySpecReduce_3  47# happyReduction_138\nhappyReduction_138 (HappyAbsSyn117  happy_var_3)\n        _\n        (HappyAbsSyn24  happy_var_1)\n         =  HappyAbsSyn54\n                 (CstSigItem (toEVar happy_var_1) happy_var_3\n        )\nhappyReduction_138 _ _ _  = notHappyAtAll \n\nhappyReduce_139 = happyReduce 6# 48# happyReduction_139\nhappyReduction_139 (_ `HappyStk`\n        (HappyAbsSyn57  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn56  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 ([at happy_var_1 (CIstE cn ts (concat happy_var_5)) | (cn, ts) <- happy_var_2]\n        ) `HappyStk` happyRest\n\nhappyReduce_140 = happySpecReduce_2  48# happyReduction_140\nhappyReduction_140 (HappyAbsSyn56  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn10\n                 ([at happy_var_1 (CIstE cn ts []) | (cn, ts) <- happy_var_2]\n        )\nhappyReduction_140 _ _  = notHappyAtAll \n\nhappyReduce_141 = happySpecReduce_2  49# happyReduction_141\nhappyReduction_141 (HappyAbsSyn43  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn56\n                 ([(ClassName (getName happy_var_1), happy_var_2)]\n        )\nhappyReduction_141 _ _  = notHappyAtAll \n\nhappyReduce_142 = happyReduce 4# 49# happyReduction_142\nhappyReduction_142 ((HappyAbsSyn43  happy_var_4) `HappyStk`\n        (HappyTerminal happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn56  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn56\n                 (happy_var_1 ++ [(ClassName (getName happy_var_3), happy_var_4)]\n        ) `HappyStk` happyRest\n\nhappyReduce_143 = happySpecReduce_1  50# happyReduction_143\nhappyReduction_143 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn57\n                 ([happy_var_1]\n        )\nhappyReduction_143 _  = notHappyAtAll \n\nhappyReduce_144 = happySpecReduce_3  50# happyReduction_144\nhappyReduction_144 (HappyAbsSyn10  happy_var_3)\n        _\n        (HappyAbsSyn57  happy_var_1)\n         =  HappyAbsSyn57\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_144 _ _ _  = notHappyAtAll \n\nhappyReduce_145 = happySpecReduce_1  51# happyReduction_145\nhappyReduction_145 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_145 _  = notHappyAtAll \n\nhappyReduce_146 = happySpecReduce_2  51# happyReduction_146\nhappyReduction_146 (HappyAbsSyn10  happy_var_2)\n        _\n         =  HappyAbsSyn10\n                 (map (\\(Loc sp e) -> Loc sp (CInlineE (Loc sp e))) happy_var_2\n        )\nhappyReduction_146 _ _  = notHappyAtAll \n\nhappyReduce_147 = happySpecReduce_1  51# happyReduction_147\nhappyReduction_147 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_147 _  = notHappyAtAll \n\nhappyReduce_148 = happySpecReduce_3  52# happyReduction_148\nhappyReduction_148 (HappyAbsSyn60  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CFixE InfixL (fromInteger (getInt happy_var_2)) happy_var_3)\n        )\nhappyReduction_148 _ _ _  = notHappyAtAll \n\nhappyReduce_149 = happySpecReduce_3  52# happyReduction_149\nhappyReduction_149 (HappyAbsSyn60  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CFixE InfixR (fromInteger (getInt happy_var_2)) happy_var_3)\n        )\nhappyReduction_149 _ _ _  = notHappyAtAll \n\nhappyReduce_150 = happySpecReduce_3  52# happyReduction_150\nhappyReduction_150 (HappyAbsSyn60  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CFixE InfixN (fromInteger (getInt happy_var_2)) happy_var_3)\n        )\nhappyReduction_150 _ _ _  = notHappyAtAll \n\nhappyReduce_151 = happySpecReduce_1  53# happyReduction_151\nhappyReduction_151 (HappyAbsSyn61  happy_var_1)\n         =  HappyAbsSyn60\n                 ([happy_var_1]\n        )\nhappyReduction_151 _  = notHappyAtAll \n\nhappyReduce_152 = happySpecReduce_3  53# happyReduction_152\nhappyReduction_152 (HappyAbsSyn61  happy_var_3)\n        _\n        (HappyAbsSyn60  happy_var_1)\n         =  HappyAbsSyn60\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_152 _ _ _  = notHappyAtAll \n\nhappyReduce_153 = happySpecReduce_3  54# happyReduction_153\nhappyReduction_153 _\n        (HappyAbsSyn24  happy_var_2)\n        _\n         =  HappyAbsSyn61\n                 (EV (getOp happy_var_2)\n        )\nhappyReduction_153 _ _ _  = notHappyAtAll \n\nhappyReduce_154 = happySpecReduce_3  54# happyReduction_154\nhappyReduction_154 _\n        _\n        _\n         =  HappyAbsSyn61\n                 (EV \"-\"\n        )\n\nhappyReduce_155 = happySpecReduce_3  54# happyReduction_155\nhappyReduction_155 _\n        _\n        _\n         =  HappyAbsSyn61\n                 (EV \".\"\n        )\n\nhappyReduce_156 = happySpecReduce_1  54# happyReduction_156\nhappyReduction_156 (HappyAbsSyn24  happy_var_1)\n         =  HappyAbsSyn61\n                 (EV (getOp happy_var_1)\n        )\nhappyReduction_156 _  = notHappyAtAll \n\nhappyReduce_157 = happySpecReduce_1  54# happyReduction_157\nhappyReduction_157 _\n         =  HappyAbsSyn61\n                 (EV \".\"\n        )\n\nhappyReduce_158 = happySpecReduce_1  54# happyReduction_158\nhappyReduction_158 _\n         =  HappyAbsSyn61\n                 (EV \"-\"\n        )\n\nhappyReduce_159 = happySpecReduce_1  54# happyReduction_159\nhappyReduction_159 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn61\n                 (EV (getName happy_var_1)\n        )\nhappyReduction_159 _  = notHappyAtAll \n\nhappyReduce_160 = happyReduce 6# 55# happyReduction_160\nhappyReduction_160 (_ `HappyStk`\n        (HappyAbsSyn64  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn63  happy_var_3) `HappyStk`\n        (HappyAbsSyn24  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 ([at happy_var_1 (CSrcOldE happy_var_2 happy_var_3 happy_var_5)]\n        ) `HappyStk` happyRest\n\nhappyReduce_161 = happyReduce 7# 55# happyReduction_161\nhappyReduction_161 (_ `HappyStk`\n        (HappyAbsSyn67  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn63  happy_var_3) `HappyStk`\n        (HappyAbsSyn24  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 ([at happy_var_1 (CSrcNewE happy_var_2 happy_var_3 happy_var_6)]\n        ) `HappyStk` happyRest\n\nhappyReduce_162 = happySpecReduce_0  56# happyReduction_162\nhappyReduction_162  =  HappyAbsSyn63\n                 (Nothing\n        )\n\nhappyReduce_163 = happySpecReduce_2  56# happyReduction_163\nhappyReduction_163 (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn63\n                 (Just (getString happy_var_2)\n        )\nhappyReduction_163 _ _  = notHappyAtAll \n\nhappyReduce_164 = happySpecReduce_1  57# happyReduction_164\nhappyReduction_164 (HappyAbsSyn65  happy_var_1)\n         =  HappyAbsSyn64\n                 ([happy_var_1]\n        )\nhappyReduction_164 _  = notHappyAtAll \n\nhappyReduce_165 = happySpecReduce_3  57# happyReduction_165\nhappyReduction_165 (HappyAbsSyn65  happy_var_3)\n        _\n        (HappyAbsSyn64  happy_var_1)\n         =  HappyAbsSyn64\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_165 _ _ _  = notHappyAtAll \n\nhappyReduce_166 = happySpecReduce_1  58# happyReduction_166\nhappyReduction_166 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn65\n                 ((getString happy_var_1, Nothing)\n        )\nhappyReduction_166 _  = notHappyAtAll \n\nhappyReduce_167 = happySpecReduce_3  58# happyReduction_167\nhappyReduction_167 (HappyTerminal happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn65\n                 ((getString happy_var_1, Just (getName happy_var_3))\n        )\nhappyReduction_167 _ _ _  = notHappyAtAll \n\nhappyReduce_168 = happySpecReduce_3  58# happyReduction_168\nhappyReduction_168 (HappyTerminal happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn65\n                 ((getString happy_var_1, Just (getName happy_var_3))\n        )\nhappyReduction_168 _ _ _  = notHappyAtAll \n\nhappyReduce_169 = happySpecReduce_3  58# happyReduction_169\nhappyReduction_169 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn65\n                 ((getString happy_var_1, Just happy_var_3)\n        )\nhappyReduction_169 _ _ _  = notHappyAtAll \n\nhappyReduce_170 = happySpecReduce_1  58# happyReduction_170\nhappyReduction_170 (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn65\n                 ((happy_var_1, Nothing)\n        )\nhappyReduction_170 _  = notHappyAtAll \n\nhappyReduce_171 = happySpecReduce_3  58# happyReduction_171\nhappyReduction_171 (HappyAbsSyn19  happy_var_3)\n        _\n        (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn65\n                 ((happy_var_1, Just happy_var_3)\n        )\nhappyReduction_171 _ _ _  = notHappyAtAll \n\nhappyReduce_172 = happySpecReduce_3  58# happyReduction_172\nhappyReduction_172 (HappyTerminal happy_var_3)\n        _\n        (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn65\n                 ((happy_var_1, Just (getName happy_var_3))\n        )\nhappyReduction_172 _ _ _  = notHappyAtAll \n\nhappyReduce_173 = happySpecReduce_3  58# happyReduction_173\nhappyReduction_173 (HappyTerminal happy_var_3)\n        _\n        (HappyAbsSyn19  happy_var_1)\n         =  HappyAbsSyn65\n                 ((happy_var_1, Just (getName happy_var_3))\n        )\nhappyReduction_173 _ _ _  = notHappyAtAll \n\nhappyReduce_174 = happySpecReduce_3  59# happyReduction_174\nhappyReduction_174 _\n        (HappyAbsSyn24  happy_var_2)\n        _\n         =  HappyAbsSyn19\n                 (getOp happy_var_2\n        )\nhappyReduction_174 _ _ _  = notHappyAtAll \n\nhappyReduce_175 = happySpecReduce_3  59# happyReduction_175\nhappyReduction_175 _\n        _\n        _\n         =  HappyAbsSyn19\n                 (\"-\"\n        )\n\nhappyReduce_176 = happySpecReduce_3  59# happyReduction_176\nhappyReduction_176 _\n        _\n        _\n         =  HappyAbsSyn19\n                 (\".\"\n        )\n\nhappyReduce_177 = happySpecReduce_1  60# happyReduction_177\nhappyReduction_177 (HappyAbsSyn68  happy_var_1)\n         =  HappyAbsSyn67\n                 ([happy_var_1]\n        )\nhappyReduction_177 _  = notHappyAtAll \n\nhappyReduce_178 = happySpecReduce_3  60# happyReduction_178\nhappyReduction_178 (HappyAbsSyn68  happy_var_3)\n        _\n        (HappyAbsSyn67  happy_var_1)\n         =  HappyAbsSyn67\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_178 _ _ _  = notHappyAtAll \n\nhappyReduce_179 = happySpecReduce_2  61# happyReduction_179\nhappyReduction_179 (HappyAbsSyn69  happy_var_2)\n        _\n         =  HappyAbsSyn68\n                 ((True, fst happy_var_2, snd happy_var_2)\n        )\nhappyReduction_179 _ _  = notHappyAtAll \n\nhappyReduce_180 = happySpecReduce_1  61# happyReduction_180\nhappyReduction_180 (HappyAbsSyn69  happy_var_1)\n         =  HappyAbsSyn68\n                 ((False, fst happy_var_1, snd happy_var_1)\n        )\nhappyReduction_180 _  = notHappyAtAll \n\nhappyReduce_181 = happySpecReduce_1  62# happyReduction_181\nhappyReduction_181 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn69\n                 ((getName happy_var_1, happy_var_1)\n        )\nhappyReduction_181 _  = notHappyAtAll \n\nhappyReduce_182 = happySpecReduce_3  62# happyReduction_182\nhappyReduction_182 _\n        (HappyAbsSyn24  happy_var_2)\n        _\n         =  HappyAbsSyn69\n                 ((getOp happy_var_2, happy_var_2)\n        )\nhappyReduction_182 _ _ _  = notHappyAtAll \n\nhappyReduce_183 = happySpecReduce_3  62# happyReduction_183\nhappyReduction_183 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn69\n                 ((\"-\", happy_var_2)\n        )\nhappyReduction_183 _ _ _  = notHappyAtAll \n\nhappyReduce_184 = happySpecReduce_3  62# happyReduction_184\nhappyReduction_184 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn69\n                 ((\".\", happy_var_2)\n        )\nhappyReduction_184 _ _ _  = notHappyAtAll \n\nhappyReduce_185 = happySpecReduce_1  63# happyReduction_185\nhappyReduction_185 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_185 _  = notHappyAtAll \n\nhappyReduce_186 = happySpecReduce_1  63# happyReduction_186\nhappyReduction_186 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_186 _  = notHappyAtAll \n\nhappyReduce_187 = happySpecReduce_1  63# happyReduction_187\nhappyReduction_187 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_187 _  = notHappyAtAll \n\nhappyReduce_188 = happySpecReduce_1  63# happyReduction_188\nhappyReduction_188 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_188 _  = notHappyAtAll \n\nhappyReduce_189 = happySpecReduce_3  63# happyReduction_189\nhappyReduction_189 (HappyAbsSyn8  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_2 (CAnnE happy_var_1 happy_var_3)\n        )\nhappyReduction_189 _ _ _  = notHappyAtAll \n\nhappyReduce_190 = happySpecReduce_3  64# happyReduction_190\nhappyReduction_190 (HappyAbsSyn9  happy_var_3)\n        _\n        (HappyAbsSyn17  happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (fst (head happy_var_1) <-> happy_var_3) (CGuardExprE happy_var_1 happy_var_3)\n        )\nhappyReduction_190 _ _ _  = notHappyAtAll \n\nhappyReduce_191 = happyReduce 6# 65# happyReduction_191\nhappyReduction_191 ((HappyAbsSyn9  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn73  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CLetE happy_var_3 happy_var_6)\n        ) `HappyStk` happyRest\n\nhappyReduce_192 = happyReduce 5# 65# happyReduction_192\nhappyReduction_192 ((HappyAbsSyn9  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn73  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CLetE happy_var_3 happy_var_5)\n        ) `HappyStk` happyRest\n\nhappyReduce_193 = happyReduce 6# 65# happyReduction_193\nhappyReduction_193 ((HappyAbsSyn9  happy_var_6) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn73  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CLetE happy_var_3 happy_var_6)\n        ) `HappyStk` happyRest\n\nhappyReduce_194 = happyReduce 5# 65# happyReduction_194\nhappyReduction_194 ((HappyAbsSyn9  happy_var_5) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn73  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CLetE happy_var_3 happy_var_5)\n        ) `HappyStk` happyRest\n\nhappyReduce_195 = happySpecReduce_1  66# happyReduction_195\nhappyReduction_195 (HappyAbsSyn75  happy_var_1)\n         =  HappyAbsSyn73\n                 ([happy_var_1]\n        )\nhappyReduction_195 _  = notHappyAtAll \n\nhappyReduce_196 = happySpecReduce_3  66# happyReduction_196\nhappyReduction_196 (HappyAbsSyn75  happy_var_3)\n        _\n        (HappyAbsSyn73  happy_var_1)\n         =  HappyAbsSyn73\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_196 _ _ _  = notHappyAtAll \n\nhappyReduce_197 = happySpecReduce_1  67# happyReduction_197\nhappyReduction_197 (HappyAbsSyn75  happy_var_1)\n         =  HappyAbsSyn73\n                 ([happy_var_1]\n        )\nhappyReduction_197 _  = notHappyAtAll \n\nhappyReduce_198 = happySpecReduce_3  67# happyReduction_198\nhappyReduction_198 (HappyAbsSyn75  happy_var_3)\n        _\n        (HappyAbsSyn73  happy_var_1)\n         =  HappyAbsSyn73\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_198 _ _ _  = notHappyAtAll \n\nhappyReduce_199 = happySpecReduce_3  68# happyReduction_199\nhappyReduction_199 (HappyAbsSyn9  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn75\n                 ((EV (getName happy_var_1), happy_var_3)\n        )\nhappyReduction_199 _ _ _  = notHappyAtAll \n\nhappyReduce_200 = happySpecReduce_3  68# happyReduction_200\nhappyReduction_200 (HappyAbsSyn9  happy_var_3)\n        _\n        _\n         =  HappyAbsSyn75\n                 ((EV \"_\", happy_var_3)\n        )\nhappyReduction_200 _ _ _  = notHappyAtAll \n\nhappyReduce_201 = happyReduce 4# 68# happyReduction_201\nhappyReduction_201 ((HappyAbsSyn9  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn17  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn75\n                 ((EV (getName happy_var_1), Loc (happy_var_1 <-> happy_var_4) (CGuardExprE happy_var_2 happy_var_4))\n        ) `HappyStk` happyRest\n\nhappyReduce_202 = happyReduce 4# 69# happyReduction_202\nhappyReduction_202 ((HappyAbsSyn9  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn20  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (at happy_var_1 (CLamE (map EV happy_var_2) happy_var_4)\n        ) `HappyStk` happyRest\n\nhappyReduce_203 = happySpecReduce_1  70# happyReduction_203\nhappyReduction_203 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_203 _  = notHappyAtAll \n\nhappyReduce_204 = happySpecReduce_3  70# happyReduction_204\nhappyReduction_204 (HappyAbsSyn9  happy_var_3)\n        (HappyAbsSyn24  happy_var_2)\n        (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_2 (CBopE happy_var_1 happy_var_2 happy_var_3)\n        )\nhappyReduction_204 _ _ _  = notHappyAtAll \n\nhappyReduce_205 = happySpecReduce_3  70# happyReduction_205\nhappyReduction_205 (HappyAbsSyn9  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_2 (CBopE happy_var_1 happy_var_2 happy_var_3)\n        )\nhappyReduction_205 _ _ _  = notHappyAtAll \n\nhappyReduce_206 = happySpecReduce_3  70# happyReduction_206\nhappyReduction_206 (HappyAbsSyn9  happy_var_3)\n        (HappyTerminal happy_var_2)\n        (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_2 (CBopE happy_var_1 happy_var_2 happy_var_3)\n        )\nhappyReduction_206 _ _ _  = notHappyAtAll \n\nhappyReduce_207 = happySpecReduce_1  71# happyReduction_207\nhappyReduction_207 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_207 _  = notHappyAtAll \n\nhappyReduce_208 = happySpecReduce_2  71# happyReduction_208\nhappyReduction_208 (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CIntE (negate (getInt happy_var_2)))\n        )\nhappyReduction_208 _ _  = notHappyAtAll \n\nhappyReduce_209 = happySpecReduce_2  71# happyReduction_209\nhappyReduction_209 (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CRealE (DS.fromFloatDigits (negate (getFloat happy_var_2))))\n        )\nhappyReduction_209 _ _  = notHappyAtAll \n\nhappyReduce_210 = happySpecReduce_1  72# happyReduction_210\nhappyReduction_210 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_210 _  = notHappyAtAll \n\nhappyReduce_211 = happySpecReduce_2  72# happyReduction_211\nhappyReduction_211 (HappyAbsSyn10  happy_var_2)\n        (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> last happy_var_2) (CAppE happy_var_1 happy_var_2)\n        )\nhappyReduction_211 _ _  = notHappyAtAll \n\nhappyReduce_212 = happySpecReduce_2  73# happyReduction_212\nhappyReduction_212 (HappyAbsSyn9  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_2) (CForceE happy_var_2)\n        )\nhappyReduction_212 _ _  = notHappyAtAll \n\nhappyReduce_213 = happySpecReduce_1  73# happyReduction_213\nhappyReduction_213 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_213 _  = notHappyAtAll \n\nhappyReduce_214 = happySpecReduce_1  74# happyReduction_214\nhappyReduction_214 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_214 _  = notHappyAtAll \n\nhappyReduce_215 = happySpecReduce_2  74# happyReduction_215\nhappyReduction_215 (HappyAbsSyn9  happy_var_2)\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ [happy_var_2]\n        )\nhappyReduction_215 _ _  = notHappyAtAll \n\nhappyReduce_216 = happySpecReduce_1  75# happyReduction_216\nhappyReduction_216 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_216 _  = notHappyAtAll \n\nhappyReduce_217 = happySpecReduce_1  75# happyReduction_217\nhappyReduction_217 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_217 _  = notHappyAtAll \n\nhappyReduce_218 = happySpecReduce_1  75# happyReduction_218\nhappyReduction_218 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_218 _  = notHappyAtAll \n\nhappyReduce_219 = happySpecReduce_1  75# happyReduction_219\nhappyReduction_219 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_219 _  = notHappyAtAll \n\nhappyReduce_220 = happySpecReduce_1  75# happyReduction_220\nhappyReduction_220 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_220 _  = notHappyAtAll \n\nhappyReduce_221 = happySpecReduce_1  75# happyReduction_221\nhappyReduction_221 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_221 _  = notHappyAtAll \n\nhappyReduce_222 = happySpecReduce_1  75# happyReduction_222\nhappyReduction_222 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_222 _  = notHappyAtAll \n\nhappyReduce_223 = happySpecReduce_1  75# happyReduction_223\nhappyReduction_223 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_223 _  = notHappyAtAll \n\nhappyReduce_224 = happySpecReduce_1  75# happyReduction_224\nhappyReduction_224 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_224 _  = notHappyAtAll \n\nhappyReduce_225 = happySpecReduce_1  75# happyReduction_225\nhappyReduction_225 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_225 _  = notHappyAtAll \n\nhappyReduce_226 = happySpecReduce_1  75# happyReduction_226\nhappyReduction_226 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_226 _  = notHappyAtAll \n\nhappyReduce_227 = happySpecReduce_1  75# happyReduction_227\nhappyReduction_227 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_227 _  = notHappyAtAll \n\nhappyReduce_228 = happySpecReduce_1  76# happyReduction_228\nhappyReduction_228 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 CNullE\n        )\nhappyReduction_228 _  = notHappyAtAll \n\nhappyReduce_229 = happySpecReduce_1  77# happyReduction_229\nhappyReduction_229 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CIntrinsicE (getIntrinsicName happy_var_1))\n        )\nhappyReduction_229 _  = notHappyAtAll \n\nhappyReduce_230 = happySpecReduce_2  78# happyReduction_230\nhappyReduction_230 _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 CUniE\n        )\nhappyReduction_230 _ _  = notHappyAtAll \n\nhappyReduce_231 = happySpecReduce_3  78# happyReduction_231\nhappyReduction_231 _\n        (HappyAbsSyn24  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CVarE (EV (getOp happy_var_2)))\n        )\nhappyReduction_231 _ _ _  = notHappyAtAll \n\nhappyReduce_232 = happySpecReduce_3  78# happyReduction_232\nhappyReduction_232 _\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CVarE (EV \"-\"))\n        )\nhappyReduction_232 _ _ _  = notHappyAtAll \n\nhappyReduce_233 = happySpecReduce_3  78# happyReduction_233\nhappyReduction_233 _\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CVarE (EV \".\"))\n        )\nhappyReduction_233 _ _ _  = notHappyAtAll \n\nhappyReduce_234 = happySpecReduce_3  78# happyReduction_234\nhappyReduction_234 (HappyTerminal happy_var_3)\n        (HappyAbsSyn9  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_3) (CParenE happy_var_2)\n        )\nhappyReduction_234 _ _ _  = notHappyAtAll \n\nhappyReduce_235 = happyReduce 5# 78# happyReduction_235\nhappyReduction_235 ((HappyTerminal happy_var_5) `HappyStk`\n        (HappyAbsSyn10  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn9  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_5) (CTupE (happy_var_2 : happy_var_4))\n        ) `HappyStk` happyRest\n\nhappyReduce_236 = happySpecReduce_1  79# happyReduction_236\nhappyReduction_236 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn10\n                 ([happy_var_1]\n        )\nhappyReduction_236 _  = notHappyAtAll \n\nhappyReduce_237 = happySpecReduce_3  79# happyReduction_237\nhappyReduction_237 (HappyAbsSyn9  happy_var_3)\n        _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_237 _ _ _  = notHappyAtAll \n\nhappyReduce_238 = happySpecReduce_3  80# happyReduction_238\nhappyReduction_238 (HappyTerminal happy_var_3)\n        (HappyAbsSyn88  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_3) (CNamE happy_var_2)\n        )\nhappyReduction_238 _ _ _  = notHappyAtAll \n\nhappyReduce_239 = happySpecReduce_1  81# happyReduction_239\nhappyReduction_239 (HappyAbsSyn89  happy_var_1)\n         =  HappyAbsSyn88\n                 ([happy_var_1]\n        )\nhappyReduction_239 _  = notHappyAtAll \n\nhappyReduce_240 = happySpecReduce_3  81# happyReduction_240\nhappyReduction_240 (HappyAbsSyn89  happy_var_3)\n        _\n        (HappyAbsSyn88  happy_var_1)\n         =  HappyAbsSyn88\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_240 _ _ _  = notHappyAtAll \n\nhappyReduce_241 = happySpecReduce_3  82# happyReduction_241\nhappyReduction_241 (HappyAbsSyn9  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn89\n                 ((Key (getName happy_var_1), happy_var_3)\n        )\nhappyReduction_241 _ _ _  = notHappyAtAll \n\nhappyReduce_242 = happySpecReduce_2  83# happyReduction_242\nhappyReduction_242 (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_2) (CLstE [])\n        )\nhappyReduction_242 _ _  = notHappyAtAll \n\nhappyReduce_243 = happySpecReduce_3  83# happyReduction_243\nhappyReduction_243 (HappyTerminal happy_var_3)\n        (HappyAbsSyn10  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_3) (CLstE happy_var_2)\n        )\nhappyReduction_243 _ _ _  = notHappyAtAll \n\nhappyReduce_244 = happyReduce 4# 84# happyReduction_244\nhappyReduction_244 ((HappyTerminal happy_var_4) `HappyStk`\n        (HappyAbsSyn92  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_4) (CDoE happy_var_3)\n        ) `HappyStk` happyRest\n\nhappyReduce_245 = happyReduce 4# 84# happyReduction_245\nhappyReduction_245 ((HappyTerminal happy_var_4) `HappyStk`\n        (HappyAbsSyn92  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_4) (CDoE happy_var_3)\n        ) `HappyStk` happyRest\n\nhappyReduce_246 = happySpecReduce_1  85# happyReduction_246\nhappyReduction_246 (HappyAbsSyn92  happy_var_1)\n         =  HappyAbsSyn92\n                 (happy_var_1\n        )\nhappyReduction_246 _  = notHappyAtAll \n\nhappyReduce_247 = happySpecReduce_3  85# happyReduction_247\nhappyReduction_247 (HappyAbsSyn92  happy_var_3)\n        _\n        (HappyAbsSyn92  happy_var_1)\n         =  HappyAbsSyn92\n                 (happy_var_1 ++ happy_var_3\n        )\nhappyReduction_247 _ _ _  = notHappyAtAll \n\nhappyReduce_248 = happySpecReduce_1  86# happyReduction_248\nhappyReduction_248 (HappyAbsSyn92  happy_var_1)\n         =  HappyAbsSyn92\n                 (happy_var_1\n        )\nhappyReduction_248 _  = notHappyAtAll \n\nhappyReduce_249 = happySpecReduce_3  86# happyReduction_249\nhappyReduction_249 (HappyAbsSyn92  happy_var_3)\n        _\n        (HappyAbsSyn92  happy_var_1)\n         =  HappyAbsSyn92\n                 (happy_var_1 ++ happy_var_3\n        )\nhappyReduction_249 _ _ _  = notHappyAtAll \n\nhappyReduce_250 = happySpecReduce_3  87# happyReduction_250\nhappyReduction_250 (HappyAbsSyn9  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn92\n                 ([CstDoBind (EV (getName happy_var_1)) happy_var_3]\n        )\nhappyReduction_250 _ _ _  = notHappyAtAll \n\nhappyReduce_251 = happyReduce 4# 87# happyReduction_251\nhappyReduction_251 (_ `HappyStk`\n        (HappyAbsSyn73  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn92\n                 ([CstDoLet (EV v) e | (EV v, e) <- happy_var_3]\n        ) `HappyStk` happyRest\n\nhappyReduce_252 = happySpecReduce_1  87# happyReduction_252\nhappyReduction_252 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn92\n                 ([CstDoBare happy_var_1]\n        )\nhappyReduction_252 _  = notHappyAtAll \n\nhappyReduce_253 = happySpecReduce_2  88# happyReduction_253\nhappyReduction_253 (HappyAbsSyn96  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CAccessorE happy_var_2)\n        )\nhappyReduction_253 _ _  = notHappyAtAll \n\nhappyReduce_254 = happySpecReduce_2  88# happyReduction_254\nhappyReduction_254 (HappyAbsSyn96  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CAccessorE happy_var_2)\n        )\nhappyReduction_254 _ _  = notHappyAtAll \n\nhappyReduce_255 = happySpecReduce_2  89# happyReduction_255\nhappyReduction_255 (HappyAbsSyn97  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn96\n                 (CABKey (getName happy_var_1) happy_var_2\n        )\nhappyReduction_255 _ _  = notHappyAtAll \n\nhappyReduce_256 = happySpecReduce_2  89# happyReduction_256\nhappyReduction_256 (HappyAbsSyn97  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn96\n                 (CABIdx (fromInteger (getInt happy_var_1)) happy_var_2\n        )\nhappyReduction_256 _ _  = notHappyAtAll \n\nhappyReduce_257 = happySpecReduce_3  89# happyReduction_257\nhappyReduction_257 _\n        (HappyAbsSyn98  happy_var_2)\n        _\n         =  HappyAbsSyn96\n                 (CABGroup happy_var_2\n        )\nhappyReduction_257 _ _ _  = notHappyAtAll \n\nhappyReduce_258 = happySpecReduce_0  90# happyReduction_258\nhappyReduction_258  =  HappyAbsSyn97\n                 (CATEnd\n        )\n\nhappyReduce_259 = happySpecReduce_2  90# happyReduction_259\nhappyReduction_259 (HappyAbsSyn9  happy_var_2)\n        _\n         =  HappyAbsSyn97\n                 (CATSet happy_var_2\n        )\nhappyReduction_259 _ _  = notHappyAtAll \n\nhappyReduce_260 = happySpecReduce_2  90# happyReduction_260\nhappyReduction_260 (HappyAbsSyn96  happy_var_2)\n        _\n         =  HappyAbsSyn97\n                 (CATChain happy_var_2\n        )\nhappyReduction_260 _ _  = notHappyAtAll \n\nhappyReduce_261 = happySpecReduce_1  91# happyReduction_261\nhappyReduction_261 (HappyAbsSyn96  happy_var_1)\n         =  HappyAbsSyn98\n                 ([happy_var_1]\n        )\nhappyReduction_261 _  = notHappyAtAll \n\nhappyReduce_262 = happySpecReduce_3  91# happyReduction_262\nhappyReduction_262 (HappyAbsSyn96  happy_var_3)\n        _\n        (HappyAbsSyn98  happy_var_1)\n         =  HappyAbsSyn98\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_262 _ _ _  = notHappyAtAll \n\nhappyReduce_263 = happySpecReduce_2  92# happyReduction_263\nhappyReduction_263 (HappyAbsSyn96  happy_var_2)\n        _\n         =  HappyAbsSyn96\n                 (happy_var_2\n        )\nhappyReduction_263 _ _  = notHappyAtAll \n\nhappyReduce_264 = happySpecReduce_2  92# happyReduction_264\nhappyReduction_264 (HappyAbsSyn96  happy_var_2)\n        _\n         =  HappyAbsSyn96\n                 (happy_var_2\n        )\nhappyReduction_264 _ _  = notHappyAtAll \n\nhappyReduce_265 = happySpecReduce_3  93# happyReduction_265\nhappyReduction_265 (HappyTerminal happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_3) (CVarE (EV (getName happy_var_1 <> \".\" <> getName happy_var_3)))\n        )\nhappyReduction_265 _ _ _  = notHappyAtAll \n\nhappyReduce_266 = happySpecReduce_3  93# happyReduction_266\nhappyReduction_266 (HappyTerminal happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_3) (CLabeledVarE (getName happy_var_1) (EV (getName happy_var_3)))\n        )\nhappyReduction_266 _ _ _  = notHappyAtAll \n\nhappyReduce_267 = happySpecReduce_1  93# happyReduction_267\nhappyReduction_267 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CVarE (EV (getName happy_var_1)))\n        )\nhappyReduction_267 _  = notHappyAtAll \n\nhappyReduce_268 = happySpecReduce_1  94# happyReduction_268\nhappyReduction_268 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 CHolE\n        )\nhappyReduction_268 _  = notHappyAtAll \n\nhappyReduce_269 = happySpecReduce_1  95# happyReduction_269\nhappyReduction_269 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CLogE True)\n        )\nhappyReduction_269 _  = notHappyAtAll \n\nhappyReduce_270 = happySpecReduce_1  95# happyReduction_270\nhappyReduction_270 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CLogE False)\n        )\nhappyReduction_270 _  = notHappyAtAll \n\nhappyReduce_271 = happySpecReduce_1  96# happyReduction_271\nhappyReduction_271 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CIntE (getInt happy_var_1))\n        )\nhappyReduction_271 _  = notHappyAtAll \n\nhappyReduce_272 = happySpecReduce_1  96# happyReduction_272\nhappyReduction_272 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CRealE (DS.fromFloatDigits (getFloat happy_var_1)))\n        )\nhappyReduction_272 _  = notHappyAtAll \n\nhappyReduce_273 = happySpecReduce_1  97# happyReduction_273\nhappyReduction_273 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (at happy_var_1 (CStrE (getString happy_var_1))\n        )\nhappyReduction_273 _  = notHappyAtAll \n\nhappyReduce_274 = happySpecReduce_1  97# happyReduction_274\nhappyReduction_274 (HappyAbsSyn9  happy_var_1)\n         =  HappyAbsSyn9\n                 (happy_var_1\n        )\nhappyReduction_274 _  = notHappyAtAll \n\nhappyReduce_275 = happySpecReduce_3  98# happyReduction_275\nhappyReduction_275 (HappyTerminal happy_var_3)\n        (HappyAbsSyn106  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn9\n                 (Loc (happy_var_1 <-> happy_var_3) (CInterpE (getString happy_var_1) (fst happy_var_2) (snd happy_var_2) (getString happy_var_3))\n        )\nhappyReduction_275 _ _ _  = notHappyAtAll \n\nhappyReduce_276 = happySpecReduce_3  99# happyReduction_276\nhappyReduction_276 _\n        (HappyAbsSyn9  happy_var_2)\n        _\n         =  HappyAbsSyn106\n                 (([happy_var_2], [])\n        )\nhappyReduction_276 _ _ _  = notHappyAtAll \n\nhappyReduce_277 = happyReduce 5# 99# happyReduction_277\nhappyReduction_277 (_ `HappyStk`\n        (HappyAbsSyn9  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyTerminal happy_var_2) `HappyStk`\n        (HappyAbsSyn106  happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn106\n                 (let (es, ms) = happy_var_1 in (es ++ [happy_var_4], ms ++ [getString happy_var_2])\n        ) `HappyStk` happyRest\n\nhappyReduce_278 = happySpecReduce_1  100# happyReduction_278\nhappyReduction_278 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_278 _  = notHappyAtAll \n\nhappyReduce_279 = happySpecReduce_1  100# happyReduction_279\nhappyReduction_279 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_279 _  = notHappyAtAll \n\nhappyReduce_280 = happySpecReduce_3  101# happyReduction_280\nhappyReduction_280 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (case happy_var_3 of { FunU args ret -> FunU (happy_var_1 : args) ret; t -> FunU [happy_var_1] t }\n        )\nhappyReduction_280 _ _ _  = notHappyAtAll \n\nhappyReduce_281 = happySpecReduce_3  102# happyReduction_281\nhappyReduction_281 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (ExistU (TV (getName happy_var_2)) ([], Open) ([], Open)\n        )\nhappyReduction_281 _ _ _  = notHappyAtAll \n\nhappyReduce_282 = happyReduce 4# 102# happyReduction_282\nhappyReduction_282 ((HappyAbsSyn8  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn116  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn8\n                 (EffectU (EffectSet (Set.fromList happy_var_2)) happy_var_4\n        ) `HappyStk` happyRest\n\nhappyReduce_283 = happySpecReduce_1  102# happyReduction_283\nhappyReduction_283 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_283 _  = notHappyAtAll \n\nhappyReduce_284 = happySpecReduce_3  103# happyReduction_284\nhappyReduction_284 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatAddU happy_var_1 happy_var_3\n        )\nhappyReduction_284 _ _ _  = notHappyAtAll \n\nhappyReduce_285 = happySpecReduce_3  103# happyReduction_285\nhappyReduction_285 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatSubU happy_var_1 happy_var_3\n        )\nhappyReduction_285 _ _ _  = notHappyAtAll \n\nhappyReduce_286 = happySpecReduce_1  103# happyReduction_286\nhappyReduction_286 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_286 _  = notHappyAtAll \n\nhappyReduce_287 = happySpecReduce_3  104# happyReduction_287\nhappyReduction_287 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatMulU happy_var_1 happy_var_3\n        )\nhappyReduction_287 _ _ _  = notHappyAtAll \n\nhappyReduce_288 = happySpecReduce_3  104# happyReduction_288\nhappyReduction_288 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (NatDivU happy_var_1 happy_var_3\n        )\nhappyReduction_288 _ _ _  = notHappyAtAll \n\nhappyReduce_289 = happySpecReduce_1  104# happyReduction_289\nhappyReduction_289 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_289 _  = notHappyAtAll \n\nhappyReduce_290 = happySpecReduce_2  105# happyReduction_290\nhappyReduction_290 (HappyAbsSyn8  happy_var_2)\n        (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (applyType happy_var_1 happy_var_2\n        )\nhappyReduction_290 _ _  = notHappyAtAll \n\nhappyReduce_291 = happySpecReduce_1  105# happyReduction_291\nhappyReduction_291 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn8\n                 (happy_var_1\n        )\nhappyReduction_291 _  = notHappyAtAll \n\nhappyReduce_292 = happySpecReduce_2  106# happyReduction_292\nhappyReduction_292 _\n        _\n         =  HappyAbsSyn8\n                 (BT.unitU\n        )\n\nhappyReduce_293 = happySpecReduce_3  106# happyReduction_293\nhappyReduction_293 _\n        (HappyAbsSyn8  happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (happy_var_2\n        )\nhappyReduction_293 _ _ _  = notHappyAtAll \n\nhappyReduce_294 = happyReduce 5# 106# happyReduction_294\nhappyReduction_294 (_ `HappyStk`\n        (HappyAbsSyn43  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn8  happy_var_2) `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn8\n                 (BT.tupleU (happy_var_2 : happy_var_4)\n        ) `HappyStk` happyRest\n\nhappyReduce_295 = happySpecReduce_3  106# happyReduction_295\nhappyReduction_295 _\n        (HappyAbsSyn8  happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (BT.listU happy_var_2\n        )\nhappyReduction_295 _ _ _  = notHappyAtAll \n\nhappyReduce_296 = happySpecReduce_2  106# happyReduction_296\nhappyReduction_296 (HappyAbsSyn8  happy_var_2)\n        _\n         =  HappyAbsSyn8\n                 (OptionalU happy_var_2\n        )\nhappyReduction_296 _ _  = notHappyAtAll \n\nhappyReduce_297 = happySpecReduce_1  106# happyReduction_297\nhappyReduction_297 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (VarU (TV (getName happy_var_1))\n        )\nhappyReduction_297 _  = notHappyAtAll \n\nhappyReduce_298 = happySpecReduce_3  106# happyReduction_298\nhappyReduction_298 (HappyAbsSyn8  happy_var_3)\n        _\n        _\n         =  HappyAbsSyn8\n                 (happy_var_3\n        )\nhappyReduction_298 _ _ _  = notHappyAtAll \n\nhappyReduce_299 = happySpecReduce_1  106# happyReduction_299\nhappyReduction_299 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (VarU (TV (getName happy_var_1))\n        )\nhappyReduction_299 _  = notHappyAtAll \n\nhappyReduce_300 = happySpecReduce_1  106# happyReduction_300\nhappyReduction_300 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (VarU (TV (getString happy_var_1))\n        )\nhappyReduction_300 _  = notHappyAtAll \n\nhappyReduce_301 = happySpecReduce_1  106# happyReduction_301\nhappyReduction_301 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn8\n                 (NatLitU (getInt happy_var_1)\n        )\nhappyReduction_301 _  = notHappyAtAll \n\nhappyReduce_302 = happySpecReduce_1  107# happyReduction_302\nhappyReduction_302 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn43\n                 ([happy_var_1]\n        )\nhappyReduction_302 _  = notHappyAtAll \n\nhappyReduce_303 = happySpecReduce_3  107# happyReduction_303\nhappyReduction_303 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn43  happy_var_1)\n         =  HappyAbsSyn43\n                 (happy_var_1 ++ [happy_var_3]\n        )\nhappyReduction_303 _ _ _  = notHappyAtAll \n\nhappyReduce_304 = happySpecReduce_1  108# happyReduction_304\nhappyReduction_304 (HappyAbsSyn8  happy_var_1)\n         =  HappyAbsSyn43\n                 ([happy_var_1]\n        )\nhappyReduction_304 _  = notHappyAtAll \n\nhappyReduce_305 = happySpecReduce_2  108# happyReduction_305\nhappyReduction_305 (HappyAbsSyn8  happy_var_2)\n        (HappyAbsSyn43  happy_var_1)\n         =  HappyAbsSyn43\n                 (happy_var_1 ++ [happy_var_2]\n        )\nhappyReduction_305 _ _  = notHappyAtAll \n\nhappyReduce_306 = happySpecReduce_1  109# happyReduction_306\nhappyReduction_306 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn116\n                 ([getName happy_var_1]\n        )\nhappyReduction_306 _  = notHappyAtAll \n\nhappyReduce_307 = happySpecReduce_3  109# happyReduction_307\nhappyReduction_307 (HappyTerminal happy_var_3)\n        _\n        (HappyAbsSyn116  happy_var_1)\n         =  HappyAbsSyn116\n                 (happy_var_1 ++ [getName happy_var_3]\n        )\nhappyReduction_307 _ _ _  = notHappyAtAll \n\nhappyReduce_308 = happySpecReduce_3  110# happyReduction_308\nhappyReduction_308 (HappyAbsSyn118  happy_var_3)\n        _\n        (HappyAbsSyn118  happy_var_1)\n         =  HappyAbsSyn117\n                 (CstSigType (Just happy_var_1) happy_var_3\n        )\nhappyReduction_308 _ _ _  = notHappyAtAll \n\nhappyReduce_309 = happySpecReduce_1  110# happyReduction_309\nhappyReduction_309 (HappyAbsSyn118  happy_var_1)\n         =  HappyAbsSyn117\n                 (CstSigType Nothing happy_var_1\n        )\nhappyReduction_309 _  = notHappyAtAll \n\nhappyReduce_310 = happySpecReduce_3  111# happyReduction_310\nhappyReduction_310 (HappyAbsSyn118  happy_var_3)\n        _\n        (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn118\n                 (happy_var_1 : happy_var_3\n        )\nhappyReduction_310 _ _ _  = notHappyAtAll \n\nhappyReduce_311 = happySpecReduce_1  111# happyReduction_311\nhappyReduction_311 (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn118\n                 ([happy_var_1]\n        )\nhappyReduction_311 _  = notHappyAtAll \n\nhappyReduce_312 = happySpecReduce_3  112# happyReduction_312\nhappyReduction_312 _\n        (HappyTerminal happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, ExistU (TV (getName happy_var_2)) ([], Open) ([], Open))\n        )\nhappyReduction_312 _ _ _  = notHappyAtAll \n\nhappyReduce_313 = happyReduce 4# 112# happyReduction_313\nhappyReduction_313 ((HappyAbsSyn119  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn116  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn119\n                 ((locPos happy_var_1, EffectU (EffectSet (Set.fromList happy_var_2)) (snd happy_var_4))\n        ) `HappyStk` happyRest\n\nhappyReduce_314 = happySpecReduce_1  112# happyReduction_314\nhappyReduction_314 (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 (happy_var_1\n        )\nhappyReduction_314 _  = notHappyAtAll \n\nhappyReduce_315 = happySpecReduce_3  113# happyReduction_315\nhappyReduction_315 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 ((fst happy_var_1, NatAddU (snd happy_var_1) happy_var_3)\n        )\nhappyReduction_315 _ _ _  = notHappyAtAll \n\nhappyReduce_316 = happySpecReduce_3  113# happyReduction_316\nhappyReduction_316 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 ((fst happy_var_1, NatSubU (snd happy_var_1) happy_var_3)\n        )\nhappyReduction_316 _ _ _  = notHappyAtAll \n\nhappyReduce_317 = happySpecReduce_1  113# happyReduction_317\nhappyReduction_317 (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 (happy_var_1\n        )\nhappyReduction_317 _  = notHappyAtAll \n\nhappyReduce_318 = happySpecReduce_3  114# happyReduction_318\nhappyReduction_318 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 ((fst happy_var_1, NatMulU (snd happy_var_1) happy_var_3)\n        )\nhappyReduction_318 _ _ _  = notHappyAtAll \n\nhappyReduce_319 = happySpecReduce_3  114# happyReduction_319\nhappyReduction_319 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 ((fst happy_var_1, NatDivU (snd happy_var_1) happy_var_3)\n        )\nhappyReduction_319 _ _ _  = notHappyAtAll \n\nhappyReduce_320 = happySpecReduce_1  114# happyReduction_320\nhappyReduction_320 (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 (happy_var_1\n        )\nhappyReduction_320 _  = notHappyAtAll \n\nhappyReduce_321 = happySpecReduce_2  115# happyReduction_321\nhappyReduction_321 (HappyAbsSyn8  happy_var_2)\n        (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 ((fst happy_var_1, applyType (snd happy_var_1) happy_var_2)\n        )\nhappyReduction_321 _ _  = notHappyAtAll \n\nhappyReduce_322 = happySpecReduce_1  115# happyReduction_322\nhappyReduction_322 (HappyAbsSyn119  happy_var_1)\n         =  HappyAbsSyn119\n                 (happy_var_1\n        )\nhappyReduction_322 _  = notHappyAtAll \n\nhappyReduce_323 = happySpecReduce_2  116# happyReduction_323\nhappyReduction_323 _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, BT.unitU)\n        )\nhappyReduction_323 _ _  = notHappyAtAll \n\nhappyReduce_324 = happySpecReduce_3  116# happyReduction_324\nhappyReduction_324 _\n        (HappyAbsSyn8  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, happy_var_2)\n        )\nhappyReduction_324 _ _ _  = notHappyAtAll \n\nhappyReduce_325 = happyReduce 5# 116# happyReduction_325\nhappyReduction_325 (_ `HappyStk`\n        (HappyAbsSyn43  happy_var_4) `HappyStk`\n        _ `HappyStk`\n        (HappyAbsSyn8  happy_var_2) `HappyStk`\n        (HappyTerminal happy_var_1) `HappyStk`\n        happyRest)\n         = HappyAbsSyn119\n                 ((locPos happy_var_1, BT.tupleU (happy_var_2 : happy_var_4))\n        ) `HappyStk` happyRest\n\nhappyReduce_326 = happySpecReduce_3  116# happyReduction_326\nhappyReduction_326 _\n        (HappyAbsSyn8  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, BT.listU happy_var_2)\n        )\nhappyReduction_326 _ _ _  = notHappyAtAll \n\nhappyReduce_327 = happySpecReduce_2  116# happyReduction_327\nhappyReduction_327 (HappyAbsSyn119  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, OptionalU (snd happy_var_2))\n        )\nhappyReduction_327 _ _  = notHappyAtAll \n\nhappyReduce_328 = happySpecReduce_1  116# happyReduction_328\nhappyReduction_328 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, VarU (TV (getName happy_var_1)))\n        )\nhappyReduction_328 _  = notHappyAtAll \n\nhappyReduce_329 = happySpecReduce_3  116# happyReduction_329\nhappyReduction_329 (HappyAbsSyn8  happy_var_3)\n        _\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, LabeledU (TV (getName happy_var_1)) happy_var_3)\n        )\nhappyReduction_329 _ _ _  = notHappyAtAll \n\nhappyReduce_330 = happySpecReduce_1  116# happyReduction_330\nhappyReduction_330 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, VarU (TV (getName happy_var_1)))\n        )\nhappyReduction_330 _  = notHappyAtAll \n\nhappyReduce_331 = happySpecReduce_1  116# happyReduction_331\nhappyReduction_331 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, VarU (TV (getString happy_var_1)))\n        )\nhappyReduction_331 _  = notHappyAtAll \n\nhappyReduce_332 = happySpecReduce_1  116# happyReduction_332\nhappyReduction_332 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn119\n                 ((locPos happy_var_1, NatLitU (getInt happy_var_1))\n        )\nhappyReduction_332 _  = notHappyAtAll \n\nhappyReduce_333 = happySpecReduce_2  117# happyReduction_333\nhappyReduction_333 (HappyAbsSyn43  happy_var_2)\n        (HappyTerminal happy_var_1)\n         =  HappyAbsSyn124\n                 (Constraint (ClassName (getName happy_var_1)) happy_var_2\n        )\nhappyReduction_333 _ _  = notHappyAtAll \n\nhappyReduce_334 = happySpecReduce_1  118# happyReduction_334\nhappyReduction_334 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_334 _  = notHappyAtAll \n\nhappyReduce_335 = happySpecReduce_1  118# happyReduction_335\nhappyReduction_335 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_335 _  = notHappyAtAll \n\nhappyReduce_336 = happySpecReduce_1  118# happyReduction_336\nhappyReduction_336 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_336 _  = notHappyAtAll \n\nhappyReduce_337 = happySpecReduce_1  118# happyReduction_337\nhappyReduction_337 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_337 _  = notHappyAtAll \n\nhappyReduce_338 = happySpecReduce_1  118# happyReduction_338\nhappyReduction_338 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_338 _  = notHappyAtAll \n\nhappyReduce_339 = happySpecReduce_1  118# happyReduction_339\nhappyReduction_339 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_339 _  = notHappyAtAll \n\nhappyReduce_340 = happySpecReduce_1  119# happyReduction_340\nhappyReduction_340 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn24\n                 (happy_var_1\n        )\nhappyReduction_340 _  = notHappyAtAll \n\nhappyReduce_341 = happySpecReduce_3  119# happyReduction_341\nhappyReduction_341 _\n        (HappyAbsSyn24  happy_var_2)\n        _\n         =  HappyAbsSyn24\n                 (happy_var_2\n        )\nhappyReduction_341 _ _ _  = notHappyAtAll \n\nhappyReduce_342 = happySpecReduce_3  119# happyReduction_342\nhappyReduction_342 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn24\n                 (happy_var_2\n        )\nhappyReduction_342 _ _ _  = notHappyAtAll \n\nhappyReduce_343 = happySpecReduce_3  119# happyReduction_343\nhappyReduction_343 _\n        (HappyTerminal happy_var_2)\n        _\n         =  HappyAbsSyn24\n                 (happy_var_2\n        )\nhappyReduction_343 _ _ _  = notHappyAtAll \n\nhappyReduce_344 = happySpecReduce_0  120# happyReduction_344\nhappyReduction_344  =  HappyAbsSyn10\n                 ([]\n        )\n\nhappyReduce_345 = happyReduce 4# 120# happyReduction_345\nhappyReduction_345 (_ `HappyStk`\n        (HappyAbsSyn10  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 (happy_var_3\n        ) `HappyStk` happyRest\n\nhappyReduce_346 = happyReduce 4# 120# happyReduction_346\nhappyReduction_346 (_ `HappyStk`\n        (HappyAbsSyn10  happy_var_3) `HappyStk`\n        _ `HappyStk`\n        _ `HappyStk`\n        happyRest)\n         = HappyAbsSyn10\n                 (happy_var_3\n        ) `HappyStk` happyRest\n\nhappyReduce_347 = happySpecReduce_1  121# happyReduction_347\nhappyReduction_347 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_347 _  = notHappyAtAll \n\nhappyReduce_348 = happySpecReduce_3  121# happyReduction_348\nhappyReduction_348 (HappyAbsSyn10  happy_var_3)\n        _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ happy_var_3\n        )\nhappyReduction_348 _ _ _  = notHappyAtAll \n\nhappyReduce_349 = happySpecReduce_1  122# happyReduction_349\nhappyReduction_349 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_349 _  = notHappyAtAll \n\nhappyReduce_350 = happySpecReduce_3  122# happyReduction_350\nhappyReduction_350 (HappyAbsSyn10  happy_var_3)\n        _\n        (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1 ++ happy_var_3\n        )\nhappyReduction_350 _ _ _  = notHappyAtAll \n\nhappyReduce_351 = happySpecReduce_1  123# happyReduction_351\nhappyReduction_351 (HappyAbsSyn10  happy_var_1)\n         =  HappyAbsSyn10\n                 (happy_var_1\n        )\nhappyReduction_351 _  = notHappyAtAll \n\nhappyReduce_352 = happySpecReduce_0  124# happyReduction_352\nhappyReduction_352  =  HappyAbsSyn20\n                 ([]\n        )\n\nhappyReduce_353 = happySpecReduce_2  124# happyReduction_353\nhappyReduction_353 (HappyTerminal happy_var_2)\n        (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn20\n                 (happy_var_1 ++ [getName happy_var_2]\n        )\nhappyReduction_353 _ _  = notHappyAtAll \n\nhappyReduce_354 = happySpecReduce_1  125# happyReduction_354\nhappyReduction_354 (HappyTerminal happy_var_1)\n         =  HappyAbsSyn20\n                 ([getName happy_var_1]\n        )\nhappyReduction_354 _  = notHappyAtAll \n\nhappyReduce_355 = happySpecReduce_2  125# happyReduction_355\nhappyReduction_355 (HappyTerminal happy_var_2)\n        (HappyAbsSyn20  happy_var_1)\n         =  HappyAbsSyn20\n                 (happy_var_1 ++ [getName happy_var_2]\n        )\nhappyReduction_355 _ _  = notHappyAtAll \n\nhappyTerminalToTok term = case term of {\n        Located _ TokVLBrace _ -> 2#;\n        Located _ TokVRBrace _ -> 3#;\n        Located _ TokVSemi _ -> 4#;\n        Located _ TokLParen _ -> 5#;\n        Located _ TokRParen _ -> 6#;\n        Located _ TokLBracket _ -> 7#;\n        Located _ TokRBracket _ -> 8#;\n        Located _ TokLBrace _ -> 9#;\n        Located _ TokRBrace _ -> 10#;\n        Located _ TokLAngle _ -> 11#;\n        Located _ TokRAngle _ -> 12#;\n        Located _ TokComma _ -> 13#;\n        Located _ TokBackslash _ -> 14#;\n        Located _ TokUnderscore _ -> 15#;\n        Located _ TokBang _ -> 16#;\n        Located _ TokQuestion _ -> 17#;\n        Located _ TokDot _ -> 18#;\n        Located _ TokGetterDot _ -> 19#;\n        Located _ TokNsDot _ -> 20#;\n        Located _ TokLabelColon _ -> 21#;\n        Located _ TokGetterDotChain _ -> 22#;\n        Located _ TokEquals _ -> 23#;\n        Located _ TokDColon _ -> 24#;\n        Located _ TokArrow _ -> 25#;\n        Located _ TokFatArrow _ -> 26#;\n        Located _ TokBind _ -> 27#;\n        Located _ TokStar _ -> 28#;\n        Located _ TokMinus _ -> 29#;\n        Located _ TokColon _ -> 30#;\n        Located _ TokModule _ -> 31#;\n        Located _ TokImport _ -> 32#;\n        Located _ TokSource _ -> 33#;\n        Located _ TokFrom _ -> 34#;\n        Located _ TokWhere _ -> 35#;\n        Located _ TokAs _ -> 36#;\n        Located _ TokTrue _ -> 37#;\n        Located _ TokFalse _ -> 38#;\n        Located _ TokType _ -> 39#;\n        Located _ TokRecord _ -> 40#;\n        Located _ TokObject _ -> 41#;\n        Located _ TokTable _ -> 42#;\n        Located _ TokClass _ -> 43#;\n        Located _ TokInstance _ -> 44#;\n        Located _ TokInfixl _ -> 45#;\n        Located _ TokInfixr _ -> 46#;\n        Located _ TokInfix _ -> 47#;\n        Located _ TokLet _ -> 48#;\n        Located _ TokIn _ -> 49#;\n        Located _ TokDo _ -> 50#;\n        Located _ TokNull _ -> 51#;\n        Located _ (TokLowerName _) _ -> 52#;\n        Located _ (TokUpperName _) _ -> 53#;\n        Located _ (TokOperator \"+\") _ -> 54#;\n        Located _ (TokOperator \"/\") _ -> 55#;\n        Located _ (TokOperator _) _ -> 56#;\n        Located _ (TokInteger _) _ -> 57#;\n        Located _ (TokFloat _) _ -> 58#;\n        Located _ (TokString _) _ -> 59#;\n        Located _ (TokStringStart _) _ -> 60#;\n        Located _ (TokStringMid _) _ -> 61#;\n        Located _ (TokStringEnd _) _ -> 62#;\n        Located _ TokInterpOpen _ -> 63#;\n        Located _ TokInterpClose _ -> 64#;\n        Located _ (TokIntrinsic _) _ -> 65#;\n        Located _ TokSemicolon _ -> 66#;\n        Located _ TokPragmaInline _ -> 67#;\n        Located _ TokEOF _ -> 68#;\n        _ -> -1#;\n        }\n{-# NOINLINE happyTerminalToTok #-}\n\nhappyLex kend  _kmore []       = kend notHappyAtAll []\nhappyLex _kend kmore  (tk:tks) = kmore (happyTerminalToTok tk) tk tks\n{-# INLINE happyLex #-}\n\nhappyNewToken action sts stk = happyLex (\\tk -> happyDoAction 69# notHappyAtAll action sts stk) (\\i tk -> happyDoAction i tk action sts stk)\n\nhappyReport 69# tk explist resume tks = happyReport' tks explist resume\nhappyReport _ tk explist resume tks = happyReport' (tk:tks) explist (\\tks -> resume (Happy_Prelude.tail tks))\n\n\nhappyThen :: () => (P a) -> (a -> (P b)) -> (P b)\nhappyThen = ((>>=))\nhappyReturn :: () => a -> (P a)\nhappyReturn = (return)\nhappyThen1 m k tks = ((>>=)) m (\\a -> k a tks)\nhappyFmap1 f m tks = happyThen (m tks) (\\a -> happyReturn (f a))\nhappyReturn1 :: () => a -> b -> (P a)\nhappyReturn1 = \\a tks -> (return) a\nhappyReport' :: () => [(Located)] -> [Happy_Prelude.String] -> ([(Located)] -> (P a)) -> (P a)\nhappyReport' = (\\tokens expected resume -> (parseError) (tokens, expected))\n\nhappyAbort :: () => [(Located)] -> (P a)\nhappyAbort = Happy_Prelude.error \"Called abort handler in non-resumptive parser\"\n\nparseProgram tks = happySomeParser where\n happySomeParser = happyThen (happyDoParse 0# tks) (\\x -> case x of {HappyAbsSyn7 z -> happyReturn z; _other -> notHappyAtAll })\n\nparseTypeOnly tks = happySomeParser where\n happySomeParser = happyThen (happyDoParse 1# tks) (\\x -> case x of {HappyAbsSyn8 z -> happyReturn z; _other -> notHappyAtAll })\n\nparseExprOnly tks = happySomeParser where\n happySomeParser = happyThen (happyDoParse 2# tks) (\\x -> case x of {HappyAbsSyn9 z -> happyReturn z; _other -> notHappyAtAll })\n\nhappySeq = happyDontSeq\n\n\n--------------------------------------------------------------------\n-- Parser monad\n--------------------------------------------------------------------\n\ndata PState = PState\n  { psExpIndex    :: !Int\n  , psSourceMap   :: !(Map.Map Int SrcLoc)\n  , psModulePath  :: !(Maybe Path)\n  , psModuleConfig :: !ModuleConfig\n  , psDocMap      :: !(Map.Map Pos [Text])\n  , psSourceLines :: ![Text]\n  , psLangMap :: !(Map.Map T.Text Lang) -- alias -> Lang for all known languages\n  , psProjectRoot :: !(Maybe Path) -- project root (directory of entry-point file)\n  , psTermDocs    :: !(Map.Map EVar [Text])\n  , psWarnings    :: ![Text] -- docstring warnings accumulated during desugar\n  , psModuleDoc   :: ![Text] -- module-level description\n  , psModuleEpilogues :: ![[Text]] -- epilogue blocks\n  }\n  deriving (Show)\n\nemptyPState :: PState\nemptyPState = PState 1 Map.empty Nothing defaultValue Map.empty [] Map.empty Nothing Map.empty [] [] []\n\ntype P a = State.StateT PState (Either ParseError) a\n\n--------------------------------------------------------------------\n-- Token extraction helpers\n--------------------------------------------------------------------\n\ngetName :: Located -> Text\ngetName (Located _ (TokLowerName n) _) = n\ngetName (Located _ (TokUpperName n) _) = n\ngetName (Located _ _ t) = t\n\ngetInt :: Located -> Integer\ngetInt (Located _ (TokInteger n) _) = n\ngetInt _ = 0\n\ngetFloat :: Located -> Double\ngetFloat (Located _ (TokFloat d) _) = d\ngetFloat _ = 0\n\ngetString :: Located -> Text\ngetString (Located _ (TokString s) _) = s\ngetString (Located _ (TokStringStart s) _) = s\ngetString (Located _ (TokStringMid s) _) = s\ngetString (Located _ (TokStringEnd s) _) = s\ngetString (Located _ _ t) = t\n\ngetIntrinsicName :: Located -> Text\ngetIntrinsicName (Located _ (TokIntrinsic n) _) = n\ngetIntrinsicName _ = \"\"\n\nparseKind :: Text -> Kind\nparseKind \"Nat\" = KindNat\nparseKind _ = KindType\n\ngetOp :: Located -> Text\ngetOp (Located _ (TokOperator t) _) = t\ngetOp (Located _ TokMinus _) = \"-\"\ngetOp (Located _ TokStar _) = \"*\"\ngetOp (Located _ TokDot _) = \".\"\ngetOp (Located _ TokLAngle _) = \"<\"\ngetOp (Located _ TokRAngle _) = \">\"\ngetOp (Located _ _ t) = t\n\ntoEVar :: Located -> EVar\ntoEVar (Located _ (TokLowerName n) _) = EV n\ntoEVar (Located _ (TokOperator n) _) = EV n\ntoEVar (Located _ TokMinus _) = EV \"-\"\ntoEVar (Located _ TokStar _) = EV \"*\"\ntoEVar (Located _ TokDot _) = EV \".\"\ntoEVar (Located _ TokLAngle _) = EV \"<\"\ntoEVar (Located _ TokRAngle _) = EV \">\"\ntoEVar _ = EV \"?\"\n\n--------------------------------------------------------------------\n-- Type helper\n--------------------------------------------------------------------\n\napplyType :: TypeU -> TypeU -> TypeU\napplyType (AppU f args) x = AppU f (args ++ [x])\napplyType f x = AppU f [x]\n\n--------------------------------------------------------------------\n-- Error handling\n--------------------------------------------------------------------\n\nparseError :: ([Located], [String]) -> P a\nparseError ([], expected) = do\n  srcLines <- State.gets psSourceLines\n  State.lift (Left (ParseError (Pos 0 0 \"\") \"unexpected end of input\" expected srcLines))\nparseError (Located pos tok _ : _, expected) = do\n  srcLines <- State.gets psSourceLines\n  State.lift (Left (ParseError pos (\"unexpected \" ++ showToken tok) expected srcLines))\n\n--------------------------------------------------------------------\n-- Desugar bridge\n--------------------------------------------------------------------\n\ntoDState :: PState -> DState\ntoDState ps = DState\n  { dsExpIndex = psExpIndex ps\n  , dsSourceMap = psSourceMap ps\n  , dsDocMap = psDocMap ps\n  , dsModulePath = psModulePath ps\n  , dsModuleConfig = psModuleConfig ps\n  , dsSourceLines = psSourceLines ps\n  , dsLangMap = psLangMap ps\n  , dsProjectRoot = psProjectRoot ps\n  , dsTermDocs = psTermDocs ps\n  , dsWarnings = psWarnings ps\n  , dsModuleDoc = psModuleDoc ps\n  , dsModuleEpilogues = psModuleEpilogues ps\n  }\n\nfromDState :: PState -> DState -> PState\nfromDState ps ds = ps\n  { psExpIndex = dsExpIndex ds\n  , psSourceMap = dsSourceMap ds\n  , psTermDocs = dsTermDocs ds\n  , psWarnings = dsWarnings ds\n  , psModuleDoc = dsModuleDoc ds\n  , psModuleEpilogues = dsModuleEpilogues ds\n  }\n\n-- | Run parse + desugar\nparseAndDesugar :: PState -> [Located] -> Either ParseError ([ExprI], PState)\nparseAndDesugar pstate tokens =\n  case State.runStateT (parseProgram tokens) pstate of\n    Left err -> Left err\n    Right ((cstNodes, isImplicitMain), _parseState) ->\n      let dstate = toDState pstate\n      in case State.runStateT (desugarProgram isImplicitMain cstNodes) dstate of\n        Left err -> Left err\n        Right (exprIs, finalDState) ->\n          Right (exprIs, fromDState pstate finalDState)\n\n-- | Parse and desugar a single expression\nparseAndDesugarExpr :: PState -> [Located] -> Either ParseError (ExprI, PState)\nparseAndDesugarExpr pstate tokens =\n  case State.runStateT (parseExprOnly tokens) pstate of\n    Left err -> Left err\n    Right (cstExpr, _parseState) ->\n      let dstate = toDState pstate\n      in case State.runStateT (desugarExpr cstExpr) dstate of\n        Left err -> Left err\n        Right (exprI, finalDState) ->\n          Right (exprI, fromDState pstate finalDState)\n\n--------------------------------------------------------------------\n-- Public API\n--------------------------------------------------------------------\n\nreadProgram ::\n  Maybe MVar ->\n  Maybe Path ->\n  Text ->\n  PState ->\n  DAG MVar Import ExprI ->\n  Either String (DAG MVar Import ExprI, PState)\nreadProgram expectedName modulePath sourceCode pstate dag = do\n  let filename = maybe \"<expr>\" id modulePath\n  (tokens, docMap, groupToks) <- case lexMorloc filename sourceCode of\n    Left err -> Left (showLexError err)\n    Right r -> Right r\n  let srcLines = T.lines sourceCode\n      pstate' = pstate { psModulePath = modulePath, psDocMap = docMap, psSourceLines = srcLines }\n  -- Strategy 1: parse as-is (code with module declarations)\n  case parseAndDesugar pstate' tokens of\n    Right (result, finalState) ->\n      let dag' = foldl (addModule expectedName) dag result\n          dag'' = attachGroupAnnotations tokens groupToks dag'\n      in return (dag'', finalState)\n    Left err ->\n      -- Strategy 2: wrap in module, patch trailing expr as __expr__ assignment.\n      let wrappedCode = \"module main (*)\\n\" <> sourceCode\n      in case lexMorloc filename wrappedCode of\n        Right (wrappedTokens, wrappedDocMap, wrappedGroupToks) ->\n          let pstate'' = pstate' { psDocMap = wrappedDocMap, psSourceLines = T.lines wrappedCode }\n          in case parseAndDesugar pstate'' wrappedTokens of\n            Right (result, finalState) ->\n              let dag' = foldl (addModule expectedName) dag result\n                  dag'' = attachGroupAnnotations wrappedTokens wrappedGroupToks dag'\n              in return (dag'', finalState)\n            Left _ ->\n              case patchForTrailingExpr wrappedTokens of\n                Just patchedTokens ->\n                  case parseAndDesugar pstate'' patchedTokens of\n                    Right (result, finalState) ->\n                      let dag' = foldl (addModule expectedName) dag result\n                          dag'' = attachGroupAnnotations patchedTokens wrappedGroupToks dag'\n                      in return (dag'', finalState)\n                    Left _ -> tryExprFallback tokens pstate' dag filename err\n                Nothing -> tryExprFallback tokens pstate' dag filename err\n        Left _ -> tryExprFallback tokens pstate' dag filename err\n  where\n    tryExprFallback tokens' ps dag' filename' origErr =\n      let exprTokens = stripLayoutTokens tokens'\n      in case parseAndDesugarExpr ps exprTokens of\n        Right (exprI, exprState) -> do\n          let s = exprState\n              i1 = psExpIndex s\n              assI = ExprI i1 (AssE (EV \"__expr__\") exprI [])\n              s1 = s { psExpIndex = i1 + 1 }\n              i2 = psExpIndex s1\n              expI = ExprI i2 (ExpE ExportAll)\n              s2 = s1 { psExpIndex = i2 + 1 }\n              i3 = psExpIndex s2\n              modI = ExprI i3 (ModE (MV \"main\") [expI, assI])\n              finalState = s2 { psExpIndex = i3 + 1 }\n              dag'' = Map.insert (MV \"main\") (modI, []) dag'\n          return (dag'', finalState)\n        Left _ ->\n          Left (showParseError filename' origErr)\n\n    -- When an expected module name is provided (e.g., from \"import .units\"),\n    -- use it as the DAG key so edges from the importing module resolve correctly.\n    -- The file may declare \"module units (...)\" but the import edge targets \".units\".\n    addModule mayExpected d e@(ExprI _ (ModE n es)) =\n      let key = maybe n id mayExpected\n          imports = [(importModuleName i, i) | (ExprI _ (ImpE i)) <- es]\n      in Map.insert key (e, imports) d\n    addModule _ _ _ = error \"expected a module\"\n\npatchForTrailingExpr :: [Located] -> Maybe [Located]\npatchForTrailingExpr tokens = do\n  let tokens' = patchExport tokens\n  patchLastStmt tokens'\n\npatchExport :: [Located] -> [Located]\npatchExport [] = []\npatchExport (t@(Located _ TokLParen _) : Located p TokStar _ : rest) =\n  t : Located p (TokLowerName \"__expr__\") \"__expr__\" : rest\npatchExport (t : rest) = t : patchExport rest\n\npatchLastStmt :: [Located] -> Maybe [Located]\npatchLastStmt tokens =\n  case findLastTopVSemi tokens 0 0 Nothing of\n    Just idx ->\n      let (before, after) = splitAt (idx + 1) tokens\n          dummyPos = Pos 0 0 \"<expr>\"\n          exprTok = Located dummyPos (TokLowerName \"__expr__\") \"__expr__\"\n          eqTok = Located dummyPos TokEquals \"=\"\n      in Just (before ++ [exprTok, eqTok] ++ after)\n    Nothing -> Nothing\n  where\n    findLastTopVSemi :: [Located] -> Int -> Int -> Maybe Int -> Maybe Int\n    findLastTopVSemi [] _ _ lastIdx = lastIdx\n    findLastTopVSemi (Located _ TokVLBrace _ : rest) depth pos lastIdx =\n      findLastTopVSemi rest (depth + 1) (pos + 1) lastIdx\n    findLastTopVSemi (Located _ TokVRBrace _ : rest) depth pos lastIdx =\n      findLastTopVSemi rest (max 0 (depth - 1)) (pos + 1) lastIdx\n    findLastTopVSemi (Located _ TokVSemi _ : rest) depth pos _\n      | depth == 1 = findLastTopVSemi rest depth (pos + 1) (Just pos)\n    findLastTopVSemi (_ : rest) depth pos lastIdx =\n      findLastTopVSemi rest depth (pos + 1) lastIdx\n\nstripLayoutTokens :: [Located] -> [Located]\nstripLayoutTokens = filter (not . isLayoutToken)\n  where\n    isLayoutToken (Located _ TokVLBrace _) = True\n    isLayoutToken (Located _ TokVRBrace _) = True\n    isLayoutToken (Located _ TokVSemi _) = True\n    isLayoutToken _ = False\n\nreadType :: Text -> Either String TypeU\nreadType typeStr = do\n  let initState = emptyPState\n  (tokens, _, _) <- case lexMorloc \"<type>\" typeStr of\n    Left err -> Left (showLexError err)\n    Right r -> Right r\n  (result, _) <- case State.runStateT (parseTypeOnly tokens) initState of\n    Left err -> Left (showParseError \"<type>\" err)\n    Right r -> Right r\n  return result\n\n-- | Post-process the DAG to attach group annotations from --* tokens.\nattachGroupAnnotations :: [Located] -> [Located] -> DAG MVar Import ExprI -> DAG MVar Import ExprI\nattachGroupAnnotations _ [] dag = dag\nattachGroupAnnotations tokens groupToks dag =\n  let groupHeaders = parseGroupHeaders groupToks\n      exportSymPositions = findExportSymbolPositions tokens\n      membership = buildMembership groupHeaders exportSymPositions\n      ghdrMap = Map.fromList [(n, d) | (n, d, _) <- groupHeaders]\n  in Map.map (\\(e, es) -> (attachToExpr membership ghdrMap e, es)) dag\n  where\n    attachToExpr :: Map.Map T.Text T.Text -> Map.Map T.Text [T.Text] -> ExprI -> ExprI\n    attachToExpr mem ghdrs (ExprI i (ModE m es)) =\n      ExprI i (ModE m (map (attachToExpr mem ghdrs) es))\n    attachToExpr mem ghdrs (ExprI i (ExpE (ExportMany symbols _))) =\n      let groupedSymNames = Map.keysSet mem\n          groupNames = nubText [gn | (_, gn) <- Map.toList mem]\n          exportGroups =\n            [ ExportGroup gn (maybe [] id (Map.lookup gn ghdrs))\n                (Set.filter (\\(_, sym) -> Map.lookup (symText sym) mem == Just gn) symbols)\n            | gn <- groupNames\n            ]\n          ungrouped = Set.filter (\\(_, sym) -> not (Set.member (symText sym) groupedSymNames)) symbols\n      in ExprI i (ExpE (ExportMany ungrouped exportGroups))\n    attachToExpr _ _ e = e\n\n    nubText :: [T.Text] -> [T.Text]\n    nubText [] = []\n    nubText (x:xs) = x : nubText (filter (/= x) xs)\n\n    symText :: Symbol -> T.Text\n    symText (TermSymbol (EV n)) = n\n    symText (TypeSymbol (TV n)) = n\n    symText (ClassSymbol (ClassName n)) = n\n\nparseGroupHeaders :: [Located] -> [(T.Text, [T.Text], Pos)]\nparseGroupHeaders = foldl' accum [] . map extractLine\n  where\n    extractLine (Located pos (TokGroupLine txt) _) = (pos, stripOne txt)\n    extractLine (Located pos _ _) = (pos, T.empty)\n\n    -- consume one leading space after --*, preserve remaining indentation\n    stripOne t = T.stripEnd $ case T.uncons t of\n      Just (' ', rest) -> rest\n      _ -> t\n\n    accum :: [(T.Text, [T.Text], Pos)] -> (Pos, T.Text) -> [(T.Text, [T.Text], Pos)]\n    accum gs (pos, line)\n      | Just rest <- T.stripPrefix \"\\\\\" line = addDesc gs pos (T.stripEnd rest)\n      | Just name <- T.stripPrefix \"group:\" (T.stripStart line) =\n          let name' = T.strip name\n          in if T.null name'\n             then gs ++ [(T.empty, [], pos)]  -- --* group: (no name) = terminator\n             else case gs of\n               -- last entry has no name yet: set it\n               _ | not (null gs), let (n, _, _) = last gs, T.null n ->\n                   init gs ++ [let (_, ds, p) = last gs in (name', ds, p)]\n               _ -> gs ++ [(name', [], pos)]\n      | otherwise = addDesc gs pos line  -- includes blank lines\n\n    addDesc [] pos d = [(T.empty, [d], pos)]  -- no group yet, start unnamed entry\n    addDesc gs _ d = init gs ++ [let (n, ds, p) = last gs in (n, ds ++ [d], p)]\n\nfindExportSymbolPositions :: [Located] -> [(T.Text, Pos)]\nfindExportSymbolPositions = findModule\n  where\n    findModule (Located _ TokModule _ : rest) = findLParen rest\n    findModule (_ : rest) = findModule rest\n    findModule [] = []\n\n    findLParen (Located _ TokLParen _ : rest) = scanExports 1 rest\n    findLParen (Located _ TokStar _ : _) = []\n    findLParen (_ : rest) = findLParen rest\n    findLParen [] = []\n\n    scanExports :: Int -> [Located] -> [(T.Text, Pos)]\n    scanExports 0 _ = []\n    scanExports depth (Located _ TokLParen _ : rest) = scanExports (depth + 1) rest\n    scanExports depth (Located _ TokRParen _ : rest)\n      | depth <= 1 = []\n      | otherwise = scanExports (depth - 1) rest\n    scanExports depth (Located pos (TokLowerName n) _ : rest) = (n, pos) : scanExports depth rest\n    scanExports depth (Located pos (TokUpperName n) _ : rest) = (n, pos) : scanExports depth rest\n    scanExports depth (_ : rest) = scanExports depth rest\n    scanExports _ [] = []\n\nbuildMembership :: [(T.Text, [T.Text], Pos)] -> [(T.Text, Pos)] -> Map.Map T.Text T.Text\nbuildMembership groupHeaders exportSyms = Map.fromList\n  [ (sym, gname)\n  | (sym, symPos) <- exportSyms\n  , Just gname <- [findGroup symPos]\n  ]\n  where\n    sortedGroups = sortBy (\\(_,_,p1) (_,_,p2) -> compare p1 p2) groupHeaders\n\n    findGroup :: Pos -> Maybe T.Text\n    findGroup symPos = case filter (\\(_,_,gpos) -> gpos < symPos) (reverse sortedGroups) of\n      ((gname,_,_):_)\n        | T.null gname -> Nothing  -- empty name = group terminator\n        | otherwise -> Just gname\n      [] -> Nothing\n-- $Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp $\n\n#if !defined(__GLASGOW_HASKELL__)\n#  error This code isn't being built with GHC.\n#endif\n\n-- Get WORDS_BIGENDIAN (if defined)\n#include \"MachDeps.h\"\n\n-- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex.\n#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Happy_Prelude.Bool)\n#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Happy_Prelude.Bool)\n#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Happy_Prelude.Bool)\n#define PLUS(n,m) (n Happy_GHC_Exts.+# m)\n#define MINUS(n,m) (n Happy_GHC_Exts.-# m)\n#define TIMES(n,m) (n Happy_GHC_Exts.*# m)\n#define NEGATE(n) (Happy_GHC_Exts.negateInt# (n))\n\ntype Happy_Int = Happy_GHC_Exts.Int#\ndata Happy_IntList = HappyCons Happy_Int Happy_IntList\n\n#define INVALID_TOK -1#\n#define ERROR_TOK 0#\n#define CATCH_TOK 1#\n\n#if defined(HAPPY_COERCE)\n#  define GET_ERROR_TOKEN(x)  (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# i) -> i })\n#  define MK_ERROR_TOKEN(i)   (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# i))\n#  define MK_TOKEN(x)         (happyInTok (x))\n#else\n#  define GET_ERROR_TOKEN(x)  (case x of { HappyErrorToken (Happy_GHC_Exts.I# i) -> i })\n#  define MK_ERROR_TOKEN(i)   (HappyErrorToken (Happy_GHC_Exts.I# i))\n#  define MK_TOKEN(x)         (HappyTerminal (x))\n#endif\n\n#if defined(HAPPY_DEBUG)\n#  define DEBUG_TRACE(s)    (happyTrace (s)) Happy_Prelude.$\nhappyTrace string expr = Happy_System_IO_Unsafe.unsafePerformIO Happy_Prelude.$ do\n    Happy_System_IO.hPutStr Happy_System_IO.stderr string\n    Happy_Prelude.return expr\n#else\n#  define DEBUG_TRACE(s)    {- nothing -}\n#endif\n\ninfixr 9 `HappyStk`\ndata HappyStk a = HappyStk a (HappyStk a)\n\n-----------------------------------------------------------------------------\n-- starting the parse\n\nhappyDoParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll\n\n-----------------------------------------------------------------------------\n-- Accepting the parse\n\n-- If the current token is ERROR_TOK, it means we've just accepted a partial\n-- parse (a %partial parser).  We must ignore the saved token on the top of\n-- the stack in this case.\nhappyAccept ERROR_TOK tk st sts (_ `HappyStk` ans `HappyStk` _) =\n        happyReturn1 ans\nhappyAccept j tk st sts (HappyStk ans _) =\n        (happyTcHack j (happyTcHack st)) (happyReturn1 ans)\n\n-----------------------------------------------------------------------------\n-- Arrays only: do the next action\n\nhappyDoAction i tk st =\n  DEBUG_TRACE(\"state: \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++\n              \",\\ttoken: \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++\n              \",\\taction: \")\n  case happyDecodeAction (happyNextAction i st) of\n    HappyFail             -> DEBUG_TRACE(\"failing.\\n\")\n                             happyFail i tk st\n    HappyAccept           -> DEBUG_TRACE(\"accept.\\n\")\n                             happyAccept i tk st\n    HappyReduce rule      -> DEBUG_TRACE(\"reduce (rule \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# rule) Happy_Prelude.++ \")\")\n                             (happyReduceArr Happy_Data_Array.! (Happy_GHC_Exts.I# rule)) i tk st\n    HappyShift  new_state -> DEBUG_TRACE(\"shift, enter state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# new_state) Happy_Prelude.++ \"\\n\")\n                             happyShift new_state i tk st\n\n{-# INLINE happyNextAction #-}\nhappyNextAction i st = case happyIndexActionTable i st of\n  Happy_Prelude.Just (Happy_GHC_Exts.I# act) -> act\n  Happy_Prelude.Nothing                      -> happyIndexOffAddr happyDefActions st\n\n{-# INLINE happyIndexActionTable #-}\nhappyIndexActionTable i st\n  | GTE(i, 0#), GTE(off, 0#), EQ(happyIndexOffAddr happyCheck off, i)\n  -- i >= 0:   Guard against INVALID_TOK (do the default action, which ultimately errors)\n  -- off >= 0: Otherwise it's a default action\n  -- equality check: Ensure that the entry in the compressed array is owned by st\n  = Happy_Prelude.Just (Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off))\n  | Happy_Prelude.otherwise\n  = Happy_Prelude.Nothing\n  where\n    off = PLUS(happyIndexOffAddr happyActOffsets st, i)\n\ndata HappyAction\n  = HappyFail\n  | HappyAccept\n  | HappyReduce Happy_Int -- rule number\n  | HappyShift Happy_Int  -- new state\n  deriving Happy_Prelude.Show\n\n{-# INLINE happyDecodeAction #-}\nhappyDecodeAction :: Happy_Int -> HappyAction\nhappyDecodeAction  0#                        = HappyFail\nhappyDecodeAction -1#                        = HappyAccept\nhappyDecodeAction action | LT(action, 0#)    = HappyReduce NEGATE(PLUS(action, 1#))\n                         | Happy_Prelude.otherwise = HappyShift MINUS(action, 1#)\n\n{-# INLINE happyIndexGotoTable #-}\nhappyIndexGotoTable nt st = happyIndexOffAddr happyTable off\n  where\n    off = PLUS(happyIndexOffAddr happyGotoOffsets st, nt)\n\n{-# INLINE happyIndexOffAddr #-}\nhappyIndexOffAddr :: HappyAddr -> Happy_Int -> Happy_Int\nhappyIndexOffAddr (HappyA# arr) off =\n#if __GLASGOW_HASKELL__ >= 901\n  Happy_GHC_Exts.int32ToInt# -- qualified import because it doesn't exist on older GHC's\n#endif\n#ifdef WORDS_BIGENDIAN\n  -- The CI of `alex` tests this code path\n  (Happy_GHC_Exts.word32ToInt32# (Happy_GHC_Exts.wordToWord32# (Happy_GHC_Exts.byteSwap32# (Happy_GHC_Exts.word32ToWord# (Happy_GHC_Exts.int32ToWord32#\n#endif\n  (Happy_GHC_Exts.indexInt32OffAddr# arr off)\n#ifdef WORDS_BIGENDIAN\n  )))))\n#endif\n\nhappyIndexRuleArr :: Happy_Int -> (# Happy_Int, Happy_Int #)\nhappyIndexRuleArr r = (# nt, len #)\n  where\n    !(Happy_GHC_Exts.I# n_starts) = happy_n_starts\n    offs = TIMES(MINUS(r,n_starts),2#)\n    nt = happyIndexOffAddr happyRuleArr offs\n    len = happyIndexOffAddr happyRuleArr PLUS(offs,1#)\n\ndata HappyAddr = HappyA# Happy_GHC_Exts.Addr#\n\n-----------------------------------------------------------------------------\n-- Shifting a token\n\nhappyShift new_state ERROR_TOK tk st sts stk@(x `HappyStk` _) =\n     -- See \"Error Fixup\" below\n     let i = GET_ERROR_TOKEN(x) in\n     DEBUG_TRACE(\"shifting the error token\")\n     happyDoAction i tk new_state (HappyCons st sts) stk\n\nhappyShift new_state i tk st sts stk =\n     happyNewToken new_state (HappyCons st sts) (MK_TOKEN(tk) `HappyStk` stk)\n\n-- happyReduce is specialised for the common cases.\n\nhappySpecReduce_0 nt fn j tk st sts stk\n     = happySeq fn (happyGoto nt j tk st (HappyCons st sts) (fn `HappyStk` stk))\n\nhappySpecReduce_1 nt fn j tk old_st sts@(HappyCons st _) (v1 `HappyStk` stk')\n     = let r = fn v1 in\n       happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')))\n\nhappySpecReduce_2 nt fn j tk old_st\n  (HappyCons _ sts@(HappyCons st _))\n  (v1 `HappyStk` v2 `HappyStk` stk')\n     = let r = fn v1 v2 in\n       happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')))\n\nhappySpecReduce_3 nt fn j tk old_st\n  (HappyCons _ (HappyCons _ sts@(HappyCons st _)))\n  (v1 `HappyStk` v2 `HappyStk` v3 `HappyStk` stk')\n     = let r = fn v1 v2 v3 in\n       happyTcHack old_st (happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')))\n\nhappyReduce k nt fn j tk st sts stk\n     = case happyDrop MINUS(k,(1# :: Happy_Int)) sts of\n         sts1@(HappyCons st1 _) ->\n                let r = fn stk in -- it doesn't hurt to always seq here...\n                st `happyTcHack` happyDoSeq r (happyGoto nt j tk st1 sts1 r)\n\nhappyMonadReduce k nt fn j tk st sts stk =\n      case happyDrop k (HappyCons st sts) of\n        sts1@(HappyCons st1 _) ->\n          let drop_stk = happyDropStk k stk in\n          j `happyTcHack` happyThen1 (fn stk tk)\n                                     (\\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk))\n\nhappyMonad2Reduce k nt fn j tk st sts stk =\n      case happyDrop k (HappyCons st sts) of\n        sts1@(HappyCons st1 _) ->\n          let drop_stk = happyDropStk k stk\n              off = happyIndexOffAddr happyGotoOffsets st1\n              off_i = PLUS(off, nt)\n              new_state = happyIndexOffAddr happyTable off_i\n          in\n            j `happyTcHack` happyThen1 (fn stk tk)\n                                       (\\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))\n\nhappyDrop 0# l               = l\nhappyDrop n  (HappyCons _ t) = happyDrop MINUS(n,(1# :: Happy_Int)) t\n\nhappyDropStk 0# l                 = l\nhappyDropStk n  (x `HappyStk` xs) = happyDropStk MINUS(n,(1#::Happy_Int)) xs\n\n-----------------------------------------------------------------------------\n-- Moving to a new state after a reduction\n\nhappyGoto nt j tk st =\n   DEBUG_TRACE(\", goto state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# new_state) Happy_Prelude.++ \"\\n\")\n   happyDoAction j tk new_state\n  where new_state = happyIndexGotoTable nt st\n\n{- Note [Error recovery]\n~~~~~~~~~~~~~~~~~~~~~~~~\nWhen there is no applicable action for the current lookahead token `tk`,\nhappy enters error recovery mode. Depending on whether the grammar file\ndeclares the two action form `%error { abort } { report }` for\n    Resumptive Error Handling,\nit works in one (not resumptive) or two phases (resumptive):\n\n 1. Fixup mode:\n    Try to see if there is an action for the error token ERROR_TOK. If there\n    is, do *not* emit an error and pretend instead that an `error` token was\n    inserted.\n    When there is no ERROR_TOK action, report an error.\n\n    In non-resumptive error handling, calling the single error handler\n    (e.g. `happyError`) will throw an exception and abort the parser.\n    However, in resumptive error handling we enter *error resumption mode*.\n\n 2. Error resumption mode:\n    After reporting the error (with `report`), happy will attempt to find\n    a good state stack to resume parsing in.\n    For each candidate stack, it discards input until one of the candidates\n    resumes (i.e. shifts the current input).\n    If no candidate resumes before the end of input, resumption failed and\n    calls the `abort` function, to much the same effect as in non-resumptive\n    error handling.\n\n    Candidate stacks are declared by the grammar author using the special\n    `catch` terminal and called \"catch frames\".\n    This mechanism is described in detail in Note [happyResume].\n\nThe `catch` resumption mechanism (2) is what usually is associated with\n`error` in `bison` or `menhir`. Since `error` is used for the Fixup mechanism\n(1) above, we call the corresponding token `catch`.\nFurthermore, in constrast to `bison`, our implementation of `catch`\nnon-deterministically considers multiple catch frames on the stack for\nresumption (See Note [Multiple catch frames]).\n\nNote [happyResume]\n~~~~~~~~~~~~~~~~~~\n`happyResume` implements the resumption mechanism from Note [Error recovery].\nIt is best understood by example. Consider\n\nExp :: { String }\nExp : '1'                { \"1\" }\n    | catch              { \"catch\" }\n    | Exp '+' Exp %shift { $1 Happy_Prelude.++ \" + \" Happy_Prelude.++ $3 } -- %shift: associate 1 + 1 + 1 to the right\n    | '(' Exp ')'        { \"(\" Happy_Prelude.++ $2 Happy_Prelude.++ \")\" }\n\nThe idea of the use of `catch` here is that upon encountering a parse error\nduring expression parsing, we can gracefully degrade using the `catch` rule,\nstill producing a partial syntax tree and keep on parsing to find further\nsyntax errors.\n\nLet's trace the parser state for input 11+1, which will error out after shifting 1.\nAfter shifting, we have the following item stack (growing downwards and omitting\ntransitive closure items):\n\n  State 0: %start_parseExp -> . Exp\n  State 5: Exp -> '1' .\n\n(Stack as a list of state numbers: [5,0].)\nAs Note [Error recovery] describes, we will first try Fixup mode.\nThat fails because no production can shift the `error` token.\nNext we try Error resumption mode. This works as follows:\n\n  1. Pop off the item stack until we find an item that can shift the `catch`\n     token. (Implemented in `pop_items`.)\n       * State 5 cannot shift catch. Pop.\n       * State 0 can shift catch, which would transition into\n          State 4: Exp -> catch .\n     So record the *stack* `[4,0]` after doing the shift transition.\n     We call this a *catch frame*, where the top is a *catch state*,\n     corresponding to an item in which we just shifted a `catch` token.\n     There can be multiple such catch stacks, see Note [Multiple catch frames].\n\n  2. Discard tokens from the input until the lookahead can be shifted in one\n     of the catch stacks. (Implemented in `discard_input_until_exp` and\n     `some_catch_state_shifts`.)\n       * We cannot shift the current lookahead '1' in state 4, so we discard\n       * We *can* shift the next lookahead '+' in state 4, but only after\n         reducing, which pops State 4 and goes to State 3:\n           State 3: %start_parseExp -> Exp .\n                    Exp -> Exp . '+' Exp\n         Here we can shift '+'.\n     As you can see, to implement this machinery we need to simulate\n     the operation of the LALR automaton, especially reduction\n     (`happySimulateReduce`).\n\nNote [Multiple catch frames]\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nFor fewer spurious error messages, it can be beneficial to trace multiple catch\nitems. Consider\n\nExp : '1'\n    | catch\n    | Exp '+' Exp %shift\n    | '(' Exp ')'\n\nLet's trace the parser state for input (;+1, which will error out after shifting (.\nAfter shifting, we have the following item stack (growing downwards):\n\n  State 0: %start_parseExp -> . Exp\n  State 6: Exp -> '(' . Exp ')'\n\nUpon error, we want to find items in the stack which can shift a catch token.\nNote that both State 0 and State 6 can shift a catch token, transitioning into\n  State 4: Exp -> catch .\nHence we record the catch frames `[4,6,0]` and `[4,0]` for possible resumption.\n\nWhich catch frame do we pick for resumption?\nNote that resuming catch frame `[4,0]` will parse as \"catch+1\", whereas\nresuming the innermost frame `[4,6,0]` corresponds to parsing \"(catch+1\".\nThe latter would keep discarding input until the closing ')' is found.\nSo we will discard + and 1, leading to a spurious syntax error at the end of\ninput, aborting the parse and never producing a partial syntax tree. Bad!\n\nIt is far preferable to resume with catch frame `[4,0]`, where we can resume\nsuccessfully on input +, so that is what we do.\n\nIn general, we pick the catch frame for resumption that discards the least\namount of input for a successful shift, preferring the topmost such catch frame.\n-}\n\n-- happyFail :: Happy_Int -> Token -> Happy_Int -> _\n-- This function triggers Note [Error recovery].\n-- If the current token is ERROR_TOK, phase (1) has failed and we might try\n-- phase (2).\nhappyFail ERROR_TOK = happyFixupFailed\nhappyFail i         = happyTryFixup i\n\n-- Enter Error Fixup (see Note [Error recovery]):\n-- generate an error token, save the old token and carry on.\n-- When a `happyShift` accepts the error token, we will pop off the error token\n-- to resume parsing with the current lookahead `i`.\nhappyTryFixup i tk action sts stk =\n  DEBUG_TRACE(\"entering `error` fixup.\\n\")\n  happyDoAction ERROR_TOK tk action sts (MK_ERROR_TOKEN(i) `HappyStk` stk)\n  -- NB: `happyShift` will simply pop the error token and carry on with\n  --     `tk`. Hence we don't change `tk` in the call here\n\n-- See Note [Error recovery], phase (2).\n-- Enter resumption mode after reporting the error by calling `happyResume`.\nhappyFixupFailed tk st sts (x `HappyStk` stk) =\n  let i = GET_ERROR_TOKEN(x) in\n  DEBUG_TRACE(\"`error` fixup failed.\\n\")\n  let resume   = happyResume i tk st sts stk\n      expected = happyExpectedTokens st sts in\n  happyReport i tk expected resume\n\n-- happyResume :: Happy_Int -> Token -> Happy_Int -> _\n-- See Note [happyResume]\nhappyResume i tk st sts stk = pop_items [] st sts stk\n  where\n    !(Happy_GHC_Exts.I# n_starts) = happy_n_starts   -- this is to test whether we have a start token\n    !(Happy_GHC_Exts.I# eof_i) = happy_n_terms Happy_Prelude.- 1   -- this is the token number of the EOF token\n    happy_list_to_list :: Happy_IntList -> [Happy_Prelude.Int]\n    happy_list_to_list (HappyCons st sts)\n      | LT(st, n_starts)\n      = [(Happy_GHC_Exts.I# st)]\n      | Happy_Prelude.otherwise\n      = (Happy_GHC_Exts.I# st) : happy_list_to_list sts\n\n    -- See (1) of Note [happyResume]\n    pop_items catch_frames st sts stk\n      | LT(st, n_starts)\n      = DEBUG_TRACE(\"reached start state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ \", \")\n        if Happy_Prelude.null catch_frames_new\n          then DEBUG_TRACE(\"no resumption.\\n\")\n               happyAbort\n          else DEBUG_TRACE(\"now discard input, trying to anchor in states \" Happy_Prelude.++ Happy_Prelude.show (Happy_Prelude.map (happy_list_to_list . Happy_Prelude.fst) (Happy_Prelude.reverse catch_frames_new)) Happy_Prelude.++ \".\\n\")\n               discard_input_until_exp i tk (Happy_Prelude.reverse catch_frames_new)\n      | (HappyCons st1 sts1) <- sts, _ `HappyStk` stk1 <- stk\n      = pop_items catch_frames_new st1 sts1 stk1\n      where\n        !catch_frames_new\n          | HappyShift new_state <- happyDecodeAction (happyNextAction CATCH_TOK st)\n          , DEBUG_TRACE(\"can shift catch token in state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ \", into state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# new_state) Happy_Prelude.++ \"\\n\")\n            Happy_Prelude.null (Happy_Prelude.filter (\\(HappyCons _ (HappyCons h _),_) -> EQ(st,h)) catch_frames)\n          = (HappyCons new_state (HappyCons st sts), MK_ERROR_TOKEN(i) `HappyStk` stk):catch_frames -- MK_ERROR_TOKEN(i) is just some dummy that should not be accessed by user code\n          | Happy_Prelude.otherwise\n          = DEBUG_TRACE(\"already shifted or can't shift catch in \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ \"\\n\")\n            catch_frames\n\n    -- See (2) of Note [happyResume]\n    discard_input_until_exp i tk catch_frames\n      | Happy_Prelude.Just (HappyCons st (HappyCons catch_st sts), catch_frame) <- some_catch_state_shifts i catch_frames\n      = DEBUG_TRACE(\"found expected token in state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ \" after shifting from \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# catch_st) Happy_Prelude.++ \": \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ \"\\n\")\n        happyDoAction i tk st (HappyCons catch_st sts) catch_frame\n      | EQ(i,eof_i) -- is i EOF?\n      = DEBUG_TRACE(\"reached EOF, cannot resume. abort parse :(\\n\")\n        happyAbort\n      | Happy_Prelude.otherwise\n      = DEBUG_TRACE(\"discard token \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ \"\\n\")\n        happyLex (\\eof_tk -> discard_input_until_exp eof_i eof_tk catch_frames) -- eof\n                 (\\i tk   -> discard_input_until_exp i tk catch_frames)         -- not eof\n\n    some_catch_state_shifts _ [] = DEBUG_TRACE(\"no catch state could shift.\\n\") Happy_Prelude.Nothing\n    some_catch_state_shifts i catch_frames@(((HappyCons st sts),_):_) = try_head i st sts catch_frames\n      where\n        try_head i st sts catch_frames = -- PRECONDITION: head catch_frames = (HappyCons st sts)\n          DEBUG_TRACE(\"trying token \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ \" in state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ \": \")\n          case happyDecodeAction (happyNextAction i st) of\n            HappyFail     -> DEBUG_TRACE(\"fail.\\n\")   some_catch_state_shifts i (Happy_Prelude.tail catch_frames)\n            HappyAccept   -> DEBUG_TRACE(\"accept.\\n\") Happy_Prelude.Just (Happy_Prelude.head catch_frames)\n            HappyShift _  -> DEBUG_TRACE(\"shift.\\n\")  Happy_Prelude.Just (Happy_Prelude.head catch_frames)\n            HappyReduce r -> case happySimulateReduce r st sts of\n              (HappyCons st1 sts1) -> try_head i st1 sts1 catch_frames\n\nhappySimulateReduce r st sts =\n  DEBUG_TRACE(\"simulate reduction of rule \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# r) Happy_Prelude.++ \", \")\n  let (# nt, len #) = happyIndexRuleArr r in\n  DEBUG_TRACE(\"nt \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# nt) Happy_Prelude.++ \", len: \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# len) Happy_Prelude.++ \", new_st \")\n  let !(sts1@(HappyCons st1 _)) = happyDrop len (HappyCons st sts)\n      new_st = happyIndexGotoTable nt st1 in\n  DEBUG_TRACE(Happy_Prelude.show (Happy_GHC_Exts.I# new_st) Happy_Prelude.++ \".\\n\")\n  (HappyCons new_st sts1)\n\nhappyTokenToString :: Happy_Prelude.Int -> Happy_Prelude.String\nhappyTokenToString i = happyTokenStrings Happy_Prelude.!! (i Happy_Prelude.- 2) -- 2: errorTok, catchTok\n\nhappyExpectedTokens :: Happy_Int -> Happy_IntList -> [Happy_Prelude.String]\n-- Upon a parse error, we want to suggest tokens that are expected in that\n-- situation. This function computes such tokens.\n-- It works by examining the top of the state stack.\n-- For every token number that does a shift transition, record that token number.\n-- For every token number that does a reduce transition, simulate that reduction\n-- on the state state stack and repeat.\n-- The recorded token numbers are then formatted with 'happyTokenToString' and\n-- returned.\nhappyExpectedTokens st sts =\n  DEBUG_TRACE(\"constructing expected tokens.\\n\")\n  Happy_Prelude.map happyTokenToString (search_shifts st sts [])\n  where\n    search_shifts st sts shifts = Happy_Prelude.foldr (add_action st sts) shifts (distinct_actions st)\n    add_action st sts (Happy_GHC_Exts.I# i, Happy_GHC_Exts.I# act) shifts =\n      DEBUG_TRACE(\"found action in state \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# st) Happy_Prelude.++ \", input \" Happy_Prelude.++ Happy_Prelude.show (Happy_GHC_Exts.I# i) Happy_Prelude.++ \", \" Happy_Prelude.++ Happy_Prelude.show (happyDecodeAction act) Happy_Prelude.++ \"\\n\")\n      case happyDecodeAction act of\n        HappyFail     -> shifts\n        HappyAccept   -> shifts -- This would always be %eof or error... Not helpful\n        HappyShift _  -> Happy_Prelude.insert (Happy_GHC_Exts.I# i) shifts\n        HappyReduce r -> case happySimulateReduce r st sts of\n          (HappyCons st1 sts1) -> search_shifts st1 sts1 shifts\n    distinct_actions st\n      -- The (token number, action) pairs of all actions in the given state\n      = ((-1), (Happy_GHC_Exts.I# (happyIndexOffAddr happyDefActions st)))\n      : [ (i, act) | i <- [begin_i..happy_n_terms], act <- get_act row_off i ]\n      where\n        row_off = happyIndexOffAddr happyActOffsets st\n        begin_i = 2 -- +2: errorTok,catchTok\n    get_act off (Happy_GHC_Exts.I# i) -- happyIndexActionTable with cached row offset\n      | let off_i = PLUS(off,i)\n      , GTE(off_i,0#)\n      , EQ(happyIndexOffAddr happyCheck off_i,i)\n      = [(Happy_GHC_Exts.I# (happyIndexOffAddr happyTable off_i))]\n      | Happy_Prelude.otherwise\n      = []\n\n-- Internal happy errors:\n\nnotHappyAtAll :: a\nnotHappyAtAll = Happy_Prelude.error \"Internal Happy parser panic. This is not supposed to happen! Please open a bug report at https://github.com/haskell/happy/issues.\\n\"\n\n-----------------------------------------------------------------------------\n-- Hack to get the typechecker to accept our action functions\n\nhappyTcHack :: Happy_Int -> a -> a\nhappyTcHack x y = y\n{-# INLINE happyTcHack #-}\n\n-----------------------------------------------------------------------------\n-- Seq-ing.  If the --strict flag is given, then Happy emits\n--      happySeq = happyDoSeq\n-- otherwise it emits\n--      happySeq = happyDontSeq\n\nhappyDoSeq, happyDontSeq :: a -> b -> b\nhappyDoSeq   a b = a `Happy_GHC_Exts.seq` b\nhappyDontSeq a b = b\n\n-----------------------------------------------------------------------------\n-- Don't inline any functions from the template.  GHC has a nasty habit\n-- of deciding to inline happyGoto everywhere, which increases the size of\n-- the generated parser quite a bit.\n\n{-# NOINLINE happyDoAction #-}\n{-# NOINLINE happyTable #-}\n{-# NOINLINE happyCheck #-}\n{-# NOINLINE happyActOffsets #-}\n{-# NOINLINE happyGotoOffsets #-}\n{-# NOINLINE happyDefActions #-}\n\n{-# NOINLINE happyShift #-}\n{-# NOINLINE happySpecReduce_0 #-}\n{-# NOINLINE happySpecReduce_1 #-}\n{-# NOINLINE happySpecReduce_2 #-}\n{-# NOINLINE happySpecReduce_3 #-}\n{-# NOINLINE happyReduce #-}\n{-# NOINLINE happyMonadReduce #-}\n{-# NOINLINE happyGoto #-}\n{-# NOINLINE happyFail #-}\n\n-- end of Happy Template.\n"
  },
  {
    "path": "library/Morloc/Frontend/Parser.y",
    "content": "{\n{-# LANGUAGE OverloadedStrings #-}\n\nmodule Morloc.Frontend.Parser\n  ( readProgram\n  , readType\n  , PState (..)\n  , emptyPState\n  ) where\n\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Data.Map.Strict as Map\nimport qualified Data.Set as Set\nimport qualified Data.Scientific as DS\nimport Data.List (sortBy, foldl')\nimport qualified Control.Monad.State.Strict as State\nimport Morloc.Frontend.Token\nimport Morloc.Frontend.Lexer (lexMorloc, showLexError)\nimport Morloc.Frontend.CST\nimport Morloc.Frontend.Desugar (DState(..), D, ParseError(..), showParseError, desugarProgram, desugarExpr)\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.Type\nimport Morloc.Namespace.Expr\nimport qualified Morloc.BaseTypes as BT\n}\n\n%name parseProgram program\n%name parseTypeOnly type_eof\n%name parseExprOnly expr_eof\n\n%tokentype { Located }\n%monad { P } { (>>=) } { return }\n%error { parseError }\n%errorhandlertype explist\n\n-- shift/reduce conflicts, all resolved correctly by shift:\n-- - 1 from :: type annotation (infix_expr followed by ::)\n-- - 2 from app_expr (./- could start new atom or be operators)\n-- - 1 from sig_or_ass (LOWER could be param or start of new decl)\n-- - 6 from class_constraints (UPPER could be constraint arg or end of constraint)\n-- - 3 from original grammar\n-- - 4 from accessor_tail (GDOT/= could continue chain or end it)\n-- - 2 from guard_clauses ('?' could start guard or be part of next decl)\n-- - 1 from guard_expr ('?' in expr could be nested guard_expr or next guard_clause)\n-- - 4 from optional type syntax ('?' could start optional type or guard)\n-- Note: force_expr (!) re-added for inline effect forcing in do-blocks\n-- - 2 from force_expr ('!' could start force or be part of another expr)\n-- - 1 from import_module_name (module_comp could be namespace prefix or whole name)\n-- - 0 from var_expr qualified name and import 'as' namespace (no new conflicts)\n-- - 13 from type-level Nat arithmetic ('+' and '*' in add_type/mul_type rules)\n%expect 84\n\n%token\n  VLBRACE    { Located _ TokVLBrace _ }\n  VRBRACE    { Located _ TokVRBrace _ }\n  VSEMI      { Located _ TokVSemi _ }\n  '('        { Located _ TokLParen _ }\n  ')'        { Located _ TokRParen _ }\n  '['        { Located _ TokLBracket _ }\n  ']'        { Located _ TokRBracket _ }\n  '{'        { Located _ TokLBrace _ }\n  '}'        { Located _ TokRBrace _ }\n  '<'        { Located _ TokLAngle _ }\n  '>'        { Located _ TokRAngle _ }\n  ','        { Located _ TokComma _ }\n  '\\\\'       { Located _ TokBackslash _ }\n  '_'        { Located _ TokUnderscore _ }\n  '!'        { Located _ TokBang _ }\n  '?'        { Located _ TokQuestion _ }\n  '.'        { Located _ TokDot _ }\n  GDOT       { Located _ TokGetterDot _ }\n  NSDOT      { Located _ TokNsDot _ }\n  LABELCOLON { Located _ TokLabelColon _ }\n  GDOTCHAIN  { Located _ TokGetterDotChain _ }\n  '='        { Located _ TokEquals _ }\n  '::'       { Located _ TokDColon _ }\n  '->'       { Located _ TokArrow _ }\n  '=>'       { Located _ TokFatArrow _ }\n  '<-'       { Located _ TokBind _ }\n  '*'        { Located _ TokStar _ }\n  '-'        { Located _ TokMinus _ }\n  ':'        { Located _ TokColon _ }\n  'module'   { Located _ TokModule _ }\n  'import'   { Located _ TokImport _ }\n  'source'   { Located _ TokSource _ }\n  'from'     { Located _ TokFrom _ }\n  'where'    { Located _ TokWhere _ }\n  'as'       { Located _ TokAs _ }\n  'True'     { Located _ TokTrue _ }\n  'False'    { Located _ TokFalse _ }\n  'type'     { Located _ TokType _ }\n  'record'   { Located _ TokRecord _ }\n  'object'   { Located _ TokObject _ }\n  'table'    { Located _ TokTable _ }\n  'class'    { Located _ TokClass _ }\n  'instance' { Located _ TokInstance _ }\n  'infixl'   { Located _ TokInfixl _ }\n  'infixr'   { Located _ TokInfixr _ }\n  'infix'    { Located _ TokInfix _ }\n  'let'      { Located _ TokLet _ }\n  'in'       { Located _ TokIn _ }\n  'do'       { Located _ TokDo _ }\n  'Null'     { Located _ TokNull _ }\n  LOWER      { Located _ (TokLowerName _) _ }\n  UPPER      { Located _ (TokUpperName _) _ }\n  '+'        { Located _ (TokOperator \"+\") _ }\n  '/'        { Located _ (TokOperator \"/\") _ }\n  OPERATOR   { Located _ (TokOperator _) _ }\n  INTEGER    { Located _ (TokInteger _) _ }\n  FLOAT      { Located _ (TokFloat _) _ }\n  STRING     { Located _ (TokString _) _ }\n  STRSTART   { Located _ (TokStringStart _) _ }\n  STRMID     { Located _ (TokStringMid _) _ }\n  STREND     { Located _ (TokStringEnd _) _ }\n  INTERPOPEN { Located _ TokInterpOpen _ }\n  INTERPCLOSE { Located _ TokInterpClose _ }\n  INTRINSIC  { Located _ (TokIntrinsic _) _ }\n  ';'        { Located _ TokSemicolon _ }\n  '%inline'  { Located _ TokPragmaInline _ }\n  EOF        { Located _ TokEOF _ }\n\n%%\n\n--------------------------------------------------------------------\n-- Program\n--------------------------------------------------------------------\n\nprogram :: { ([Loc CstExpr], Bool) }\n  : modules EOF             { ($1, False) }\n  | top_body EOF            { ($1, True) }\n\n-- Standalone entry points with explicit EOF\ntype_eof :: { TypeU }\n  : type EOF                { $1 }\n\nexpr_eof :: { Loc CstExpr }\n  : expr EOF                { $1 }\n\nmodules :: { [Loc CstExpr] }\n  : module                   { [$1] }\n  | modules module           { $1 ++ [$2] }\n\nmodule :: { Loc CstExpr }\n  : 'module' module_name '(' exports ')' top_body\n      { at $1 (CModE (Just $2) $4 $6) }\n  | 'module' '(' exports ')' top_body\n      { at $1 (CModE Nothing $3 $5) }\n\ntop_body :: { [Loc CstExpr] }\n  : VLBRACE top_decls VRBRACE   { $2 }\n  | VLBRACE VRBRACE              { [] }\n  | '{' top_decls_explicit '}'   { $2 }\n  | '{' '}'                      { [] }\n\ntop_decls :: { [Loc CstExpr] }\n  : top_decl                     { $1 }\n  | top_decls VSEMI top_decl     { $1 ++ $3 }\n\ntop_decls_explicit :: { [Loc CstExpr] }\n  : top_decl                           { $1 }\n  | top_decls_explicit ';' top_decl    { $1 ++ $3 }\n\ntop_decl :: { [Loc CstExpr] }\n  : import_decl       { [$1] }\n  | typedef_decl      { [$1] }\n  | typeclass_decl    { [$1] }\n  | instance_decl     { $1 }\n  | fixity_decl       { [$1] }\n  | source_decl       { $1 }\n  | '%inline' source_decl { map (\\(Loc sp e) -> Loc sp (CInlineE (Loc sp e))) $2 }\n  | sig_or_ass        { $1 }\n\nsig_or_ass :: { [Loc CstExpr] }\n  : evar_or_op '::' sig_type\n      { [at $1 (CSigE (toEVar $1) $3)] }\n  | evar_or_op lower_names '=' expr opt_where_decls\n      { [at $1 (CAssE (toEVar $1) $2 $4 $5)] }\n  | evar_or_op lower_names guard_clauses ':' expr opt_where_decls\n      { [at $1 (CGuardedAssE (toEVar $1) $2 $3 $5 $6)] }\n\nguard_clauses :: { [(Loc CstExpr, Loc CstExpr)] }\n  : guard_clause                    { [$1] }\n  | guard_clauses guard_clause      { $1 ++ [$2] }\n\nguard_clause :: { (Loc CstExpr, Loc CstExpr) }\n  : '?' expr '=' expr              { ($2, $4) }\n\n--------------------------------------------------------------------\n-- Module names\n--------------------------------------------------------------------\n\nmodule_name :: { Text }\n  : module_parts             { T.intercalate \".\" $1 }\n\nmodule_parts :: { [Text] }\n  : module_comp                        { [$1] }\n  | module_parts '.' module_comp       { $1 ++ [$3] }\n  | module_parts GDOT module_comp      { $1 ++ [$3] }\n  | module_parts NSDOT module_comp     { $1 ++ [$3] }\n  | module_parts GDOTCHAIN module_comp { $1 ++ [$3] }\n\nmodule_comp :: { Text }\n  : LOWER                              { getName $1 }\n  | module_comp '-' LOWER              { $1 <> \"-\" <> getName $3 }\n\n--------------------------------------------------------------------\n-- Exports\n--------------------------------------------------------------------\n\nexports :: { CstExport }\n  : '*'                     { CstExportAll }\n  | export_list             { CstExportMany $1 }\n\nexport_list :: { [Located] }\n  : export_item                      { [$1] }\n  | export_list ',' export_item      { $1 ++ [$3] }\n\nexport_item :: { Located }\n  : symbol              { $1 }\n\nsymbol :: { Located }\n  : LOWER               { $1 }\n  | '(' operator_name ')' { $2 }\n  | '(' '-' ')'          { $2 }\n  | '(' '.' ')'          { $2 }\n  | UPPER               { $1 }\n\n--------------------------------------------------------------------\n-- Imports\n--------------------------------------------------------------------\n\nimport_decl :: { Loc CstExpr }\n  : 'import' import_module_name opt_import_list\n      { at $1 (CImpE (Import (MV $2) $3 [] Nothing)) }\n  | 'import' import_module_name 'as' LOWER opt_import_list\n      { at $1 (CImpE (Import (MV $2) $5 [] (Just (EV (getName $4))))) }\n  | 'import' GDOT module_name opt_import_list\n      { at $1 (CImpE (Import (MV (\".\" <> $3)) $4 [] Nothing)) }\n  | 'import' GDOT module_name 'as' LOWER opt_import_list\n      { at $1 (CImpE (Import (MV (\".\" <> $3)) $6 [] (Just (EV (getName $5))))) }\n\n-- | Module names in import context allow an optional namespace prefix: owner/name\nimport_module_name :: { Text }\n  : module_comp '/' module_name      { $1 <> \"/\" <> $3 }\n  | module_name                      { $1 }\n\nopt_import_list :: { Maybe [AliasedSymbol] }\n  : {- empty -}                            { Nothing }\n  | '(' import_items ')'                   { Just $2 }\n\nimport_items :: { [AliasedSymbol] }\n  : import_item                            { [$1] }\n  | import_items ',' import_item           { $1 ++ [$3] }\n\nimport_item :: { AliasedSymbol }\n  : LOWER                              { AliasedTerm (EV (getName $1)) (EV (getName $1)) }\n  | LOWER 'as' LOWER                   { AliasedTerm (EV (getName $1)) (EV (getName $3)) }\n  | '(' operator_name ')' 'as' LOWER   { AliasedTerm (EV (getOp $2)) (EV (getName $5)) }\n  | '(' operator_name ')'              { AliasedTerm (EV (getOp $2)) (EV (getOp $2)) }\n  | '(' '-' ')' 'as' LOWER            { AliasedTerm (EV \"-\") (EV (getName $5)) }\n  | '(' '-' ')'                        { AliasedTerm (EV \"-\") (EV \"-\") }\n  | '(' '.' ')' 'as' LOWER            { AliasedTerm (EV \".\") (EV (getName $5)) }\n  | '(' '.' ')'                        { AliasedTerm (EV \".\") (EV \".\") }\n  | UPPER                              { AliasedType (TV (getName $1)) (TV (getName $1)) }\n  | UPPER 'as' UPPER                   { AliasedType (TV (getName $1)) (TV (getName $3)) }\n\n--------------------------------------------------------------------\n-- Type definitions\n--------------------------------------------------------------------\n\ntypedef_decl :: { Loc CstExpr }\n  : 'type' UPPER '=>' typedef_term '=' concrete_rhs\n      { at $1 (CTypE (CstTypeAlias (Just $2) $4 $6)) }\n  | 'type' LOWER '=>' typedef_term '=' concrete_rhs\n      { at $1 (CTypE (CstTypeAlias (Just $2) $4 $6)) }\n  | 'type' UPPER typedef_params '=' type\n      { at $1 (CTypE (CstTypeAlias Nothing (TV (getName $2), $3) ($5, False))) }\n  | 'type' UPPER typedef_params\n      { at $1 (CTypE (CstTypeAliasForward (TV (getName $2), $3))) }\n  | 'type' '(' UPPER typedef_params ')' '=' type\n      { at $1 (CTypE (CstTypeAlias Nothing (TV (getName $3), $4) ($7, False))) }\n  | 'type' '(' UPPER typedef_params ')'\n      { at $1 (CTypE (CstTypeAliasForward (TV (getName $3), $4))) }\n  | nam_type typedef_term 'where' VLBRACE nam_entry_list_loc VRBRACE\n      { at (fst $1) (CTypE (CstNamTypeWhere (snd $1) $2 $5)) }\n  | nam_type typedef_term '=' nam_constructor opt_nam_entries\n      { at (fst $1) (CTypE (CstNamTypeLegacy Nothing (snd $1) $2 $4 $5)) }\n  | nam_type UPPER '=>' typedef_term '=' nam_constructor opt_nam_entries\n      { at (fst $1) (CTypE (CstNamTypeLegacy (Just $2) (snd $1) $4 $6 $7)) }\n  | nam_type LOWER '=>' typedef_term '=' nam_constructor opt_nam_entries\n      { at (fst $1) (CTypE (CstNamTypeLegacy (Just $2) (snd $1) $4 $6 $7)) }\n\nnam_type :: { (Located, NamType) }\n  : 'record'   { ($1, NamRecord) }\n  | 'object'   { ($1, NamObject) }\n  | 'table'    { ($1, NamTable) }\n\nnam_constructor :: { (Text, Bool) }\n  : STRING                    { (getString $1, True) }\n  | UPPER                     { (getName $1, False) }\n  | LOWER                     { (getName $1, False) }\n\nopt_nam_entries :: { [(Key, TypeU)] }\n  : {- empty -}              { [] }\n  | '{' nam_entries '}'       { $2 }\n\nlang_token :: { Located }\n  : UPPER                    { $1 }\n  | LOWER                    { $1 }\n\ntypedef_term :: { (TVar, [Either (TVar, Kind) TypeU]) }\n  : UPPER typedef_params              { (TV (getName $1), $2) }\n  | '(' UPPER typedef_params ')'     { (TV (getName $2), $3) }\n\ntypedef_params :: { [Either (TVar, Kind) TypeU] }\n  : {- empty -}                        { [] }\n  | typedef_params LOWER               { $1 ++ [Left (TV (getName $2), KindType)] }\n  | typedef_params '(' LOWER '::' UPPER ')'  { $1 ++ [Left (TV (getName $3), parseKind (getName $5))] }\n  | typedef_params '(' type ')'        { $1 ++ [Right $3] }\n\nnam_entry :: { (Key, TypeU) }\n  : LOWER '::' type          { (Key (getName $1), $3) }\n\nnam_entry_loc :: { (Located, Key, TypeU) }\n  : LOWER '::' type          { ($1, Key (getName $1), $3) }\n\nnam_entry_list_loc :: { [(Located, Key, TypeU)] }\n  : nam_entry_loc                              { [$1] }\n  | nam_entry_list_loc VSEMI nam_entry_loc     { $1 ++ [$3] }\n\nnam_entries :: { [(Key, TypeU)] }\n  : nam_entry                          { [$1] }\n  | nam_entries ',' nam_entry          { $1 ++ [$3] }\n\nconcrete_rhs :: { (TypeU, Bool) }\n  : STRING concrete_rhs_args    { (case $2 of { [] -> VarU (TV (getString $1)); ts -> AppU (VarU (TV (getString $1))) ts }, True) }\n  | non_string_type             { ($1, False) }\n\nconcrete_rhs_args :: { [TypeU] }\n  : {- empty -}                       { [] }\n  | concrete_rhs_args atom_type       { $1 ++ [$2] }\n\nnon_string_type :: { TypeU }\n  : non_string_non_fun '->' type  { case $3 of { FunU args ret -> FunU ($1 : args) ret; t -> FunU [$1] t } }\n  | non_string_non_fun            { $1 }\n\nnon_string_non_fun :: { TypeU }\n  : '<' LOWER '>'            { ExistU (TV (getName $2)) ([], Open) ([], Open) }\n  | '<' effect_labels '>' non_string_non_fun  { EffectU (EffectSet (Set.fromList $2)) $4 }\n  | non_string_add            { $1 }\n\nnon_string_add :: { TypeU }\n  : non_string_add '+' non_string_mul  { NatAddU $1 $3 }\n  | non_string_add '-' non_string_mul  { NatSubU $1 $3 }\n  | non_string_mul                      { $1 }\n\nnon_string_mul :: { TypeU }\n  : non_string_mul '*' non_string_app  { NatMulU $1 $3 }\n  | non_string_mul '/' non_string_app  { NatDivU $1 $3 }\n  | non_string_app                      { $1 }\n\nnon_string_app :: { TypeU }\n  : non_string_app atom_type  { applyType $1 $2 }\n  | non_string_atom           { $1 }\n\nnon_string_atom :: { TypeU }\n  : '(' ')'                  { BT.unitU }\n  | '(' type ')'             { $2 }\n  | '(' type ',' type_list1 ')' { BT.tupleU ($2 : $4) }\n  | '[' type ']'              { BT.listU $2 }\n  | '?' non_string_atom       { OptionalU $2 }\n  | UPPER                     { VarU (TV (getName $1)) }\n  | LOWER ':' non_fun_type   { LabeledU (TV (getName $1)) $3 }\n  | LOWER                     { VarU (TV (getName $1)) }\n  | INTEGER                   { NatLitU (getInt $1) }\n\n--------------------------------------------------------------------\n-- Typeclasses\n--------------------------------------------------------------------\n\ntypeclass_decl :: { Loc CstExpr }\n  : 'class' class_head 'where' VLBRACE sig_list VRBRACE\n      { at $1 (CClsE $2 $5) }\n  | 'class' class_head\n      { at $1 (CClsE $2 []) }\n\nclass_head :: { CstClassHead }\n  : app_type '=>' app_type\n      { CCHConstrained $1 $3 }\n  | '(' class_constraints ')' '=>' app_type\n      { CCHMultiConstrained $2 $5 }\n  | app_type\n      { CCHSimple $1 }\n\nclass_constraints :: { [Constraint] }\n  : single_constraint                            { [$1] }\n  | class_constraints ',' single_constraint      { $1 ++ [$3] }\n\nsig_list :: { [CstSigItem] }\n  : signature                     { [$1] }\n  | sig_list VSEMI signature      { $1 ++ [$3] }\n\nsignature :: { CstSigItem }\n  : evar_or_op '::' sig_type\n      { CstSigItem (toEVar $1) $3 }\n\n--------------------------------------------------------------------\n-- Instances\n--------------------------------------------------------------------\n\ninstance_decl :: { [Loc CstExpr] }\n  : 'instance' instance_heads 'where' VLBRACE instance_items VRBRACE\n      { [at $1 (CIstE cn ts (concat $5)) | (cn, ts) <- $2] }\n  | 'instance' instance_heads\n      { [at $1 (CIstE cn ts []) | (cn, ts) <- $2] }\n\ninstance_heads :: { [(ClassName, [TypeU])] }\n  : UPPER types1                              { [(ClassName (getName $1), $2)] }\n  | instance_heads ',' UPPER types1           { $1 ++ [(ClassName (getName $3), $4)] }\n\ninstance_items :: { [[Loc CstExpr]] }\n  : instance_item                        { [$1] }\n  | instance_items VSEMI instance_item   { $1 ++ [$3] }\n\ninstance_item :: { [Loc CstExpr] }\n  : source_decl             { $1 }\n  | '%inline' source_decl   { map (\\(Loc sp e) -> Loc sp (CInlineE (Loc sp e))) $2 }\n  | sig_or_ass              { $1 }\n\n--------------------------------------------------------------------\n-- Fixity declarations\n--------------------------------------------------------------------\n\nfixity_decl :: { Loc CstExpr }\n  : 'infixl' INTEGER operator_names\n      { at $1 (CFixE InfixL (fromInteger (getInt $2)) $3) }\n  | 'infixr' INTEGER operator_names\n      { at $1 (CFixE InfixR (fromInteger (getInt $2)) $3) }\n  | 'infix' INTEGER operator_names\n      { at $1 (CFixE InfixN (fromInteger (getInt $2)) $3) }\n\noperator_names :: { [EVar] }\n  : operator_ref                         { [$1] }\n  | operator_names ',' operator_ref      { $1 ++ [$3] }\n\noperator_ref :: { EVar }\n  : '(' operator_name ')'     { EV (getOp $2) }\n  | '(' '-' ')'               { EV \"-\" }\n  | '(' '.' ')'               { EV \".\" }\n  | operator_name              { EV (getOp $1) }\n  | '.'                        { EV \".\" }\n  | '-'                        { EV \"-\" }\n  | LOWER                      { EV (getName $1) }\n\n--------------------------------------------------------------------\n-- Source declarations\n--------------------------------------------------------------------\n\nsource_decl :: { [Loc CstExpr] }\n  : 'source' lang_token opt_from '(' source_items ')'\n      { [at $1 (CSrcOldE $2 $3 $5)] }\n  | 'source' lang_token opt_from 'where' VLBRACE source_new_items VRBRACE\n      { [at $1 (CSrcNewE $2 $3 $6)] }\n\nopt_from :: { Maybe Text }\n  : {- empty -}                    { Nothing }\n  | 'from' STRING                  { Just (getString $2) }\n\nsource_items :: { [(Text, Maybe Text)] }\n  : source_item                          { [$1] }\n  | source_items ',' source_item         { $1 ++ [$3] }\n\nsource_item :: { (Text, Maybe Text) }\n  : STRING                              { (getString $1, Nothing) }\n  | STRING 'as' LOWER                   { (getString $1, Just (getName $3)) }\n  | STRING 'as' UPPER                   { (getString $1, Just (getName $3)) }\n  | STRING 'as' source_op               { (getString $1, Just $3) }\n  | source_op                           { ($1, Nothing) }\n  | source_op 'as' source_op            { ($1, Just $3) }\n  | source_op 'as' LOWER               { ($1, Just (getName $3)) }\n  | source_op 'as' UPPER               { ($1, Just (getName $3)) }\n\nsource_op :: { Text }\n  : '(' operator_name ')'              { getOp $2 }\n  | '(' '-' ')'                        { \"-\" }\n  | '(' '.' ')'                        { \".\" }\n\nsource_new_items :: { [(Bool, Text, Located)] }\n  : source_new_item                          { [$1] }\n  | source_new_items VSEMI source_new_item   { $1 ++ [$3] }\n\nsource_new_item :: { (Bool, Text, Located) }\n  : '%inline' source_new_term         { (True, fst $2, snd $2) }\n  | source_new_term                   { (False, fst $1, snd $1) }\n\nsource_new_term :: { (Text, Located) }\n  : LOWER                             { (getName $1, $1) }\n  | '(' operator_name ')'            { (getOp $2, $2) }\n  | '(' '-' ')'                      { (\"-\", $2) }\n  | '(' '.' ')'                      { (\".\", $2) }\n\n--------------------------------------------------------------------\n-- Expressions\n--------------------------------------------------------------------\n\nexpr :: { Loc CstExpr }\n  : let_expr                { $1 }\n  | lambda_expr             { $1 }\n  | guard_expr              { $1 }\n  | infix_expr              { $1 }\n  | infix_expr '::' type    { at $2 (CAnnE $1 $3) }\n\nguard_expr :: { Loc CstExpr }\n  : guard_clauses ':' expr\n      { Loc (fst (head $1) <-> $3) (CGuardExprE $1 $3) }\n\nlet_expr :: { Loc CstExpr }\n  : 'let' VLBRACE let_bindings VRBRACE 'in' expr\n      { at $1 (CLetE $3 $6) }\n  | 'let' VLBRACE let_bindings VRBRACE let_expr\n      { at $1 (CLetE $3 $5) }\n  | 'let' '{' let_bindings_explicit '}' 'in' expr\n      { at $1 (CLetE $3 $6) }\n  | 'let' '{' let_bindings_explicit '}' let_expr\n      { at $1 (CLetE $3 $5) }\n\nlet_bindings :: { [(EVar, Loc CstExpr)] }\n  : let_binding                        { [$1] }\n  | let_bindings VSEMI let_binding     { $1 ++ [$3] }\n\nlet_bindings_explicit :: { [(EVar, Loc CstExpr)] }\n  : let_binding                              { [$1] }\n  | let_bindings_explicit ';' let_binding    { $1 ++ [$3] }\n\nlet_binding :: { (EVar, Loc CstExpr) }\n  : LOWER '=' expr                     { (EV (getName $1), $3) }\n  | '_' '=' expr                       { (EV \"_\", $3) }\n  | LOWER guard_clauses ':' expr\n      { (EV (getName $1), Loc ($1 <-> $4) (CGuardExprE $2 $4)) }\n\nlambda_expr :: { Loc CstExpr }\n  : '\\\\' lower_names1 '->' expr\n      { at $1 (CLamE (map EV $2) $4) }\n\ninfix_expr :: { Loc CstExpr }\n  : operand                  { $1 }\n  | operand operator_name expr\n      { at $2 (CBopE $1 $2 $3) }\n  | operand '-' expr\n      { at $2 (CBopE $1 $2 $3) }\n  | operand '.' expr\n      { at $2 (CBopE $1 $2 $3) }\n\noperand :: { Loc CstExpr }\n  : app_expr                 { $1 }\n  | '-' INTEGER              { at $1 (CIntE (negate (getInt $2))) }\n  | '-' FLOAT                { at $1 (CRealE (DS.fromFloatDigits (negate (getFloat $2)))) }\n\napp_expr :: { Loc CstExpr }\n  : force_expr                     { $1 }\n  | force_expr atom_exprs1         { Loc ($1 <-> last $2) (CAppE $1 $2) }\n\nforce_expr :: { Loc CstExpr }\n  : '!' atom_expr                  { Loc ($1 <-> $2) (CForceE $2) }\n  | atom_expr                      { $1 }\n\natom_exprs1 :: { [Loc CstExpr] }\n  : force_expr                     { [$1] }\n  | atom_exprs1 force_expr         { $1 ++ [$2] }\n\natom_expr :: { Loc CstExpr }\n  : paren_expr                { $1 }\n  | getter_expr               { $1 }\n  | string_expr               { $1 }\n  | bool_expr                 { $1 }\n  | num_expr                  { $1 }\n  | list_expr                 { $1 }\n  | record_expr               { $1 }\n  | var_expr                  { $1 }\n  | hole_expr                 { $1 }\n  | do_expr                   { $1 }\n  | null_expr                 { $1 }\n  | intrinsic_expr            { $1 }\n\nnull_expr :: { Loc CstExpr }\n  : 'Null'                    { at $1 CNullE }\n\nintrinsic_expr :: { Loc CstExpr }\n  : INTRINSIC                 { at $1 (CIntrinsicE (getIntrinsicName $1)) }\n\nparen_expr :: { Loc CstExpr }\n  : '(' ')'                   { at $1 CUniE }\n  | '(' operator_name ')'     { at $1 (CVarE (EV (getOp $2))) }\n  | '(' '-' ')'               { at $1 (CVarE (EV \"-\")) }\n  | '(' '.' ')'               { at $1 (CVarE (EV \".\")) }\n  | '(' expr ')'              { Loc ($1 <-> $3) (CParenE $2) }\n  | '(' expr ',' expr_list1 ')' { Loc ($1 <-> $5) (CTupE ($2 : $4)) }\n\nexpr_list1 :: { [Loc CstExpr] }\n  : expr                       { [$1] }\n  | expr_list1 ',' expr        { $1 ++ [$3] }\n\nrecord_expr :: { Loc CstExpr }\n  : '{' record_entries '}'    { Loc ($1 <-> $3) (CNamE $2) }\n\nrecord_entries :: { [(Key, Loc CstExpr)] }\n  : record_entry                         { [$1] }\n  | record_entries ',' record_entry      { $1 ++ [$3] }\n\nrecord_entry :: { (Key, Loc CstExpr) }\n  : LOWER '=' expr                       { (Key (getName $1), $3) }\n\nlist_expr :: { Loc CstExpr }\n  : '[' ']'                   { Loc ($1 <-> $2) (CLstE []) }\n  | '[' expr_list1 ']'        { Loc ($1 <-> $3) (CLstE $2) }\n\ndo_expr :: { Loc CstExpr }\n  : 'do' VLBRACE do_stmts VRBRACE     { Loc ($1 <-> $4) (CDoE $3) }\n  | 'do' '{' do_stmts_explicit '}'    { Loc ($1 <-> $4) (CDoE $3) }\n\ndo_stmts :: { [CstDoStmt] }\n  : do_stmt                   { $1 }\n  | do_stmts VSEMI do_stmt    { $1 ++ $3 }\n\ndo_stmts_explicit :: { [CstDoStmt] }\n  : do_stmt                              { $1 }\n  | do_stmts_explicit ';' do_stmt        { $1 ++ $3 }\n\ndo_stmt :: { [CstDoStmt] }\n  : LOWER '<-' expr            { [CstDoBind (EV (getName $1)) $3] }\n  | 'let' VLBRACE let_bindings VRBRACE\n      { [CstDoLet (EV v) e | (EV v, e) <- $3] }\n  | expr                       { [CstDoBare $1] }\n\ngetter_expr :: { Loc CstExpr }\n  : GDOT accessor_body      { at $1 (CAccessorE $2) }\n  | GDOTCHAIN accessor_body { at $1 (CAccessorE $2) }\n\naccessor_body :: { CstAccessorBody }\n  : LOWER accessor_tail           { CABKey (getName $1) $2 }\n  | INTEGER accessor_tail         { CABIdx (fromInteger (getInt $1)) $2 }\n  | '(' grouped_accessors ')'    { CABGroup $2 }\n\naccessor_tail :: { CstAccessorTail }\n  : {- empty -}                   { CATEnd }\n  | '=' expr                      { CATSet $2 }\n  | GDOTCHAIN accessor_body       { CATChain $2 }\n\ngrouped_accessors :: { [CstAccessorBody] }\n  : grouped_accessor                          { [$1] }\n  | grouped_accessors ',' grouped_accessor   { $1 ++ [$3] }\n\ngrouped_accessor :: { CstAccessorBody }\n  : GDOT accessor_body      { $2 }\n  | GDOTCHAIN accessor_body { $2 }\n\nvar_expr :: { Loc CstExpr }\n  : LOWER NSDOT LOWER         { Loc ($1 <-> $3) (CVarE (EV (getName $1 <> \".\" <> getName $3))) }\n  | LOWER LABELCOLON LOWER    { Loc ($1 <-> $3) (CLabeledVarE (getName $1) (EV (getName $3))) }\n  | LOWER                     { at $1 (CVarE (EV (getName $1))) }\n\nhole_expr :: { Loc CstExpr }\n  : '_'                        { at $1 CHolE }\n\nbool_expr :: { Loc CstExpr }\n  : 'True'                     { at $1 (CLogE True) }\n  | 'False'                    { at $1 (CLogE False) }\n\nnum_expr :: { Loc CstExpr }\n  : INTEGER                    { at $1 (CIntE (getInt $1)) }\n  | FLOAT                      { at $1 (CRealE (DS.fromFloatDigits (getFloat $1))) }\n\nstring_expr :: { Loc CstExpr }\n  : STRING                     { at $1 (CStrE (getString $1)) }\n  | interp_string              { $1 }\n\ninterp_string :: { Loc CstExpr }\n  : STRSTART interp_body STREND\n      { Loc ($1 <-> $3) (CInterpE (getString $1) (fst $2) (snd $2) (getString $3)) }\n\ninterp_body :: { ([Loc CstExpr], [Text]) }\n  : INTERPOPEN expr INTERPCLOSE\n      { ([$2], []) }\n  | interp_body STRMID INTERPOPEN expr INTERPCLOSE\n      { let (es, ms) = $1 in (es ++ [$4], ms ++ [getString $2]) }\n\n--------------------------------------------------------------------\n-- Types\n--------------------------------------------------------------------\n\ntype :: { TypeU }\n  : fun_type                 { $1 }\n  | non_fun_type             { $1 }\n\nfun_type :: { TypeU }\n  : non_fun_type '->' type   { case $3 of { FunU args ret -> FunU ($1 : args) ret; t -> FunU [$1] t } }\n\nnon_fun_type :: { TypeU }\n  : '<' LOWER '>'            { ExistU (TV (getName $2)) ([], Open) ([], Open) }\n  | '<' effect_labels '>' non_fun_type  { EffectU (EffectSet (Set.fromList $2)) $4 }\n  | add_type                  { $1 }\n\nadd_type :: { TypeU }\n  : add_type '+' mul_type     { NatAddU $1 $3 }\n  | add_type '-' mul_type     { NatSubU $1 $3 }\n  | mul_type                  { $1 }\n\nmul_type :: { TypeU }\n  : mul_type '*' app_type     { NatMulU $1 $3 }\n  | mul_type '/' app_type     { NatDivU $1 $3 }\n  | app_type                  { $1 }\n\napp_type :: { TypeU }\n  : app_type atom_type        { applyType $1 $2 }\n  | atom_type                 { $1 }\n\natom_type :: { TypeU }\n  : '(' ')'                  { BT.unitU }\n  | '(' type ')'             { $2 }\n  | '(' type ',' type_list1 ')' { BT.tupleU ($2 : $4) }\n  | '[' type ']'              { BT.listU $2 }\n  | '?' atom_type             { OptionalU $2 }\n  | UPPER                     { VarU (TV (getName $1)) }\n  | LOWER ':' non_fun_type   { $3 }\n  | LOWER                     { VarU (TV (getName $1)) }\n  | STRING                    { VarU (TV (getString $1)) }\n  | INTEGER                   { NatLitU (getInt $1) }\n\ntype_list1 :: { [TypeU] }\n  : type                      { [$1] }\n  | type_list1 ',' type       { $1 ++ [$3] }\n\ntypes1 :: { [TypeU] }\n  : atom_type                  { [$1] }\n  | types1 atom_type           { $1 ++ [$2] }\n\neffect_labels :: { [EffectLabel] }\n  : UPPER                       { [getName $1] }\n  | effect_labels ',' UPPER     { $1 ++ [getName $3] }\n\n--------------------------------------------------------------------\n-- Constraints and signature types\n--------------------------------------------------------------------\n\nsig_type :: { CstSigType }\n  : sig_fun_args '=>' sig_fun_args\n      { CstSigType (Just $1) $3 }\n  | sig_fun_args\n      { CstSigType Nothing $1 }\n\nsig_fun_args :: { [(Pos, TypeU)] }\n  : pos_non_fun_type '->' sig_fun_args  { $1 : $3 }\n  | pos_non_fun_type                     { [$1] }\n\npos_non_fun_type :: { (Pos, TypeU) }\n  : '<' LOWER '>'     { (locPos $1, ExistU (TV (getName $2)) ([], Open) ([], Open)) }\n  | '<' effect_labels '>' pos_non_fun_type  { (locPos $1, EffectU (EffectSet (Set.fromList $2)) (snd $4)) }\n  | pos_add_type       { $1 }\n\npos_add_type :: { (Pos, TypeU) }\n  : pos_add_type '+' mul_type  { (fst $1, NatAddU (snd $1) $3) }\n  | pos_add_type '-' mul_type  { (fst $1, NatSubU (snd $1) $3) }\n  | pos_mul_type                { $1 }\n\npos_mul_type :: { (Pos, TypeU) }\n  : pos_mul_type '*' app_type  { (fst $1, NatMulU (snd $1) $3) }\n  | pos_mul_type '/' app_type  { (fst $1, NatDivU (snd $1) $3) }\n  | pos_app_type                { $1 }\n\npos_app_type :: { (Pos, TypeU) }\n  : pos_app_type atom_type  { (fst $1, applyType (snd $1) $2) }\n  | pos_atom_type            { $1 }\n\npos_atom_type :: { (Pos, TypeU) }\n  : '(' ')'                     { (locPos $1, BT.unitU) }\n  | '(' type ')'                { (locPos $1, $2) }\n  | '(' type ',' type_list1 ')' { (locPos $1, BT.tupleU ($2 : $4)) }\n  | '[' type ']'                { (locPos $1, BT.listU $2) }\n  | '?' pos_atom_type            { (locPos $1, OptionalU (snd $2)) }\n  | UPPER                       { (locPos $1, VarU (TV (getName $1))) }\n  | LOWER ':' non_fun_type      { (locPos $1, LabeledU (TV (getName $1)) $3) }\n  | LOWER                       { (locPos $1, VarU (TV (getName $1))) }\n  | STRING                      { (locPos $1, VarU (TV (getString $1))) }\n  | INTEGER                     { (locPos $1, NatLitU (getInt $1)) }\n\nsingle_constraint :: { Constraint }\n  : UPPER types1                         { Constraint (ClassName (getName $1)) $2 }\n\n--------------------------------------------------------------------\n-- Helpers\n--------------------------------------------------------------------\n\noperator_name :: { Located }\n  : OPERATOR                  { $1 }\n  | '+'                       { $1 }\n  | '*'                       { $1 }\n  | '<'                       { $1 }\n  | '>'                       { $1 }\n  | '/'                       { $1 }\n\nevar_or_op :: { Located }\n  : LOWER                     { $1 }\n  | '(' operator_name ')'     { $2 }\n  | '(' '-' ')'               { $2 }\n  | '(' '.' ')'               { $2 }\n\nopt_where_decls :: { [Loc CstExpr] }\n  : {- empty -}                               { [] }\n  | 'where' VLBRACE where_items VRBRACE       { $3 }\n  | 'where' '{' where_items_explicit '}'      { $3 }\n\nwhere_items :: { [Loc CstExpr] }\n  : where_item                      { $1 }\n  | where_items VSEMI where_item    { $1 ++ $3 }\n\nwhere_items_explicit :: { [Loc CstExpr] }\n  : where_item                             { $1 }\n  | where_items_explicit ';' where_item    { $1 ++ $3 }\n\nwhere_item :: { [Loc CstExpr] }\n  : sig_or_ass                { $1 }\n\nlower_names :: { [Text] }\n  : {- empty -}               { [] }\n  | lower_names LOWER         { $1 ++ [getName $2] }\n\nlower_names1 :: { [Text] }\n  : LOWER                     { [getName $1] }\n  | lower_names1 LOWER        { $1 ++ [getName $2] }\n\n{\n\n--------------------------------------------------------------------\n-- Parser monad\n--------------------------------------------------------------------\n\ndata PState = PState\n  { psExpIndex    :: !Int\n  , psSourceMap   :: !(Map.Map Int SrcLoc)\n  , psModulePath  :: !(Maybe Path)\n  , psModuleConfig :: !ModuleConfig\n  , psDocMap      :: !(Map.Map Pos [Text])\n  , psSourceLines :: ![Text]\n  , psLangMap :: !(Map.Map T.Text Lang) -- alias -> Lang for all known languages\n  , psProjectRoot :: !(Maybe Path) -- project root (directory of entry-point file)\n  , psTermDocs    :: !(Map.Map EVar [Text])\n  , psWarnings    :: ![Text] -- docstring warnings accumulated during desugar\n  , psModuleDoc   :: ![Text] -- module-level description\n  , psModuleEpilogues :: ![[Text]] -- epilogue blocks\n  }\n  deriving (Show)\n\nemptyPState :: PState\nemptyPState = PState 1 Map.empty Nothing defaultValue Map.empty [] Map.empty Nothing Map.empty [] [] []\n\ntype P a = State.StateT PState (Either ParseError) a\n\n--------------------------------------------------------------------\n-- Token extraction helpers\n--------------------------------------------------------------------\n\ngetName :: Located -> Text\ngetName (Located _ (TokLowerName n) _) = n\ngetName (Located _ (TokUpperName n) _) = n\ngetName (Located _ _ t) = t\n\ngetInt :: Located -> Integer\ngetInt (Located _ (TokInteger n) _) = n\ngetInt _ = 0\n\ngetFloat :: Located -> Double\ngetFloat (Located _ (TokFloat d) _) = d\ngetFloat _ = 0\n\ngetString :: Located -> Text\ngetString (Located _ (TokString s) _) = s\ngetString (Located _ (TokStringStart s) _) = s\ngetString (Located _ (TokStringMid s) _) = s\ngetString (Located _ (TokStringEnd s) _) = s\ngetString (Located _ _ t) = t\n\ngetIntrinsicName :: Located -> Text\ngetIntrinsicName (Located _ (TokIntrinsic n) _) = n\ngetIntrinsicName _ = \"\"\n\nparseKind :: Text -> Kind\nparseKind \"Nat\" = KindNat\nparseKind _ = KindType\n\ngetOp :: Located -> Text\ngetOp (Located _ (TokOperator t) _) = t\ngetOp (Located _ TokMinus _) = \"-\"\ngetOp (Located _ TokStar _) = \"*\"\ngetOp (Located _ TokDot _) = \".\"\ngetOp (Located _ TokLAngle _) = \"<\"\ngetOp (Located _ TokRAngle _) = \">\"\ngetOp (Located _ _ t) = t\n\ntoEVar :: Located -> EVar\ntoEVar (Located _ (TokLowerName n) _) = EV n\ntoEVar (Located _ (TokOperator n) _) = EV n\ntoEVar (Located _ TokMinus _) = EV \"-\"\ntoEVar (Located _ TokStar _) = EV \"*\"\ntoEVar (Located _ TokDot _) = EV \".\"\ntoEVar (Located _ TokLAngle _) = EV \"<\"\ntoEVar (Located _ TokRAngle _) = EV \">\"\ntoEVar _ = EV \"?\"\n\n--------------------------------------------------------------------\n-- Type helper\n--------------------------------------------------------------------\n\napplyType :: TypeU -> TypeU -> TypeU\napplyType (AppU f args) x = AppU f (args ++ [x])\napplyType f x = AppU f [x]\n\n--------------------------------------------------------------------\n-- Error handling\n--------------------------------------------------------------------\n\nparseError :: ([Located], [String]) -> P a\nparseError ([], expected) = do\n  srcLines <- State.gets psSourceLines\n  State.lift (Left (ParseError (Pos 0 0 \"\") \"unexpected end of input\" expected srcLines))\nparseError (Located pos tok _ : _, expected) = do\n  srcLines <- State.gets psSourceLines\n  State.lift (Left (ParseError pos (\"unexpected \" ++ showToken tok) expected srcLines))\n\n--------------------------------------------------------------------\n-- Desugar bridge\n--------------------------------------------------------------------\n\ntoDState :: PState -> DState\ntoDState ps = DState\n  { dsExpIndex = psExpIndex ps\n  , dsSourceMap = psSourceMap ps\n  , dsDocMap = psDocMap ps\n  , dsModulePath = psModulePath ps\n  , dsModuleConfig = psModuleConfig ps\n  , dsSourceLines = psSourceLines ps\n  , dsLangMap = psLangMap ps\n  , dsProjectRoot = psProjectRoot ps\n  , dsTermDocs = psTermDocs ps\n  , dsWarnings = psWarnings ps\n  , dsModuleDoc = psModuleDoc ps\n  , dsModuleEpilogues = psModuleEpilogues ps\n  }\n\nfromDState :: PState -> DState -> PState\nfromDState ps ds = ps\n  { psExpIndex = dsExpIndex ds\n  , psSourceMap = dsSourceMap ds\n  , psTermDocs = dsTermDocs ds\n  , psWarnings = dsWarnings ds\n  , psModuleDoc = dsModuleDoc ds\n  , psModuleEpilogues = dsModuleEpilogues ds\n  }\n\n-- | Run parse + desugar\nparseAndDesugar :: PState -> [Located] -> Either ParseError ([ExprI], PState)\nparseAndDesugar pstate tokens =\n  case State.runStateT (parseProgram tokens) pstate of\n    Left err -> Left err\n    Right ((cstNodes, isImplicitMain), _parseState) ->\n      let dstate = toDState pstate\n      in case State.runStateT (desugarProgram isImplicitMain cstNodes) dstate of\n        Left err -> Left err\n        Right (exprIs, finalDState) ->\n          Right (exprIs, fromDState pstate finalDState)\n\n-- | Parse and desugar a single expression\nparseAndDesugarExpr :: PState -> [Located] -> Either ParseError (ExprI, PState)\nparseAndDesugarExpr pstate tokens =\n  case State.runStateT (parseExprOnly tokens) pstate of\n    Left err -> Left err\n    Right (cstExpr, _parseState) ->\n      let dstate = toDState pstate\n      in case State.runStateT (desugarExpr cstExpr) dstate of\n        Left err -> Left err\n        Right (exprI, finalDState) ->\n          Right (exprI, fromDState pstate finalDState)\n\n--------------------------------------------------------------------\n-- Public API\n--------------------------------------------------------------------\n\nreadProgram ::\n  Maybe MVar ->\n  Maybe Path ->\n  Text ->\n  PState ->\n  DAG MVar Import ExprI ->\n  Either String (DAG MVar Import ExprI, PState)\nreadProgram _moduleName modulePath sourceCode pstate dag = do\n  let filename = maybe \"<expr>\" id modulePath\n  (tokens, docMap, groupToks) <- case lexMorloc filename sourceCode of\n    Left err -> Left (showLexError err)\n    Right r -> Right r\n  let srcLines = T.lines sourceCode\n      pstate' = pstate { psModulePath = modulePath, psDocMap = docMap, psSourceLines = srcLines }\n  -- Strategy 1: parse as-is (code with module declarations)\n  case parseAndDesugar pstate' tokens of\n    Right (result, finalState) ->\n      let dag' = foldl addModule dag result\n          dag'' = attachGroupAnnotations tokens groupToks dag'\n      in return (dag'', finalState)\n    Left err ->\n      -- Strategy 2: wrap in module, patch trailing expr as __expr__ assignment.\n      let wrappedCode = \"module main (*)\\n\" <> sourceCode\n      in case lexMorloc filename wrappedCode of\n        Right (wrappedTokens, wrappedDocMap, wrappedGroupToks) ->\n          let pstate'' = pstate' { psDocMap = wrappedDocMap, psSourceLines = T.lines wrappedCode }\n          in case parseAndDesugar pstate'' wrappedTokens of\n            Right (result, finalState) ->\n              let dag' = foldl addModule dag result\n                  dag'' = attachGroupAnnotations wrappedTokens wrappedGroupToks dag'\n              in return (dag'', finalState)\n            Left _ ->\n              case patchForTrailingExpr wrappedTokens of\n                Just patchedTokens ->\n                  case parseAndDesugar pstate'' patchedTokens of\n                    Right (result, finalState) ->\n                      let dag' = foldl addModule dag result\n                          dag'' = attachGroupAnnotations patchedTokens wrappedGroupToks dag'\n                      in return (dag'', finalState)\n                    Left _ -> tryExprFallback tokens pstate' dag filename err\n                Nothing -> tryExprFallback tokens pstate' dag filename err\n        Left _ -> tryExprFallback tokens pstate' dag filename err\n  where\n    tryExprFallback tokens' ps dag' filename' origErr =\n      let exprTokens = stripLayoutTokens tokens'\n      in case parseAndDesugarExpr ps exprTokens of\n        Right (exprI, exprState) -> do\n          let s = exprState\n              i1 = psExpIndex s\n              assI = ExprI i1 (AssE (EV \"__expr__\") exprI [])\n              s1 = s { psExpIndex = i1 + 1 }\n              i2 = psExpIndex s1\n              expI = ExprI i2 (ExpE ExportAll)\n              s2 = s1 { psExpIndex = i2 + 1 }\n              i3 = psExpIndex s2\n              modI = ExprI i3 (ModE (MV \"main\") [expI, assI])\n              finalState = s2 { psExpIndex = i3 + 1 }\n              dag'' = Map.insert (MV \"main\") (modI, []) dag'\n          return (dag'', finalState)\n        Left _ ->\n          Left (showParseError filename' origErr)\n\n    addModule d e@(ExprI _ (ModE n es)) =\n      let imports = [(importModuleName i', i') | (ExprI _ (ImpE i')) <- es]\n      in Map.insert n (e, imports) d\n    addModule _ _ = error \"expected a module\"\n\npatchForTrailingExpr :: [Located] -> Maybe [Located]\npatchForTrailingExpr tokens = do\n  let tokens' = patchExport tokens\n  patchLastStmt tokens'\n\npatchExport :: [Located] -> [Located]\npatchExport [] = []\npatchExport (t@(Located _ TokLParen _) : Located p TokStar _ : rest) =\n  t : Located p (TokLowerName \"__expr__\") \"__expr__\" : rest\npatchExport (t : rest) = t : patchExport rest\n\npatchLastStmt :: [Located] -> Maybe [Located]\npatchLastStmt tokens =\n  case findLastTopVSemi tokens 0 0 Nothing of\n    Just idx ->\n      let (before, after) = splitAt (idx + 1) tokens\n          dummyPos = Pos 0 0 \"<expr>\"\n          exprTok = Located dummyPos (TokLowerName \"__expr__\") \"__expr__\"\n          eqTok = Located dummyPos TokEquals \"=\"\n      in Just (before ++ [exprTok, eqTok] ++ after)\n    Nothing -> Nothing\n  where\n    findLastTopVSemi :: [Located] -> Int -> Int -> Maybe Int -> Maybe Int\n    findLastTopVSemi [] _ _ lastIdx = lastIdx\n    findLastTopVSemi (Located _ TokVLBrace _ : rest) depth pos lastIdx =\n      findLastTopVSemi rest (depth + 1) (pos + 1) lastIdx\n    findLastTopVSemi (Located _ TokVRBrace _ : rest) depth pos lastIdx =\n      findLastTopVSemi rest (max 0 (depth - 1)) (pos + 1) lastIdx\n    findLastTopVSemi (Located _ TokVSemi _ : rest) depth pos _\n      | depth == 1 = findLastTopVSemi rest depth (pos + 1) (Just pos)\n    findLastTopVSemi (_ : rest) depth pos lastIdx =\n      findLastTopVSemi rest depth (pos + 1) lastIdx\n\nstripLayoutTokens :: [Located] -> [Located]\nstripLayoutTokens = filter (not . isLayoutToken)\n  where\n    isLayoutToken (Located _ TokVLBrace _) = True\n    isLayoutToken (Located _ TokVRBrace _) = True\n    isLayoutToken (Located _ TokVSemi _) = True\n    isLayoutToken _ = False\n\nreadType :: Text -> Either String TypeU\nreadType typeStr = do\n  let initState = emptyPState\n  (tokens, _, _) <- case lexMorloc \"<type>\" typeStr of\n    Left err -> Left (showLexError err)\n    Right r -> Right r\n  (result, _) <- case State.runStateT (parseTypeOnly tokens) initState of\n    Left err -> Left (showParseError \"<type>\" err)\n    Right r -> Right r\n  return result\n\n-- | Post-process the DAG to attach group annotations from --* tokens.\nattachGroupAnnotations :: [Located] -> [Located] -> DAG MVar Import ExprI -> DAG MVar Import ExprI\nattachGroupAnnotations _ [] dag = dag\nattachGroupAnnotations tokens groupToks dag =\n  let groupHeaders = parseGroupHeaders groupToks\n      exportSymPositions = findExportSymbolPositions tokens\n      membership = buildMembership groupHeaders exportSymPositions\n      ghdrMap = Map.fromList [(n, d) | (n, d, _) <- groupHeaders]\n  in Map.map (\\(e, es) -> (attachToExpr membership ghdrMap e, es)) dag\n  where\n    attachToExpr :: Map.Map T.Text T.Text -> Map.Map T.Text [T.Text] -> ExprI -> ExprI\n    attachToExpr mem ghdrs (ExprI i (ModE m es)) =\n      ExprI i (ModE m (map (attachToExpr mem ghdrs) es))\n    attachToExpr mem ghdrs (ExprI i (ExpE (ExportMany symbols _))) =\n      let groupedSymNames = Map.keysSet mem\n          groupNames = nubText [gn | (_, gn) <- Map.toList mem]\n          exportGroups =\n            [ ExportGroup gn (maybe [] id (Map.lookup gn ghdrs))\n                (Set.filter (\\(_, sym) -> Map.lookup (symText sym) mem == Just gn) symbols)\n            | gn <- groupNames\n            ]\n          ungrouped = Set.filter (\\(_, sym) -> not (Set.member (symText sym) groupedSymNames)) symbols\n      in ExprI i (ExpE (ExportMany ungrouped exportGroups))\n    attachToExpr _ _ e = e\n\n    nubText :: [T.Text] -> [T.Text]\n    nubText [] = []\n    nubText (x:xs) = x : nubText (filter (/= x) xs)\n\n    symText :: Symbol -> T.Text\n    symText (TermSymbol (EV n)) = n\n    symText (TypeSymbol (TV n)) = n\n    symText (ClassSymbol (ClassName n)) = n\n\nparseGroupHeaders :: [Located] -> [(T.Text, [T.Text], Pos)]\nparseGroupHeaders = foldl' accum [] . map extractLine\n  where\n    extractLine (Located pos (TokGroupLine txt) _) = (pos, stripOne txt)\n    extractLine (Located pos _ _) = (pos, T.empty)\n\n    -- consume one leading space after --*, preserve remaining indentation\n    stripOne t = T.stripEnd $ case T.uncons t of\n      Just (' ', rest) -> rest\n      _ -> t\n\n    accum :: [(T.Text, [T.Text], Pos)] -> (Pos, T.Text) -> [(T.Text, [T.Text], Pos)]\n    accum gs (pos, line)\n      | Just rest <- T.stripPrefix \"\\\\\" line = addDesc gs pos (T.stripEnd rest)\n      | Just name <- T.stripPrefix \"group:\" (T.stripStart line) =\n          let name' = T.strip name\n          in if T.null name'\n             then gs ++ [(T.empty, [], pos)]  -- --* group: (no name) = terminator\n             else case gs of\n               -- last entry has no name yet: set it\n               _ | not (null gs), let (n, _, _) = last gs, T.null n ->\n                   init gs ++ [let (_, ds, p) = last gs in (name', ds, p)]\n               _ -> gs ++ [(name', [], pos)]\n      | otherwise = addDesc gs pos line  -- includes blank lines\n\n    addDesc [] pos d = [(T.empty, [d], pos)]  -- no group yet, start unnamed entry\n    addDesc gs _ d = init gs ++ [let (n, ds, p) = last gs in (n, ds ++ [d], p)]\n\nfindExportSymbolPositions :: [Located] -> [(T.Text, Pos)]\nfindExportSymbolPositions = findModule\n  where\n    findModule (Located _ TokModule _ : rest) = findLParen rest\n    findModule (_ : rest) = findModule rest\n    findModule [] = []\n\n    findLParen (Located _ TokLParen _ : rest) = scanExports 1 rest\n    findLParen (Located _ TokStar _ : _) = []\n    findLParen (_ : rest) = findLParen rest\n    findLParen [] = []\n\n    scanExports :: Int -> [Located] -> [(T.Text, Pos)]\n    scanExports 0 _ = []\n    scanExports depth (Located _ TokLParen _ : rest) = scanExports (depth + 1) rest\n    scanExports depth (Located _ TokRParen _ : rest)\n      | depth <= 1 = []\n      | otherwise = scanExports (depth - 1) rest\n    scanExports depth (Located pos (TokLowerName n) _ : rest) = (n, pos) : scanExports depth rest\n    scanExports depth (Located pos (TokUpperName n) _ : rest) = (n, pos) : scanExports depth rest\n    scanExports depth (_ : rest) = scanExports depth rest\n    scanExports _ [] = []\n\nbuildMembership :: [(T.Text, [T.Text], Pos)] -> [(T.Text, Pos)] -> Map.Map T.Text T.Text\nbuildMembership groupHeaders exportSyms = Map.fromList\n  [ (sym, gname)\n  | (sym, symPos) <- exportSyms\n  , Just gname <- [findGroup symPos]\n  ]\n  where\n    sortedGroups = sortBy (\\(_,_,p1) (_,_,p2) -> compare p1 p2) groupHeaders\n\n    findGroup :: Pos -> Maybe T.Text\n    findGroup symPos = case filter (\\(_,_,gpos) -> gpos < symPos) (reverse sortedGroups) of\n      ((gname,_,_):_)\n        | T.null gname -> Nothing  -- empty name = group terminator\n        | otherwise -> Just gname\n      [] -> Nothing\n}\n"
  },
  {
    "path": "library/Morloc/Frontend/Restructure.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE TupleSections #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Frontend.Restructure\nDescription : Resolve imports, exports, binary operators, holes, and type aliases\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nTransforms the raw module DAG into the form expected by 'Link' and 'Treeify'.\nThis pass resolves import\\/export edges to alias maps, converts binary\noperator chains into correctly-associated application trees (via Pratt\nparsing), expands hole expressions into lambdas, removes self-referential\ntype declarations, and collects type definitions and source mappings into\n'MorlocState'.\n-}\nmodule Morloc.Frontend.Restructure (restructure) where\n\nimport Data.Set (Set)\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Morloc.Data.DAG as DAG\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport Morloc.Data.Map (Map)\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Frontend.AST as AST\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\n\n-- | Resolve type aliases, term aliases and import/exports\nrestructure ::\n  DAG MVar Import ExprI ->\n  MorlocMonad (DAG MVar [AliasedSymbol] ExprI)\nrestructure s = do\n  -- Set the counter for reindexing expressions.\n  --\n  -- This is the first use of the morloc state counter. The indices currently\n  -- in the tree were generated by the lexer state counter. So now we need\n  -- to find the largest index in the tree and resume counting from there.\n  -- Since d is the entire tree, the initizalized counter will start at global maximum.\n  MM.setCounter $ maximum (map AST.maxIndex (DAG.nodes s)) + 1\n\n  checkForSelfRecursion s -- currently, do no not allow type self-recursion\n    >>= resolveImports -- rewrite DAG edges to map imported terms to their aliases\n    >>= handleBinops -- first resolve binary operators\n    >>= resolveHoles -- then holes\n    >>= refineKinds -- promote VarU to NatVarU based on typedef param kinds (before self-defs are removed)\n      |>> handleTypeDeclarations\n    >>= doM collectTags\n    >>= doM collectTypes\n    >>= (\\x -> collectUniversalTypes >> return x)\n    >>= doM collectSources\n\ndoM :: (Monad m) => (a -> m ()) -> a -> m a\ndoM f x = f x >> return x\n\n{- | Check for infinitely expanding self-recursive types\n\nThere are cases were the defined term may appear on the right. For example:\n\n  type Py (Tree n e l) = \"Tree\" n e l\n\nHere the general type Tree is mapped to the concrete type \"Tree\" in Python.\nThe fact that the general and concrete names are the same is fine. They are\ndifferent languages. But what about:\n\n  type (Tree n) = Node n [Tree n]\n\nThis type should be legal, but currently it is not supported. Which\nis why I need to raise an explicit error to avoid infinite loops.\n-}\ncheckForSelfRecursion :: DAG k e ExprI -> MorlocMonad (DAG k e ExprI)\ncheckForSelfRecursion d = do\n  _ <- DAG.mapNodeM (AST.checkExprI isExprSelfRecursive) d\n  return d\n  where\n    -- A typedef is self-recursive if its name appears in its definition\n    isExprSelfRecursive :: ExprI -> MorlocMonad ()\n    -- Allow general type existence statements without parameters\n    isExprSelfRecursive (ExprI _ (TypE (ExprTypeE Nothing _ [] _ _))) = return ()\n    --  and also with parameters\n    isExprSelfRecursive (ExprI i (TypE (ExprTypeE Nothing v vs t _)))\n      | t == AppU (VarU v) (map (either (VarU . fst) id) vs) = return ()\n      | hasTerm v t = MM.throwSourcedError i $ \"Found unsupported self-recursive type alias:\" <+> pretty v\n      | otherwise = return ()\n    -- otherwise disallow self-recursion\n    isExprSelfRecursive (ExprI i (TypE (ExprTypeE _ v ts t _)))\n      | any (hasTerm v) (t : (rights ts)) =\n          MM.throwSourcedError i $ \"Found unsupported self-recursive type alias:\" <+> pretty v\n      | otherwise = return ()\n    isExprSelfRecursive _ = return ()\n\n    -- check if a given term appears in a type\n    hasTerm :: TVar -> TypeU -> Bool\n    hasTerm v (VarU v') = v == v'\n    hasTerm _ (NatVarU _) = False\n    hasTerm v (ForallU _ t) = hasTerm v t\n    hasTerm v (FunU (t1 : rs) t2) = hasTerm v t1 || hasTerm v (FunU rs t2)\n    hasTerm v (FunU [] t) = hasTerm v t\n    hasTerm v (AppU t1 (t2 : rs)) = hasTerm v t2 || hasTerm v (AppU t1 rs)\n    hasTerm v (AppU t1 []) = hasTerm v t1\n    hasTerm v (NamU o n ps ((_, t) : rs)) = hasTerm v t || hasTerm v (NamU o n ps rs)\n    hasTerm v (NamU o n (p : ps) []) = hasTerm v p || hasTerm v (NamU o n ps [])\n    hasTerm _ (NamU _ _ [] []) = False\n    hasTerm v (EffectU _ t) = hasTerm v t\n    hasTerm v (OptionalU t) = hasTerm v t\n    hasTerm _ (NatLitU _) = False\n    hasTerm v (NatAddU a b) = hasTerm v a || hasTerm v b\n    hasTerm v (NatMulU a b) = hasTerm v a || hasTerm v b\n    hasTerm v (NatSubU a b) = hasTerm v a || hasTerm v b\n    hasTerm v (NatDivU a b) = hasTerm v a || hasTerm v b\n    hasTerm v (LabeledU _ t) = hasTerm v t\n    hasTerm _ ExistU {} = error \"There should not be existentionals in typedefs\"\n\nresolveHoles ::\n  DAG MVar [AliasedSymbol] ExprI ->\n  MorlocMonad (DAG MVar [AliasedSymbol] ExprI)\nresolveHoles = DAG.mapNodeM unhole\n  where\n    unhole :: ExprI -> MorlocMonad ExprI\n    unhole e@(ExprI _ (LstE _)) = unholeContainer e\n    unhole e@(ExprI _ (TupE _)) = unholeContainer e\n    unhole e@(ExprI _ (NamE _)) = unholeContainer e\n    unhole (ExprI i (AppE e0 es0)) =\n      case length [HolE | (ExprI _ HolE) <- es0] of\n        0 -> AppE <$> unhole e0 <*> mapM unhole es0 |>> ExprI i\n        n -> do\n          lambdaIndex <- MM.getCounter\n          let vs = map (nameHole lambdaIndex) [1 .. n]\n              (_, es) = statefulMap insertHole vs es0\n          newApp <- AppE <$> unhole e0 <*> mapM unhole es\n          return $ ExprI lambdaIndex (LamE vs (ExprI i newApp))\n      where\n        insertHole :: [EVar] -> ExprI -> ([EVar], ExprI)\n        insertHole (v : vs) (ExprI j HolE) = (vs, ExprI j (VarE defaultValue v))\n        insertHole vs e = (vs, e)\n    -- simple recursion\n    unhole (ExprI i (ModE m es)) = ModE m <$> mapM unhole es |>> ExprI i\n    unhole (ExprI i (IstE c ts es)) = IstE c ts <$> mapM unhole es |>> ExprI i\n    unhole (ExprI i (AssE v e es)) = AssE v <$> unhole e <*> mapM unhole es |>> ExprI i\n    unhole (ExprI i (HolE)) = return HolE |>> ExprI i\n    unhole (ExprI i (LamE vs e)) = LamE vs <$> unhole e |>> ExprI i\n    unhole (ExprI i (AnnE e t)) = AnnE <$> unhole e <*> pure t |>> ExprI i\n    unhole (ExprI i (LetE bindings body)) = do\n      bindings' <- mapM (\\(v, e) -> (,) v <$> unhole e) bindings\n      body' <- unhole body\n      return $ ExprI i (LetE bindings' body')\n    unhole (ExprI i (IfE c t e)) = IfE <$> unhole c <*> unhole t <*> unhole e |>> ExprI i\n    unhole (ExprI i (DoBlockE e)) = DoBlockE <$> unhole e |>> ExprI i\n    unhole (ExprI i (EvalE e)) = EvalE <$> unhole e |>> ExprI i\n    unhole (ExprI i (IntrinsicE intr es)) = IntrinsicE intr <$> mapM unhole es |>> ExprI i\n    unhole (ExprI _ (BopE _ _ _ _)) = error \"Bop should have been resolved\"\n    unhole e = return e\n\n    unholeContainer :: ExprI -> MorlocMonad ExprI\n    unholeContainer e0@(ExprI i0 _) =\n      case countHoles e0 of\n        0 -> descend e0\n        n -> do\n          let vs = map (nameHole i0) [1 .. n]\n          e <- descend . snd . insertHoles vs $ e0\n          MM.sayVVV $ \"unholeContainer vs:\" <+> list (map pretty vs)\n          MM.sayVVV $ \"unholeContainer e:\" <+> pretty e\n          return $ ExprI i0 (LamE vs e)\n          where\n            insertHoles :: [EVar] -> ExprI -> ([EVar], ExprI)\n            insertHoles (v : vs) (ExprI i HolE) = (vs, ExprI i (VarE defaultValue v))\n            insertHoles vs (ExprI i (LstE es)) =\n              let (vs', es') = statefulMap insertHoles vs es\n               in (vs', ExprI i (LstE es'))\n            insertHoles vs (ExprI i (TupE es)) =\n              let (vs', es') = statefulMap insertHoles vs es\n               in (vs', ExprI i (TupE es'))\n            insertHoles vs (ExprI i (NamE (unzip -> (ks, es)))) =\n              let (vs', es') = statefulMap insertHoles vs es\n               in (vs', ExprI i (NamE (zip ks es')))\n            insertHoles vs e = (vs, e)\n\n    countHoles :: ExprI -> Int\n    countHoles (ExprI _ HolE) = 1\n    countHoles (ExprI _ (LstE xs)) = sum (map countHoles xs)\n    countHoles (ExprI _ (TupE xs)) = sum (map countHoles xs)\n    countHoles (ExprI _ (NamE (map snd -> xs))) = sum (map countHoles xs)\n    countHoles _ = 0\n\n    descend :: ExprI -> MorlocMonad ExprI\n    -- refresh\n    descend e@(ExprI _ (AppE _ _)) = unhole e\n    -- simple recurse\n    descend (ExprI i (LamE vs e)) = LamE vs <$> descend e |>> ExprI i\n    descend (ExprI i (LstE es)) = LstE <$> mapM descend es |>> ExprI i\n    descend (ExprI i (TupE es)) = TupE <$> mapM descend es |>> ExprI i\n    descend (ExprI i (NamE rs)) = NamE <$> mapM (\\(k, e) -> (,) k <$> descend e) rs |>> ExprI i\n    descend (ExprI i (AnnE e t)) = AnnE <$> descend e <*> pure t |>> ExprI i\n    descend (ExprI i (IfE c t e)) = IfE <$> descend c <*> descend t <*> descend e |>> ExprI i\n    descend (ExprI i (DoBlockE e)) = DoBlockE <$> descend e |>> ExprI i\n    descend (ExprI i (EvalE e)) = EvalE <$> descend e |>> ExprI i\n    descend (ExprI i (IntrinsicE intr es)) = IntrinsicE intr <$> mapM descend es |>> ExprI i\n    descend e = return e\n\n    -- name a hole based on the index of the new lambda and the hole position\n    nameHole :: Int -> Int -> EVar\n    nameHole lidx aidx = EV (\"_hole\" <> MT.show' lidx <> \"_\" <> MT.show' aidx)\n\n{- | Use export/import information to find which terms are imported into each module\n* reduces the Import edge type to an alias map.\n* replaces Export terms in expressions\n-}\nresolveImports ::\n  DAG MVar Import ExprI ->\n  MorlocMonad (DAG MVar [AliasedSymbol] ExprI)\nresolveImports d0 =\n  DAG.synthesize resolveExports resolveEdge d0\n    >>= maybe (MM.throwSystemError \"Cyclical import dependency in resolveImports\") return\n  where\n    -- Collect all exported terms from a module (including those imported\n    -- without qualification. Then update the ExpE term\n    resolveExports :: MVar -> ExprI -> [(MVar, Import, ExprI)] -> MorlocMonad ExprI\n    resolveExports m e children = do\n      let allLocalSymbols = findSymbols e -- Set Symbol\n          export = AST.findExport e -- Export\n      allImportedSymbols <-\n        Set.unions <$> mapM (\\(_, imp', expr') -> filterImports m imp' (AST.findExport expr')) children\n\n      let allSymbols = Set.union allLocalSymbols allImportedSymbols\n\n      case export of\n        ExportAll -> do\n          exports <- mapM addIndex (Set.toList allSymbols) |>> Set.fromList\n          return $ AST.setExport (ExportMany exports []) e\n        (ExportMany ungroupedExports groups) ->\n          let allExplicit = Set.unions (ungroupedExports : [exportGroupMembers g | g <- groups])\n              resolved = resolveExplicitTypeclasses allSymbols allExplicit\n              missing = Set.map snd resolved `Set.difference` allSymbols\n           in if Set.null missing\n                then do\n                  -- Rebuild groups with resolved typeclasses\n                  let resolvedGroups =\n                        map\n                          (\\g -> g {exportGroupMembers = resolveExplicitTypeclasses allSymbols (exportGroupMembers g)})\n                          groups\n                      resolvedUngrouped = resolveExplicitTypeclasses allSymbols ungroupedExports\n                  return $ AST.setExport (ExportMany resolvedUngrouped resolvedGroups) e\n                else\n                  MM.throwSystemError $\n                    \"Module\"\n                      <+> squotes (pretty m)\n                      <+> \"does not export the following terms or types:\"\n                      <+> list (map pretty (Set.toList missing))\n\n    resolveExplicitTypeclasses :: Set Symbol -> Set (Int, Symbol) -> Set (Int, Symbol)\n    resolveExplicitTypeclasses ss sis = Set.map f sis\n      where\n        f :: (Int, Symbol) -> (Int, Symbol)\n        f (i, TypeSymbol (TV x))\n          | (ClassSymbol (ClassName x)) `Set.member` ss = (i, ClassSymbol (ClassName x))\n          | otherwise = (i, TypeSymbol (TV x))\n        f x = x\n\n    addIndex :: a -> MorlocMonad (Int, a)\n    addIndex x = (,) <$> MM.getCounter <*> pure x\n\n    -- TODO: distinguish between these expressions at the type-level\n    --       the contains unresolved imports, that later resolved\n    resolveEdge ::\n      Import ->\n      ExprI -> -- importing module expression (with resolved exports)\n      ExprI -> -- imported module expression  (with resolved exports)\n      MorlocMonad [AliasedSymbol]\n    resolveEdge imp _ childX = case (importInclude imp, importNamespace imp, AST.findExport childX) of\n      (_, _, ExportAll) -> error \"This should have been resolved already\"\n      -- No namespace: existing behavior\n      (Nothing, Nothing, ExportMany exps gs) ->\n        let allExps = Set.unions (exps : [exportGroupMembers g | g <- gs])\n         in return $ map (toAliasedSymbol . snd) (Set.toList allExps)\n      (Just ass, Nothing, ExportMany exps gs) -> return . catMaybes $ map (importAlias . unAliasedSymbol) ass\n        where\n          allExps = Set.unions (exps : [exportGroupMembers g | g <- gs])\n          exportMap = Map.fromList [(unSymbol s, s) | (_, s) <- Set.toList allExps]\n          excludes = map unSymbol (importExclude imp)\n\n          importAlias :: (Text, Text) -> Maybe AliasedSymbol\n          importAlias (name, alias)\n            | name `elem` excludes = Nothing\n            | otherwise = case Map.lookup name exportMap of\n                Nothing -> Nothing\n                (Just (TermSymbol _)) -> Just $ AliasedTerm (EV name) (EV alias)\n                (Just (TypeSymbol _)) -> Just $ AliasedType (TV name) (TV alias)\n                (Just (ClassSymbol _)) -> Just $ AliasedClass (ClassName name)\n      -- With namespace: prefix term aliases (V1: types/classes not prefixed)\n      (Nothing, Just ns, ExportMany exps gs) ->\n        let allExps = Set.unions (exps : [exportGroupMembers g | g <- gs])\n         in return $ map (prefixAlias ns . toAliasedSymbol . snd) (Set.toList allExps)\n      (Just ass, Just ns, ExportMany exps gs) -> return . catMaybes $ map (importAlias . unAliasedSymbol) ass\n        where\n          allExps = Set.unions (exps : [exportGroupMembers g | g <- gs])\n          exportMap = Map.fromList [(unSymbol s, s) | (_, s) <- Set.toList allExps]\n          excludes = map unSymbol (importExclude imp)\n\n          importAlias :: (Text, Text) -> Maybe AliasedSymbol\n          importAlias (name, alias)\n            | name `elem` excludes = Nothing\n            | otherwise = case Map.lookup name exportMap of\n                Nothing -> Nothing\n                (Just (TermSymbol _)) -> Just $ prefixAlias ns (AliasedTerm (EV name) (EV alias))\n                (Just (TypeSymbol _)) -> Just $ AliasedType (TV name) (TV alias)\n                (Just (ClassSymbol _)) -> Just $ AliasedClass (ClassName name)\n\n    prefixAlias :: EVar -> AliasedSymbol -> AliasedSymbol\n    prefixAlias (EV ns) (AliasedTerm orig (EV alias)) = AliasedTerm orig (EV (ns <> \".\" <> alias))\n    prefixAlias _ sym = sym\n\n    filterImports ::\n      MVar ->\n      Import -> -- the current node import list\n      Export -> -- the imported modules export list\n      MorlocMonad (Set Symbol)\n    -- No namespace, no include list: import everything\n    filterImports _ (Import _ Nothing exclude Nothing) (ExportMany exports gs) =\n      let allExports = Set.unions (exports : [exportGroupMembers g | g <- gs])\n       in return $ (Set.map snd allExports) `Set.difference` (Set.fromList exclude)\n    -- With namespace, no include list: prefix all terms\n    filterImports _ (Import _ Nothing exclude (Just (EV ns))) (ExportMany exports gs) =\n      let allExports = Set.unions (exports : [exportGroupMembers g | g <- gs])\n       in return $ Set.map (prefixSymbol ns) ((Set.map snd allExports) `Set.difference` (Set.fromList exclude))\n    -- No namespace, with include list: existing behavior\n    filterImports m1 (Import m2 (Just as) (map unSymbol -> exclude) Nothing) (ExportMany exports gs) =\n      case partitionEithers . catMaybes $ map importAlias (map unAliasedSymbol as) of\n        ([], imps) -> return $ Set.fromList imps\n        (missing, _) ->\n          MM.throwSystemError $\n            \"The terms imported from\"\n              <+> squotes (pretty m1)\n              <+> \"are not exported from module\"\n              <+> squotes (pretty m2)\n              <> \":\\n\"\n                <+> indent 2 (vsep (map pretty missing))\n      where\n        allExports = Set.unions (exports : [exportGroupMembers g | g <- gs])\n        exportMap = Map.fromList [(unSymbol s, s) | (_, s) <- Set.toList allExports]\n\n        importAlias :: (Text, Text) -> Maybe (Either Text Symbol)\n        importAlias (name, alias)\n          | name `elem` exclude = Nothing\n          | otherwise = case Map.lookup name exportMap of\n              Nothing -> Just (Left name)\n              (Just (TermSymbol _)) -> Just . Right $ TermSymbol (EV alias)\n              (Just (TypeSymbol _)) -> Just . Right $ TypeSymbol (TV alias)\n              (Just (ClassSymbol _)) -> Just . Right $ ClassSymbol (ClassName alias)\n    -- With namespace and include list: prefix selected terms\n    filterImports m1 (Import m2 (Just as) (map unSymbol -> exclude) (Just (EV ns))) (ExportMany exports gs) =\n      case partitionEithers . catMaybes $ map importAlias (map unAliasedSymbol as) of\n        ([], imps) -> return $ Set.fromList imps\n        (missing, _) ->\n          MM.throwSystemError $\n            \"The terms imported from\"\n              <+> squotes (pretty m1)\n              <+> \"are not exported from module\"\n              <+> squotes (pretty m2)\n              <> \":\\n\"\n                <+> indent 2 (vsep (map pretty missing))\n      where\n        allExports = Set.unions (exports : [exportGroupMembers g | g <- gs])\n        exportMap = Map.fromList [(unSymbol s, s) | (_, s) <- Set.toList allExports]\n\n        importAlias :: (Text, Text) -> Maybe (Either Text Symbol)\n        importAlias (name, alias)\n          | name `elem` exclude = Nothing\n          | otherwise = case Map.lookup name exportMap of\n              Nothing -> Just (Left name)\n              (Just (TermSymbol _)) -> Just . Right $ TermSymbol (EV (ns <> \".\" <> alias))\n              (Just (TypeSymbol _)) -> Just . Right $ TypeSymbol (TV alias)\n              (Just (ClassSymbol _)) -> Just . Right $ ClassSymbol (ClassName alias)\n    filterImports _ _ _ = error \"Unreachable -- all Export values should have been converted to ExportMany\"\n\n    prefixSymbol :: Text -> Symbol -> Symbol\n    prefixSymbol ns (TermSymbol (EV name)) = TermSymbol (EV (ns <> \".\" <> name))\n    prefixSymbol _ sym = sym\n\n    findSymbols :: ExprI -> Set Symbol\n    findSymbols (ExprI _ (ModE _ es)) = Set.unions (map findSymbols es)\n    findSymbols (ExprI _ (TypE (ExprTypeE _ v _ _ _))) = Set.singleton $ TypeSymbol v\n    findSymbols (ExprI _ (AssE e _ _)) = Set.singleton $ TermSymbol e\n    findSymbols (ExprI _ (ClsE (Typeclass _ cls _ _))) = Set.singleton $ ClassSymbol cls\n    findSymbols (ExprI _ (SigE (Signature e _ _))) = Set.singleton $ TermSymbol e\n    findSymbols (ExprI _ (SrcE src)) = Set.singleton $ TermSymbol (srcAlias src)\n    -- The definition of an instance does not automatically imply export or make\n    -- the values available. The instance is ALWAYS relative to the class\n    -- definition (either local or imported).\n    findSymbols (ExprI _ (IstE cls _ _)) = Set.singleton $ ClassSymbol cls\n    findSymbols _ = Set.empty\n\n    unSymbol :: Symbol -> Text\n    unSymbol (TypeSymbol (TV v)) = v\n    unSymbol (TermSymbol (EV v)) = v\n    unSymbol (ClassSymbol (ClassName v)) = v\n\n    unAliasedSymbol :: AliasedSymbol -> (Text, Text)\n    unAliasedSymbol (AliasedType x y) = (unTVar x, unTVar y)\n    unAliasedSymbol (AliasedTerm x y) = (unEVar x, unEVar y)\n    unAliasedSymbol (AliasedClass x) = (unClassName x, unClassName x)\n\n    toAliasedSymbol :: Symbol -> AliasedSymbol\n    toAliasedSymbol (TypeSymbol x) = AliasedType x x\n    toAliasedSymbol (TermSymbol x) = AliasedTerm x x\n    toAliasedSymbol (ClassSymbol x) = AliasedClass x\n\nhandleTypeDeclarations ::\n  DAG k e ExprI ->\n  DAG k e ExprI\nhandleTypeDeclarations = DAG.mapNode f\n  where\n    f (ExprI i (ModE m es)) = ExprI i (ModE m (filter isNotSelfDef es))\n    f e = e\n\n    isNotSelfDef :: ExprI -> Bool\n    isNotSelfDef (ExprI _ (TypE (ExprTypeE Nothing v [] (VarU v') _))) = v /= v'\n    isNotSelfDef (ExprI _ (TypE (ExprTypeE Nothing (VarU -> v) (map (either (VarU . fst) id) -> vs) (AppU v' vs') _))) =\n      v /= v' || length vs /= length vs' || not (all (uncurry (==)) (zip vs vs'))\n    isNotSelfDef _ = True\n\nhandleBinops :: DAG MVar [AliasedSymbol] ExprI -> MorlocMonad (DAG MVar [AliasedSymbol] ExprI)\nhandleBinops d0 = do\n  mayN <- DAG.synthesize updateNode (\\e _ _ -> return e) d0\n  case mayN of\n    (Just e') -> return $ DAG.mapNode (\\(e, _, _) -> e) e'\n    Nothing -> error \"Unreachable?\"\n  where\n    updateNode ::\n      MVar ->\n      ExprI ->\n      [(MVar, [AliasedSymbol], (ExprI, Map ClassName [EVar], Map EVar (Associativity, Int)))] ->\n      MorlocMonad (ExprI, Map ClassName [EVar], Map EVar (Associativity, Int))\n    updateNode _ e es = do\n      thisFixityMap <- AST.findFixityMap e\n\n      let clsOps = Map.unions $ findClassOps e : [filterClsOps ss cs | (_, ss, (_, cs, _)) <- es]\n          clsOpSet = Set.fromList . concat $ Map.elems clsOps\n\n      fixityMap <-\n        mergeFixityMaps $ thisFixityMap : [filterTerms clsOpSet m ss | (_, ss, (_, _, m)) <- es]\n      e' <- updateBinopExprs fixityMap e\n      return (e', clsOps, fixityMap)\n\n    filterClsOps :: [AliasedSymbol] -> Map ClassName [EVar] -> Map ClassName [EVar]\n    filterClsOps ass clsmap =\n      let clss = [cls | (AliasedClass cls) <- ass]\n       in Map.filterWithKey (\\k _ -> elem k clss) clsmap\n\n    findClassOps :: ExprI -> Map ClassName [EVar]\n    findClassOps (ExprI _ (ModE _ es)) = Map.unions (map findClassOps es)\n    findClassOps (ExprI _ (ClsE (Typeclass _ cls _ sigs))) = Map.singleton cls [v | (Signature v _ _) <- sigs]\n    findClassOps _ = Map.empty\n\n    filterTerms :: Set EVar -> Map EVar a -> [AliasedSymbol] -> Map EVar a\n    filterTerms cs m ss = Map.union unaliasedOps clsOps\n      where\n        symMap = Map.fromList [(k, v) | (AliasedTerm k v) <- ss]\n\n        -- gather non-typeclass operator aliases\n        unaliasedOps = Map.fromList . catMaybes $ [Map.lookup k symMap |>> (,v) | (k, v) <- Map.toList m]\n\n        -- gather typeclass operators aliases\n        clsOps = Map.filterWithKey (\\k _ -> Set.member k cs) m\n\n    mergeFixityMaps :: (Eq a) => [Map EVar a] -> MorlocMonad (Map EVar a)\n    mergeFixityMaps [] = return Map.empty\n    mergeFixityMaps [e1] = return e1\n    mergeFixityMaps (e1 : e2 : es) = do\n      e' <- foldlM strictInsert e1 (Map.toList e2)\n      mergeFixityMaps (e' : es)\n\n    strictInsert :: (Eq v) => Map EVar v -> (EVar, v) -> MorlocMonad (Map EVar v)\n    strictInsert m (k, v) = case Map.lookup k m of\n      Nothing -> return $ Map.insert k v m\n      (Just v') ->\n        if v == v'\n          then return m\n          else MM.throwSystemError $ \"Conflicting fixity definitions for\" <+> pretty k\n\n    updateBinopExprs :: Map EVar (Associativity, Int) -> ExprI -> MorlocMonad ExprI\n    updateBinopExprs m0 = f\n      where\n        f e@(ExprI _ BopE {}) = resolveBinop m0 e >>= f\n        f (ExprI i (ModE m es)) = ModE m <$> mapM f es |>> ExprI i\n        f (ExprI i (IstE cls ts es)) = IstE cls ts <$> mapM f es |>> ExprI i\n        f (ExprI i (AssE v e es)) = AssE v <$> f e <*> mapM f es |>> ExprI i\n        f (ExprI i (LstE es)) = LstE <$> mapM f es |>> ExprI i\n        f (ExprI i (TupE es)) = TupE <$> mapM f es |>> ExprI i\n        f (ExprI i (NamE rs)) = do\n          es' <- mapM (f . snd) rs\n          return $ ExprI i (NamE (zip (map fst rs) es'))\n        f (ExprI i (AppE e es)) = AppE <$> f e <*> mapM f es |>> ExprI i\n        f (ExprI i (LamE vs e)) = LamE vs <$> f e |>> ExprI i\n        f (ExprI i (AnnE e t)) = AnnE <$> f e <*> pure t |>> ExprI i\n        f (ExprI i (LetE bindings body)) = do\n          bindings' <- mapM (\\(v, e) -> (,) v <$> f e) bindings\n          body' <- f body\n          return $ ExprI i (LetE bindings' body')\n        f (ExprI i (IfE c t e)) = IfE <$> f c <*> f t <*> f e |>> ExprI i\n        f (ExprI i (DoBlockE e)) = DoBlockE <$> f e |>> ExprI i\n        f (ExprI i (EvalE e)) = EvalE <$> f e |>> ExprI i\n        f (ExprI i (IntrinsicE intr es)) = IntrinsicE intr <$> mapM f es |>> ExprI i\n        f (ExprI _ (ParenE inner)) = f inner\n        f e = return e\n\n    -- \\| Rewrite a right-nested BopE chain into a correctly-associated AppE tree.\n    -- Uses the Pratt (precedence climbing) algorithm.\n    -- Operators not in fixMap default to infixl 9.\n    resolveBinop :: Map EVar (Associativity, Int) -> ExprI -> MorlocMonad ExprI\n    resolveBinop fixMap expr = do\n      let (lhs0, ops) = flatten expr\n      (result, _) <- pratt 0 lhs0 ops\n      return result\n      where\n        lookupFixity :: EVar -> (Associativity, Int)\n        lookupFixity v = Map.findWithDefault (InfixL, 9) v fixMap\n\n        -- Walk the right spine of BopE nodes into a flat list.\n        -- Each entry: (outerIdx, opIdx, opName, rightOperand)\n        flatten :: ExprI -> (ExprI, [(Int, Int, EVar, ExprI)])\n        flatten (ExprI outerI (BopE lhs opI op rhs)) =\n          let (rhsFirst, rhsRest) = flatten rhs\n           in (lhs, (outerI, opI, op, rhsFirst) : rhsRest)\n        flatten e = (e, [])\n\n        -- Pratt loop: consume operators with prec >= minPrec.\n        -- Returns the parsed lhs and the unconsumed tail.\n        pratt :: Int -> ExprI -> [(Int, Int, EVar, ExprI)] -> MorlocMonad (ExprI, [(Int, Int, EVar, ExprI)])\n        pratt _ lhs [] = return (lhs, [])\n        pratt minPrec lhs ((outerI, opI, op, rhs) : rest) = do\n          let (assoc, prec) = lookupFixity op\n          if prec < minPrec\n            then return (lhs, (outerI, opI, op, rhs) : rest)\n            else do\n              let nextMinPrec = if assoc == InfixR then prec else prec + 1\n              (rhsParsed, remaining) <- pratt nextMinPrec rhs rest\n              -- Ambiguity check: incompatible fixities at the same precedence\n              case remaining of\n                ((_, _, nextOp, _) : _) -> do\n                  let (nextAssoc, nextPrec) = lookupFixity nextOp\n                  when (nextPrec == prec && (assoc /= nextAssoc || assoc == InfixN)) . MM.throwSourcedError opI $\n                    \"Ambiguous use of\" <+> pretty op <+> \"and\" <+> pretty nextOp\n                      <> \": parenthesize or declare compatible fixities\"\n                [] -> return ()\n              let lhs' = ExprI outerI $ AppE (ExprI opI (VarE defaultValue op)) [lhs, rhsParsed]\n              pratt minPrec lhs' remaining\n\ncollectTags :: DAG MVar [AliasedSymbol] ExprI -> MorlocMonad ()\ncollectTags fullDag = do\n  _ <- DAG.mapNodeM f fullDag\n  return ()\n  where\n    -- \\* add ManifoldConfigs associated with VarE types to MorlocMonad state\n    -- \\* the configs store the metadata associated with the term tags\n    -- \\* later we use the manifold indices to lookup things like runtime info\n    --   (threads required and such)\n    f :: ExprI -> MorlocMonad ()\n    f (ExprI i (VarE config _)) = do\n      s <- MM.get\n      MM.put (s {stateManifoldConfig = Map.insert i config (stateManifoldConfig s)})\n    f (ExprI _ (ModE _ es)) = mapM_ f es\n    f (ExprI _ (IstE _ _ es)) = mapM_ f es\n    f (ExprI _ (AssE _ e es)) = mapM_ f (e : es)\n    f (ExprI _ (LstE es)) = mapM_ f es\n    f (ExprI _ (TupE es)) = mapM_ f es\n    f (ExprI _ (NamE rs)) = mapM_ (f . snd) rs\n    f (ExprI _ (AppE e es)) = mapM_ f (e : es)\n    f (ExprI _ (LamE _ e)) = f e\n    f (ExprI _ (AnnE e _)) = f e\n    f (ExprI _ (LetE bindings body)) = mapM_ (f . snd) bindings >> f body\n    f _ = return ()\n\ntype GCMap = (Scope, Map Lang Scope)\n\n{- | Add the following fields to state:\n  * stateGeneralTypedefs           :: GMap Int MVar Scope\n  * stateConcreteTypedefs          :: GMap Int MVar (Map Lang Scope)\n-}\ncollectTypes :: DAG MVar [AliasedSymbol] ExprI -> MorlocMonad ()\ncollectTypes fullDag = do\n  let typeDag = DAG.mapEdge (\\xs -> [(x, y) | AliasedType x y <- xs]) fullDag\n  result <- DAG.synthesizeNodes formTypes typeDag\n  case result of\n    Nothing -> MM.throwSystemError \"Found cyclic module dependency\"\n    Just _ -> return ()\n  where\n    formTypes ::\n      MVar ->\n      ExprI ->\n      [ ( MVar -- child module name\n        , [(TVar, TVar)] -- alias map\n        , GCMap\n        )\n      ] ->\n      MorlocMonad GCMap\n    formTypes m e0 childImports = do\n      let (generalTypemap, concreteTypemapsIncomplete) = foldl inherit (AST.findTypedefs e0) childImports\n\n      -- Here we are creating links from every indexed term in the module to the module\n      -- sources and aliases. When the module abstractions are factored out later,\n      -- this will be the only way to access module-specific info.\n      let indices = AST.getIndices e0\n\n      -- link concrete records to their full general forms\n      let concreteTypemaps = Map.map (completeRecords generalTypemap) concreteTypemapsIncomplete\n\n      s <- MM.get\n      MM.put\n        ( s\n            { stateGeneralTypedefs = GMap.insertMany indices m generalTypemap (stateGeneralTypedefs s)\n            , stateConcreteTypedefs = GMap.insertMany indices m concreteTypemaps (stateConcreteTypedefs s)\n            }\n        )\n\n      return (generalTypemap, concreteTypemaps)\n\n    inherit :: GCMap -> (key, [(TVar, TVar)], GCMap) -> GCMap\n    inherit (thisGmap, thisCmap) (_, links, (gmap, cmap)) =\n      let gmap' = filterAndSubstitute links gmap\n          cmap' = Map.map (filterAndSubstitute links) cmap\n       in ( Map.unionWith mergeEntries gmap' thisGmap\n          , Map.unionWith (Map.unionWith mergeEntries) cmap' thisCmap\n          )\n\n{- | collect type definitions globally\n  define:\n    * stateUniversalGeneralTypedefs\n    * stateUniversalConcreteTypedefs\n-}\ncollectUniversalTypes :: MorlocMonad ()\ncollectUniversalTypes = do\n  universalGeneralScope <- getUniversalGeneralScope\n  universalConcreteScope <- getUniversalConcreteScope universalGeneralScope\n\n  s <- MM.get\n  MM.put\n    ( s\n        { stateUniversalGeneralTypedefs = universalGeneralScope\n        , stateUniversalConcreteTypedefs = universalConcreteScope\n        }\n    )\n  where\n    getUniversalGeneralScope :: MorlocMonad Scope\n    getUniversalGeneralScope = do\n      (GMap _ (Map.elems -> scopes)) <- MM.gets stateGeneralTypedefs\n      return $ Map.unionsWith mergeEntries scopes\n\n    getUniversalConcreteScope :: Scope -> MorlocMonad (Map Lang Scope)\n    getUniversalConcreteScope gscope = do\n      (GMap _ modMaps) <- MM.gets stateConcreteTypedefs\n      let langs = unique $ concatMap Map.keys . Map.elems $ modMaps\n      scopes <- mapM getLangScope langs\n      return . Map.fromList $ zip langs scopes\n      where\n        getLangScope :: Lang -> MorlocMonad Scope\n        getLangScope lang = do\n          (GMap _ (Map.elems -> langMaps)) <- MM.gets stateConcreteTypedefs\n          -- See note above, here we are completing any incomplete concrete\n          -- record/table/object types\n          let langMaps' = map (Map.map (completeRecords gscope)) langMaps\n          return . Map.unionsWith mergeEntries . mapMaybe (Map.lookup lang) $ langMaps'\n\n{- | links the general entries from records to their abbreviated concrete cousins.\nFor example:\n  record (Person a) = Person {name :: Str, info a}\n  record Py => Person a = \"dict\"\nThis syntax avoids the need to duplicate the entire entry\n-}\ncompleteRecords :: Scope -> Scope -> Scope\ncompleteRecords gscope = Map.mapWithKey (completeRecord gscope)\n  where\n    completeRecord ::\n      Scope ->\n      TVar ->\n      [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)] ->\n      [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)]\n    completeRecord g v xs = case Map.lookup v g of\n      (Just ys) -> map (completeValue [(vs, t) | (vs, t, _, _) <- ys]) xs\n      Nothing -> xs\n\n    completeValue ::\n      [([Either (TVar, Kind) TypeU], TypeU)] ->\n      ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) ->\n      ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)\n    completeValue ((vs, NamU _ _ ps rs) : _) (_, NamU o v _ [], d, terminal) = (vs, NamU o v ps rs, d, terminal)\n    completeValue _ x = x\n\n-- merge type functions, names of generics do not matter\nmergeEntries ::\n  [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)] ->\n  [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)] ->\n  [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)]\nmergeEntries xs0 ys0 = filter (isNovel ys0) xs0 <> ys0\n  where\n    isNovel ::\n      [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)] -> ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) -> Bool\n    isNovel [] _ = True\n    isNovel ((vs2, t2, _, isTerminal1) : ys) x@(vs1, t1, _, isTerminal2)\n      | (length vs1 == length vs2)\n          && t1 == foldl (\\t (v1, v2) -> rename v2 v1 t) t2 [(fst v1, fst v2) | (Left v1, Left v2) <- zip vs1 vs2]\n          && isTerminal1 == isTerminal2 =\n          False\n      | otherwise = isNovel ys x\n\n-- clean imports\n--   * only keep the exports of a module that are explicitly imported\n--   * resolve any aliases\nfilterAndSubstitute :: [(TVar, TVar)] -> Scope -> Scope\nfilterAndSubstitute links typemap =\n  let importedTypes = Map.filterWithKey (\\k _ -> k `elem` map fst links) typemap\n   in foldl typeSubstitute importedTypes links\n  where\n    typeSubstitute ::\n      Scope -> -- imported map\n      (TVar, TVar) -> -- source name and local alias\n      Scope -- renamed map\n    typeSubstitute typedefs (sourceName, localAlias) =\n      case Map.lookup sourceName typedefs of\n        (Just xs) ->\n          Map.insert\n            localAlias\n            (map (\\(a, b, c, d) -> (a, rename sourceName localAlias b, c, d)) xs)\n            (Map.delete sourceName typedefs)\n        Nothing -> typedefs\n\n-- | Promote VarU to NatVarU based on typedef param kinds in all signatures.\n-- For each typedef like `type Tensor2 (d1 :: Nat) (d2 :: Nat) a`, build a map\n-- from type name to param kinds, then walk all types and upgrade KindType to\n-- KindNat for variables that appear in nat-kinded positions.\nrefineKinds ::\n  DAG MVar [AliasedSymbol] ExprI ->\n  MorlocMonad (DAG MVar [AliasedSymbol] ExprI)\nrefineKinds dag = do\n  let allParams = concat [collectAllTypeDefParams e | e <- DAG.nodes dag]\n      kindMap = Map.fromList [(v, map (either snd (const KindType)) ps) | (v, ps) <- allParams, any (\\p -> either (\\(_, k) -> k == KindNat) (const False) p) ps]\n  if Map.null kindMap\n    then return dag\n    else DAG.mapNodeM (refineExprKinds kindMap) dag\n  where\n    collectAllTypeDefParams :: ExprI -> [(TVar, [Either (TVar, Kind) TypeU])]\n    collectAllTypeDefParams (ExprI _ (ModE _ es)) = concatMap collectAllTypeDefParams es\n    collectAllTypeDefParams (ExprI _ (TypE (ExprTypeE _ v ps _ _))) = [(v, ps)]\n    collectAllTypeDefParams (ExprI _ (AssE _ e es)) = concatMap collectAllTypeDefParams (e:es)\n    collectAllTypeDefParams (ExprI _ (IstE _ _ es)) = concatMap collectAllTypeDefParams es\n    collectAllTypeDefParams _ = []\n\n    refineExprKinds :: Map TVar [Kind] -> ExprI -> MorlocMonad ExprI\n    refineExprKinds km = AST.mapTypeInExprI (refineTypeKinds km)\n\n    refineTypeKinds :: Map TVar [Kind] -> TypeU -> TypeU\n    refineTypeKinds km t0 =\n      let natVars = collectNatVarsFromScope km t0\n       in promoteNatVarsR natVars t0\n\n    -- Collect variables that appear in nat-kinded positions according to typedef param kinds\n    collectNatVarsFromScope :: Map TVar [Kind] -> TypeU -> Set TVar\n    collectNatVarsFromScope km = go\n      where\n        go (AppU (VarU v) args) = case Map.lookup v km of\n          Just kinds -> Set.unions $\n            [case (k, a) of\n               (KindNat, VarU tv) -> Set.singleton tv\n               _ -> go a\n            | (k, a) <- zip kinds args]\n            ++ [go a | a <- drop (length kinds) args]\n          Nothing -> Set.unions (map go args)\n        go (NatVarU v) = Set.singleton v\n        go (ForallU _ inner) = go inner\n        go (FunU args ret) = Set.unions (go ret : map go args)\n        go (NamU _ _ ps rs) = Set.unions (map go ps ++ map (go . snd) rs)\n        go (EffectU _ inner) = go inner\n        go (OptionalU inner) = go inner\n        go (NatAddU a b) = Set.union (goNat a) (goNat b)\n        go (NatMulU a b) = Set.union (goNat a) (goNat b)\n        go (NatSubU a b) = Set.union (goNat a) (goNat b)\n        go (NatDivU a b) = Set.union (goNat a) (goNat b)\n        go (LabeledU _ inner) = go inner\n        go _ = Set.empty\n\n        goNat (VarU v@(TV name))\n          | not (T.null name), isLower (T.head name) = Set.singleton v\n          | otherwise = Set.empty\n        goNat t = go t\n\n    -- Promote VarU to NatVarU for variables identified as nat-kinded,\n    -- and strip ForallU wrappers for those variables (they're implicitly quantified)\n    promoteNatVarsR :: Set TVar -> TypeU -> TypeU\n    promoteNatVarsR natVars = go\n      where\n        go (VarU v)\n          | Set.member v natVars = NatVarU v\n          | otherwise = VarU v\n        go t@(NatVarU _) = t\n        go (ForallU v t)\n          | Set.member v natVars = go t  -- strip ForallU for nat vars\n        go (ForallU v t) = ForallU v (go t)\n        go (FunU ts t) = FunU (map go ts) (go t)\n        go (AppU t ts) = AppU (go t) (map go ts)\n        go (NamU o v ps rs) = NamU o v (map go ps) [(k, go t) | (k, t) <- rs]\n        go (EffectU effs t) = EffectU effs (go t)\n        go (OptionalU t) = OptionalU (go t)\n        go (ExistU v (ps, pc) (rs, rc)) = ExistU v (map go ps, pc) (map (second go) rs, rc)\n        go t@(NatLitU _) = t\n        go (NatAddU a b) = NatAddU (go a) (go b)\n        go (NatMulU a b) = NatMulU (go a) (go b)\n        go (NatSubU a b) = NatSubU (go a) (go b)\n        go (NatDivU a b) = NatDivU (go a) (go b)\n        go (LabeledU n t) = LabeledU n (go t)\n\ncollectSources :: DAG MVar [AliasedSymbol] ExprI -> MorlocMonad ()\ncollectSources fullDag = do\n  _ <- DAG.mapNodeWithKeyM linkSources fullDag\n  return ()\n  where\n    linkSources :: MVar -> ExprI -> MorlocMonad ExprI\n    linkSources m e0 = do\n      let objSources = AST.findSources e0\n      let indices = AST.getIndices e0\n      s <- MM.get\n      MM.put (s {stateSources = GMap.insertManyWith (<>) indices m objSources (stateSources s)})\n      return e0\n\n-- Rename a variable. For example:\n--   import maps (Map as HashMap, foo, bar)\n--\n-- Here all uses `Map` in anything imported from `maps` needs to\n-- be renamed to `HashMap`. So we call:\n--   rename (TV \"Map\") (TV \"HashMap\") x\n-- where `x` is any term\nrename :: TVar -> TVar -> TypeU -> TypeU\nrename sourceName localAlias = f\n  where\n    f (VarU v)\n      | v == sourceName = VarU localAlias\n      | otherwise = VarU v\n    f (ExistU v (ps, o1) (rs, o2)) =\n      let v' = if v == sourceName then localAlias else v\n       in ExistU v' (map f ps, o1) (map (second f) rs, o2)\n    f (ForallU v t) = ForallU v (f t)\n    f (FunU ts t) = FunU (map f ts) (f t)\n    f (AppU t ts) = AppU (f t) (map f ts)\n    f (NamU o v ts rs) =\n      let v' = if v == sourceName then localAlias else v\n       in NamU o v' (map f ts) (map (second f) rs)\n    f (EffectU effs t) = EffectU effs (f t)\n    f (OptionalU t) = OptionalU (f t)\n    f t@(NatVarU _) = t\n    f t@(NatLitU _) = t\n    f (NatAddU a b) = NatAddU (f a) (f b)\n    f (NatMulU a b) = NatMulU (f a) (f b)\n    f (NatSubU a b) = NatSubU (f a) (f b)\n    f (NatDivU a b) = NatDivU (f a) (f b)\n    f (LabeledU n t) = LabeledU n (f t)\n"
  },
  {
    "path": "library/Morloc/Frontend/Token.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Frontend.Token\nDescription : Token types shared between the Alex lexer and Happy parser\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.Frontend.Token\n  ( Token (..)\n  , Located (..)\n  , Pos (..)\n  , startPos\n  , alexPos\n  , showToken\n  ) where\n\nimport Data.Text (Text)\nimport qualified Data.Text as T\n\n-- | Source position: line, column (1-based)\ndata Pos = Pos\n  { posLine :: {-# UNPACK #-} !Int\n  , posCol :: {-# UNPACK #-} !Int\n  , posFile :: !String\n  }\n  deriving (Show, Eq, Ord)\n\n-- | Initial position (line 1, column 1) for a given filename.\nstartPos :: String -> Pos\nstartPos f = Pos 1 1 f\n\n-- | Build a 'Pos' from the line and column reported by the lexer.\nalexPos :: String -> Int -> Int -> Pos\nalexPos f l c = Pos l c f\n\n-- | A token annotated with its source position and the matched text\ndata Located = Located\n  { locPos :: !Pos\n  , locToken :: !Token\n  , locText :: !Text\n  }\n  deriving (Show, Eq)\n\ndata Token\n  = -- Layout tokens (inserted by layout processor)\n\n    -- | virtual {\n    TokVLBrace\n  | -- | virtual }\n    TokVRBrace\n  | -- | virtual ;\n    TokVSemi\n  | -- Delimiters\n\n    -- | (\n    TokLParen\n  | -- | )\n    TokRParen\n  | -- | [\n    TokLBracket\n  | -- | ]\n    TokRBracket\n  | -- | {\n    TokLBrace\n  | -- | }\n    TokRBrace\n  | -- | <\n    TokLAngle\n  | -- | >\n    TokRAngle\n  | -- Punctuation\n\n    -- | ,\n    TokComma\n  | -- | ; (explicit)\n    TokSemicolon\n  | -- | backslash (lambda)\n    TokBackslash\n  | -- | _\n    TokUnderscore\n  | -- | !\n    TokBang\n  | -- | ?\n    TokQuestion\n  | -- | . (operator position, e.g., f . g)\n    TokDot\n  | -- | . (getter prefix, e.g., .name)\n    TokGetterDot\n  | -- | . (namespace qualifier, no space before dot, e.g., f.map)\n    TokNsDot\n  | -- | : (label qualifier, no space, e.g., large:mean)\n    TokLabelColon\n  | -- | . (chained getter, e.g., the second dot in .foo.bar)\n    TokGetterDotChain\n  | -- Reserved operators\n\n    -- | =\n    TokEquals\n  | -- | :\n    TokColon\n  | -- | ::\n    TokDColon\n  | -- | ->\n    TokArrow\n  | -- | =>\n    TokFatArrow\n  | -- | <-\n    TokBind\n  | -- | * (only in export context)\n    TokStar\n  | -- Keywords\n    TokModule\n  | TokImport\n  | TokExport\n  | TokSource\n  | TokFrom\n  | TokWhere\n  | TokAs\n  | TokTrue\n  | TokFalse\n  | TokType\n  | TokRecord\n  | TokObject\n  | TokTable\n  | TokClass\n  | TokInstance\n  | TokInfixl\n  | TokInfixr\n  | TokInfix\n  | TokLet\n  | TokIn\n  | TokDo\n  | TokNull\n  | -- Identifiers and literals\n\n    -- | lowercase identifier\n    TokLowerName !Text\n  | -- | uppercase identifier\n    TokUpperName !Text\n  | -- | operator symbol (e.g., +, *, .)\n    TokOperator !Text\n  | -- | - (needed separately for module names and unary negation)\n    TokMinus\n  | TokInteger !Integer\n  | TokFloat !Double\n  | -- | plain string (no interpolation)\n    TokString !Text\n  | -- | start of interpolated string: text before first #{}\n    TokStringStart !Text\n  | -- | text between #{} in interpolated string\n    TokStringMid !Text\n  | -- | text after last #{} to closing quote\n    TokStringEnd !Text\n  | -- | #{ opening interpolation\n    TokInterpOpen\n  | -- | } closing interpolation\n    TokInterpClose\n  | -- Docstrings\n\n    -- | --' followed by text\n    TokDocLine !Text\n  | -- Group annotations\n\n    -- | --* followed by text\n    TokGroupLine !Text\n  | -- Intrinsics (@name)\n    TokIntrinsic !Text\n  | -- Pragmas\n    TokPragmaInline\n  | -- Special\n    TokEOF\n  deriving (Show, Eq, Ord)\n\n-- | Human-readable token description for error messages\nshowToken :: Token -> String\nshowToken TokVLBrace = \"start of indented block\"\nshowToken TokVRBrace = \"end of indented block\"\nshowToken TokVSemi = \"new declaration\"\nshowToken TokLParen = \"'('\"\nshowToken TokRParen = \"')'\"\nshowToken TokLBracket = \"'['\"\nshowToken TokRBracket = \"']'\"\nshowToken TokLBrace = \"'{'\"\nshowToken TokRBrace = \"'}'\"\nshowToken TokLAngle = \"'<'\"\nshowToken TokRAngle = \"'>'\"\nshowToken TokComma = \"','\"\nshowToken TokSemicolon = \"';'\"\nshowToken TokBackslash = \"'\\\\'\"\nshowToken TokUnderscore = \"'_'\"\nshowToken TokBang = \"'!'\"\nshowToken TokQuestion = \"'?'\"\nshowToken TokDot = \"'.'\"\nshowToken TokGetterDot = \"'.'\"\nshowToken TokNsDot = \"'.' (namespace)\"\nshowToken TokLabelColon = \"':' (label)\"\nshowToken TokGetterDotChain = \"'.'\"\nshowToken TokEquals = \"'='\"\nshowToken TokColon = \"':'\"\nshowToken TokDColon = \"'::'\"\nshowToken TokArrow = \"'->'\"\nshowToken TokFatArrow = \"'=>'\"\nshowToken TokBind = \"'<-'\"\nshowToken TokStar = \"'*'\"\nshowToken TokModule = \"'module'\"\nshowToken TokImport = \"'import'\"\nshowToken TokExport = \"'export'\"\nshowToken TokSource = \"'source'\"\nshowToken TokFrom = \"'from'\"\nshowToken TokWhere = \"'where'\"\nshowToken TokAs = \"'as'\"\nshowToken TokTrue = \"'True'\"\nshowToken TokFalse = \"'False'\"\nshowToken TokType = \"'type'\"\nshowToken TokRecord = \"'record'\"\nshowToken TokObject = \"'object'\"\nshowToken TokTable = \"'table'\"\nshowToken TokClass = \"'class'\"\nshowToken TokInstance = \"'instance'\"\nshowToken TokInfixl = \"'infixl'\"\nshowToken TokInfixr = \"'infixr'\"\nshowToken TokInfix = \"'infix'\"\nshowToken TokLet = \"'let'\"\nshowToken TokIn = \"'in'\"\nshowToken TokDo = \"'do'\"\nshowToken TokNull = \"'Null'\"\nshowToken (TokLowerName n) = \"identifier '\" ++ T.unpack n ++ \"'\"\nshowToken (TokUpperName n) = \"type name '\" ++ T.unpack n ++ \"'\"\nshowToken (TokOperator n) = \"operator '\" ++ T.unpack n ++ \"'\"\nshowToken TokMinus = \"'-'\"\nshowToken (TokInteger _) = \"integer literal\"\nshowToken (TokFloat _) = \"float literal\"\nshowToken (TokString _) = \"string literal\"\nshowToken (TokStringStart _) = \"string literal\"\nshowToken (TokStringMid _) = \"string continuation\"\nshowToken (TokStringEnd _) = \"string end\"\nshowToken TokInterpOpen = \"'#{'\"\nshowToken TokInterpClose = \"'}' (interpolation)\"\nshowToken (TokDocLine _) = \"docstring\"\nshowToken (TokGroupLine _) = \"group annotation\"\nshowToken (TokIntrinsic n) = \"intrinsic '@\" ++ T.unpack n ++ \"'\"\nshowToken TokPragmaInline = \"'%inline'\"\nshowToken TokEOF = \"end of input\"\n"
  },
  {
    "path": "library/Morloc/Frontend/Treeify.hs",
    "content": "{-# LANGUAGE CPP #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Frontend.Treeify\nDescription : Dissolve the module DAG into per-export 'AnnoS' call trees\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nAfter linking populates 'MorlocState', this module builds one 'AnnoS' tree\nper exported function by inlining declarations, resolving sources, and\nrenaming lambda-bound variables for uniqueness. The resulting trees are the\ninput to the typechecker and code generator.\n-}\nmodule Morloc.Frontend.Treeify (treeify) where\n\nimport qualified Data.Set as Set\nimport qualified Morloc.Data.DAG as DAG\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Frontend.AST as AST\nimport qualified Morloc.Frontend.Link as MFL\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\n\n-- | Every term must either be sourced or declared.\ndata TermOrigin = Declared ExprI | Sourced Source\n  deriving (Show, Ord, Eq)\n\ndata BindKind = LambdaBound | LetBound\n  deriving (Show, Eq)\n\n-- Manage unique naming in each tree\ndata Namer = Namer\n  { namerMap :: Map.Map EVar (EVar, BindKind)\n  , namerIndex :: Int\n  , namerExpanding :: Set.Set EVar  -- functions currently being expanded (recursion detection)\n  }\n  deriving (Show)\n\n-- When I see a term, I need to look it up. To do so, I need to walk up through\n-- scope and fine all sources/declarations and type annotations. This involves\n-- walking up through where statements (shadowing is possible), up through lambdas\n-- (where again shadowing is possible), to the module scope, and to the imported\n-- scope (where terms are merged, not shadowed).\n--\n-- Generate integers for all positions in the tree, use these to map into a table that includes:\n--  * manual type annotations or signatures\n--  * inferred type annotations\n--\n-- All expressions are mapped to integer indices linking expressions to their\n-- ultimate type annotations. The indices also match terms to their signatures\n-- and locations in source code.\ntreeify ::\n  DAG MVar [AliasedSymbol] ExprI ->\n  MorlocMonad [AnnoS Int ManyPoly Int]\ntreeify d\n  | Map.size d == 0 = return []\n  | otherwise = case DAG.roots d of\n      -- if no parentless element exists, then the graph must be empty or cyclic\n      [] -> MM.throwSystemError \"cyclic import dependency in treeify\"\n      -- else if exactly one module name key (k) is found\n      [k] -> do\n        case DAG.lookupNode k d of\n          -- if the key is not in the DAG, then something is dreadfully wrong codewise\n          Nothing -> MM.throwSystemError $ \"Compiler bug (__FILE__:__LINE__): Module DAG is missing key\" <+> pretty k\n          (Just (AST.findExport -> ExportMany symbols groups)) -> do\n            d' <- DAG.mapNodeM linkAndRemoveAnnotations d\n\n            -- move all to state, after this the DAG will no longer be needed\n            _ <- MFL.link d'\n\n            -- find all term exports (ungrouped + grouped)\n            let allSymbols = Set.unions (symbols : [exportGroupMembers g | g <- groups])\n                exports = [(i, v) | (i, TermSymbol v) <- Set.toList allSymbols]\n\n            -- Build export group info for the state\n            let exportGroupInfo =\n                  Map.fromList\n                    [ ( exportGroupName g\n                      , (exportGroupDesc g, [i | (i, TermSymbol _) <- Set.toList (exportGroupMembers g)])\n                      )\n                    | g <- groups\n                    ]\n\n            -- Validate command groups\n            let ungroupedNames = Set.fromList [v | (_, TermSymbol v) <- Set.toList symbols]\n                groupNames = Set.fromList [exportGroupName g | g <- groups]\n                collisions = Set.intersection (Set.map unEVar ungroupedNames) groupNames\n            -- group names must not collide with ungrouped command names\n            if not (Set.null collisions)\n              then\n                MM.throwSystemError $\n                  \"Command group names collide with ungrouped command names:\"\n                    <+> list (map pretty (Set.toList collisions))\n              else return ()\n\n            -- - store all exported indices in state\n            -- - Add the export name to state. Failing to do so here, will lose\n            --   the name of terms that are exported but not defined, this leads\n            --   to cryptic error messages.\n            MM.modify\n              ( \\s ->\n                  s\n                    { stateExports = map fst exports\n                    , stateName = Map.union (stateName s) (Map.fromList exports)\n                    , stateExportGroups = exportGroupInfo\n                    }\n              )\n\n            -- dissolve modules, imports, and sources, leaving behind only a tree for each term exported from main\n            statefulMapM collect (Namer Map.empty 0 Set.empty) exports |>> snd\n          (Just _) ->\n            error \"This should not be possible, all ExportAll cases should have been removed in Restructure.hs\"\n\n      -- There is no currently supported use case that exposes multiple roots in\n      -- one compilation process. The compiler executable takes a single morloc\n      -- file as input, therefore this MUST be the root. In the future compiling\n      -- multiple projects in parallel with potentially shared information and\n      -- constraints could be valuable.\n      roots ->\n        MM.throwSystemError $\n          \"Compiler bug (__FILE__:__LINE__): unsupported multi-rooted module DAG:\"\n            <+> tupled (map pretty roots)\n\nlinkAndRemoveAnnotations :: ExprI -> MorlocMonad ExprI\nlinkAndRemoveAnnotations = f\n  where\n    f :: ExprI -> MorlocMonad ExprI\n    f (ExprI _ (AnnE e@(ExprI i _) t)) = do\n      --     ^                ^-- this one is connected to the given types\n      --     '-- this index disappears with the lost annotation node\n      s <- MM.get\n      MM.put $ s {stateAnnotations = Map.insert i t (stateAnnotations s)}\n      f e -- notice the topology change\n      -- everything below is boilerplate (this is why I need recursion schemes)\n    f (ExprI i (ModE v es)) = ExprI i <$> (ModE v <$> mapM f es)\n    f (ExprI i (AssE v e es)) = ExprI i <$> (AssE v <$> f e <*> mapM f es)\n    f (ExprI i (LstE es)) = ExprI i <$> (LstE <$> mapM f es)\n    f (ExprI i (TupE es)) = ExprI i <$> (TupE <$> mapM f es)\n    f (ExprI i (NamE rs)) = do\n      es' <- mapM (f . snd) rs\n      return . ExprI i $ NamE (zip (map fst rs) es')\n    f (ExprI i (AppE e es)) = ExprI i <$> (AppE <$> f e <*> mapM f es)\n    f (ExprI i (LamE vs e)) = ExprI i <$> (LamE vs <$> f e)\n    f (ExprI i (LetE bindings body)) = ExprI i <$> (LetE <$> mapM (\\(v, e) -> (,) v <$> f e) bindings <*> f body)\n    f (ExprI i (IfE c t e)) = ExprI i <$> (IfE <$> f c <*> f t <*> f e)\n    f (ExprI i (DoBlockE e)) = ExprI i <$> (DoBlockE <$> f e)\n    f (ExprI i (EvalE e)) = ExprI i <$> (EvalE <$> f e)\n    f (ExprI i (IntrinsicE intr es)) = ExprI i <$> (IntrinsicE intr <$> mapM f es)\n    f e@(ExprI _ _) = return e\n\n{- | Build the call tree for a single nexus command. The result is ambiguous,\nwith 1 or more possible tree topologies, each with one or more possible\nimplementations for each function.\n\nRewrite all lambda-bound variables to the unique names\n\"<name>@<index>\". Where \"<name>\" is the original name and \"<index>\" is an\nauto-incrementing integer. This solves naming conflicts while avoiding\nexcessive traversal of the tree.\n\nRecursion is handled via namerExpanding: when a function is being expanded,\nrecursive references to it emit CallS back-edge nodes instead of re-expanding.\n-}\ncollect ::\n  Namer ->\n  ( Int -- the general index for the term\n  , EVar -- name of root expression\n  ) ->\n  MorlocMonad (Namer, AnnoS Int ManyPoly Int)\ncollect namer0 (gi, v) = do\n  MM.sayVVV $\n    \"collect\"\n      <> \"\\n  gi:\" <+> pretty gi\n      <> \"\\n  v:\" <+> pretty v\n  (namer, e) <- collectExprS namer0 (ExprI gi (VarE defaultValue v))\n  return (namer, AnnoS gi gi e)\n\ncollectAnnoS :: Namer -> ExprI -> MorlocMonad (Namer, AnnoS Int ManyPoly Int)\ncollectAnnoS namer e@(ExprI gi _) = collectExprS namer e |>> second (AnnoS gi gi)\n\n-- | Translate ExprI to ExprS tree\ncollectExprS :: Namer -> ExprI -> MorlocMonad (Namer, ExprS Int ManyPoly Int)\ncollectExprS namer0 (ExprI gi0 e0) = f namer0 e0\n  where\n    f namer (VarE _ v)\n      | Set.member v (namerExpanding namer)\n      , Nothing <- Map.lookup v (namerMap namer) = do\n          -- Recursive reference detected (not shadowed by local binding)\n          MM.sayVVV $ \"collectExprS: recursive call to\" <+> pretty v\n          return (namer, CallS v)\n      | otherwise = do\n          MM.sayVVV $\n            \"collectExprS VarE\"\n              <> \"\\n  gi:\" <+> pretty gi0\n              <> \"\\n  v:\" <+> pretty v\n          sigs <- MM.gets stateSignatures\n\n          case GMap.lookup gi0 sigs of\n            -- A monomorphic term will have a type if it is linked to any source\n            -- since sources require signatures. But if it associated only with a\n            -- declaration, then it will have no type.\n            (GMapJust (Monomorphic t)) -> do\n              MM.sayVVV $ \"  searchged gi \" <+> pretty gi0 <+> \"for\" <+> pretty v\n\n              MM.sayVVV $ \"  monomorphic term\" <+> pretty v <> \":\" <+> maybe \"?\" pretty (termGeneral t)\n              let namer' = namer { namerExpanding = Set.insert v (namerExpanding namer) }\n              (namer'', es) <- termtypesToAnnoS gi0 namer' t\n              return $ (namer'' { namerExpanding = namerExpanding namer }, VarS v (MonomorphicExpr (termGeneral t) es))\n\n            -- A polymorphic term should always have a type.\n            (GMapJust (Polymorphic cls clsName t ts)) -> do\n              MM.sayVVV $\n                \"  polymorphic term\" <+> pretty v <> \":\" <+> list (map (maybe \"?\" pretty . termGeneral) ts)\n              let namer' = namer { namerExpanding = Set.insert v (namerExpanding namer) }\n              (namer'', ess) <- statefulMapM (termtypesToAnnoS gi0) namer' ts\n              let etypes = map (fromJust . termGeneral) ts\n              return $ (namer'' { namerExpanding = namerExpanding namer }, VarS v (PolymorphicExpr cls clsName t (zip etypes ess)))\n\n            -- Terms not associated with TermTypes objects must be lambda-bound or let-bound\n            -- These terms will be renamed for uniqueness\n            _ -> do\n              MM.sayVVV $ \"bound term\" <+> pretty v\n              case Map.lookup v (namerMap namer) of\n                (Just (v', LambdaBound)) -> return (namer, BndS v')\n                (Just (v', LetBound)) -> return (namer, LetBndS v')\n                Nothing -> MM.throwSourcedError gi0 $ \"Undefined term in namer map:\" <+> pretty v\n      where\n        termtypesToAnnoS :: Int -> Namer -> TermTypes -> MorlocMonad (Namer, [AnnoS Int ManyPoly Int])\n        termtypesToAnnoS gi n t = do\n          let calls = [AnnoS gi ci (ExeS (SrcCall src)) | (_, Idx ci src) <- termConcrete t]\n\n          (n', declarations) <- statefulMapM termExprToAnnoS n (termDecl t)\n          return (n', (calls <> declarations))\n\n        termExprToAnnoS :: Namer -> ExprI -> MorlocMonad (Namer, AnnoS Int ManyPoly Int)\n        termExprToAnnoS n e@(ExprI ci _) = do\n          (n', e') <- reindexExprI e >>= collectExprS n\n          return $ (n', AnnoS gi0 ci e')\n    f namer (LstE es) = statefulMapM collectAnnoS namer es |>> second LstS\n    f namer (TupE es) = statefulMapM collectAnnoS namer es |>> second TupS\n    f namer (NamE rs) = do\n      (namer', vals) <- statefulMapM collectAnnoS namer (map snd rs)\n      let keys = map fst rs\n      return (namer', NamS (zip keys vals))\n    f namer (LamE vs e) = do\n      let namer' = foldr (updateRenamer LambdaBound) namer vs\n          vs' = map (fst . fromJust . (flip Map.lookup) (namerMap namer')) vs\n      (_, e') <- collectAnnoS namer' e\n      -- return the original name, the lambda bound terms are defined only below\n      return (namer, LamS vs' e')\n    f namer (LetE ((v, e1) : rest) body) = do\n      (namer1, e1') <- collectAnnoS namer e1\n      let namer2 = updateRenamer LetBound v namer1\n          v' = fst $ fromJust $ Map.lookup v (namerMap namer2)\n          innerBody = case rest of\n            [] -> body\n            _ -> ExprI (exprIIdx body) (LetE rest body)\n      (_, body') <- collectAnnoS namer2 innerBody\n      return (namer, LetS v' e1' body')\n    f _ (LetE [] _) = error \"Bug in collectExprS: empty let bindings\"\n    f namer (AppE e es) = do\n      (namer', e') <- collectAnnoS namer e\n      (namer'', es') <- statefulMapM collectAnnoS namer' es\n      return (namer'', AppS e' es')\n    f namer UniE = return (namer, UniS)\n    f namer NullE = return (namer, NullS)\n    f namer (RealE x) = return (namer, RealS x)\n    f namer (IntE x) = return (namer, IntS x)\n    f namer (LogE x) = return (namer, LogS x)\n    f namer (StrE x) = return (namer, StrS x)\n    f namer (PatE p) = return (namer, ExeS (PatCall p))\n    f namer (DoBlockE e) = do\n      (namer', e') <- collectAnnoS namer e\n      return (namer', DoBlockS e')\n    f namer (EvalE e) = do\n      (namer', e') <- collectAnnoS namer e\n      return (namer', EvalS e')\n    f namer (IntrinsicE intr es) = do\n      (namer', es') <- go namer [] es\n      return (namer', IntrinsicS intr es')\n      where\n        go n acc [] = return (n, reverse acc)\n        go n acc (x:xs) = do\n          (n', x') <- collectAnnoS n x\n          go n' (x':acc) xs\n    f namer (IfE c t e) = do\n      (namer1, c') <- collectAnnoS namer c\n      (namer2, t') <- collectAnnoS namer1 t\n      (namer3, e') <- collectAnnoS namer2 e\n      return (namer3, IfS c' t' e')\n    -- all other expressions are strictly illegal here and represent compiler bugs\n    f _ e = error $ \"Bug in collectExprS: \" <> show (render (pretty e))\n\nupdateRenamer :: BindKind -> EVar -> Namer -> Namer\nupdateRenamer kind v namer =\n  let v' = EV (unEVar v <> \"@\" <> MT.show' (namerIndex namer))\n   in namer\n        { namerMap = Map.insert v (v', kind) (namerMap namer)\n        , namerIndex = namerIndex namer + 1\n        }\n\nexprIIdx :: ExprI -> Int\nexprIIdx (ExprI i _) = i\n\nreindexExprI :: ExprI -> MorlocMonad ExprI\nreindexExprI (ExprI i e) = ExprI <$> newIndex i <*> reindexExpr e\n\nreindexExpr :: Expr -> MorlocMonad Expr\nreindexExpr (ModE m es) = ModE m <$> mapM reindexExprI es\nreindexExpr (AnnE e ts) = AnnE <$> reindexExprI e <*> pure ts\nreindexExpr (AppE e es) = AppE <$> reindexExprI e <*> mapM reindexExprI es\nreindexExpr (AssE v e es) = AssE v <$> reindexExprI e <*> mapM reindexExprI es\nreindexExpr (LamE vs e) = LamE vs <$> reindexExprI e\nreindexExpr (LstE es) = LstE <$> mapM reindexExprI es\nreindexExpr (NamE rs) = NamE <$> mapM (\\(k, e) -> (,) k <$> reindexExprI e) rs\nreindexExpr (TupE es) = TupE <$> mapM reindexExprI es\nreindexExpr (LetE bindings body) = LetE <$> mapM (\\(v, e) -> (,) v <$> reindexExprI e) bindings <*> reindexExprI body\nreindexExpr (IfE c t e) = IfE <$> reindexExprI c <*> reindexExprI t <*> reindexExprI e\nreindexExpr (DoBlockE e) = DoBlockE <$> reindexExprI e\nreindexExpr (EvalE e) = EvalE <$> reindexExprI e\nreindexExpr (IntrinsicE intr es) = IntrinsicE intr <$> mapM reindexExprI es\nreindexExpr e = return e\n\n-- FIXME: when I add linking to line numbers, I'll need to update that map\n-- also. The trace should be recorded.\nnewIndex :: Int -> MorlocMonad Int\nnewIndex i = do\n  i' <- MM.getCounter\n  copyState i i'\n  MM.sayVVV $ \"Set indices \" <> pretty i <> \" = \" <> pretty i'\n  return i'\n"
  },
  {
    "path": "library/Morloc/Frontend/Typecheck.hs",
    "content": "{-# LANGUAGE CPP #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Frontend.Typecheck\nDescription : Bidirectional type inference and checking for general types\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nImplements bidirectional type inference over the 'AnnoS' trees produced by\n'Treeify'. Checks general (language-independent) types and resolves type\naliases. Concrete (language-specific) types are checked later after language\nsegregation in the code generator.\n-}\nmodule Morloc.Frontend.Typecheck (typecheck, resolveTypes, evaluateAnnoSTypes, peakSExpr) where\n\nimport qualified Data.IntMap.Strict as IntMap\nimport qualified Morloc.BaseTypes as BT\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Map as Map\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.TypeEval as TE\nimport Morloc.Typecheck.Internal\n\n{- | Each SAnno object in the input list represents one exported function.\nModules, scopes, imports and everything else are abstracted away.\n\nCheck the general types, do nothing to the concrete types which may only be\nsolved after segregation. Later the concrete types will need to be checked\nfor type consistency and correctness of packers.\n-}\ntypecheck ::\n  [AnnoS Int ManyPoly Int] ->\n  MorlocMonad [AnnoS (Indexed TypeU) Many Int]\ntypecheck = mapM run\n  where\n    run :: AnnoS Int ManyPoly Int -> MorlocMonad (AnnoS (Indexed TypeU) Many Int)\n    run e0 = do\n      -- standardize names for lambda bound variables (e.g., x0, x1 ...)\n      let g0 = Gamma {gammaCounter = 0, gammaSlot = 0, gammaContext = IntMap.empty, gammaExist = Map.empty, gammaSolved = Map.empty, gammaDeferred = [], gammaNatSubs = Map.empty, gammaIntVals = Map.empty}\n      (g1, _, e1) <- synthG g0 e0\n      insetSay \"-------- leaving frontend typechecker ------------------\"\n      insetSay \"g1:\"\n      seeGamma g1\n      insetSay \"========================================================\"\n      let e2 = mapAnnoSG (fmap normalizeType) . applyGen g1 $ e1\n\n      (g2, e3) <- resolveInstances g1 e2\n      let g3 = apply g2 g2\n\n      -- re-check deferred Nat constraints now that existentials are solved\n      case recheckDeferred g3 of\n        Left err -> MM.throwSystemError err\n        Right remaining ->\n          mapM_ (\\(t1, t2) ->\n            MM.sayV $ \"Warning: unresolved Nat constraint:\" <+> prettyTypeU t1 <+> \"~\" <+> prettyTypeU t2\n            ) remaining\n\n      -- perform a final application of gamma the final expression and return\n      -- (is this necessary?)\n      return (applyGen g3 e3)\n\n-- TypeU --> Type\nresolveTypes :: AnnoS (Indexed TypeU) Many Int -> AnnoS (Indexed Type) Many Int\nresolveTypes (AnnoS (Idx i t) ci e) =\n  AnnoS (Idx i (typeOf t)) ci (f e)\n  where\n    f :: ExprS (Indexed TypeU) Many Int -> ExprS (Indexed Type) Many Int\n    f (BndS x) = BndS x\n    f (LetBndS x) = LetBndS x\n    f (CallS x) = CallS x\n    f (LetS v e1 e2) = LetS v (resolveTypes e1) (resolveTypes e2)\n    f (VarS v xs) = VarS v (fmap resolveTypes xs)\n    f (ExeS exe) = ExeS exe\n    f (AppS x xs) = AppS (resolveTypes x) (map resolveTypes xs)\n    f (LamS vs x) = LamS vs (resolveTypes x)\n    f (LstS xs) = LstS (map resolveTypes xs)\n    f (TupS xs) = TupS (map resolveTypes xs)\n    f (NamS rs) = NamS (zip (map fst rs) (map (resolveTypes . snd) rs))\n    f (RealS x) = RealS x\n    f (IntS x) = IntS x\n    f (LogS x) = LogS x\n    f (StrS x) = StrS x\n    f UniS = UniS\n    f NullS = NullS\n    f (DoBlockS e') = DoBlockS (resolveTypes e')\n    f (EvalS e') = EvalS (resolveTypes e')\n    f (CoerceS c e') = CoerceS c (resolveTypes e')\n    f (IfS c t' e') = IfS (resolveTypes c) (resolveTypes t') (resolveTypes e')\n    f (IntrinsicS intr es) = IntrinsicS intr (map resolveTypes es)\n\nresolveInstances ::\n  Gamma -> AnnoS (Indexed TypeU) ManyPoly Int -> MorlocMonad (Gamma, AnnoS (Indexed TypeU) Many Int)\nresolveInstances g (AnnoS gi@(Idx genIndex gt) ci e0) = do\n  gscope <- MM.getGeneralScope genIndex\n  (g', e1) <- f gscope g e0\n  return (g', AnnoS gi ci e1)\n  where\n    f ::\n      Scope ->\n      Gamma ->\n      ExprS (Indexed TypeU) ManyPoly Int ->\n      MorlocMonad (Gamma, ExprS (Indexed TypeU) Many Int)\n\n    -- resolve instances\n    f scope g0 (VarS v (PolymorphicExpr clsName _ _ rss)) = do\n      -- find all instances that are a subtype of the inferred type\n      -- Expand aliases in gt before matching (e.g., Vector 4 Int -> List Int)\n      -- so instances for List match against Vector usage.\n      let gtEval = case TE.evaluateType scope gt of\n            Right et -> et\n            Left _ -> gt\n          emptyGamma = Gamma 0 0 IntMap.empty Map.empty Map.empty [] Map.empty Map.empty\n          isCompatible t = isSubtypeOf2 scope t gtEval\n                        || isJust (tryCoerce scope t gtEval emptyGamma)\n          rssSubtypes = [x | x@(EType t _ _ _, _) <- rss, isCompatible t]\n\n\n      -- find the most specific instance at the general level, this does not\n      -- consider a type to be more specific it is more evaluated.\n      --\n      -- So for the types below\n      --  type Stack = List\n      --  type SpecialStack = Stack\n      --\n      -- And the instances here:\n      --  instance Foo Stack where\n      --      bar :: ...\n      --  instance Foo SpecialStack where\n      --      bar :: ...\n      --\n      --  The two bar instances would be considered equally specialized\n      --\n      --  They will be separated later when concrete types are considered. From\n      --  the general perspective, the evaluate to being equal.\n      (g2, es1) <- case mostSpecific [t | (EType t _ _ _, _) <- rssSubtypes] of\n        -- if there are no suitable instances, die\n        [] ->\n          throwTypeError genIndex $\n            \"No instance found for\" <+> pretty clsName\n              <> \"::\"\n              <> pretty v\n              <> \"\\n  Are you missing a top-level type signature?\"\n        -- There may be many suitable instances from the general type level,\n        -- however, they may differ at the concrete level, so keep all for know\n        -- and let the concrete inference code sort things out later.\n        manyTypes -> do\n          -- Deduplicate alias-equivalent types: e.g., Array a and List a\n          -- are structurally different but evaluate to the same type.\n          -- Without this, N aliases cause exponential branching in recursive\n          -- resolveInstances calls.\n          let eval t = case TE.evaluateType scope t of\n                Right et -> et\n                Left _ -> t\n              deduped = nubBy (\\t1 t2 -> eval t1 == eval t2) manyTypes\n              es0 = concat [rs | (t, rs) <- rssSubtypes,\n                            any (\\d -> eval (etype t) == eval d) deduped]\n              -- When all matching instances are alias-equivalent (single\n              -- deduped entry), propagate existential solutions normally.\n              -- When multiple distinct instances exist (e.g. Integral Int\n              -- and Integral Real), don't propagate -- solving to one\n              -- would break the others.\n              singleGroup = length deduped <= 1\n          g1 <- connectInstance singleGroup g0 es0\n          return (g1, es0)\n\n      (g3, es2) <- statefulMapM resolveInstances g2 es1\n\n      return (g3, VarS v (Many es2))\n    f _ g0 (VarS v (MonomorphicExpr _ xs)) = statefulMapM resolveInstances g0 xs |>> second (VarS v . Many)\n    -- propagate\n    f _ g0 (AppS e es) = do\n      (g1, e') <- resolveInstances g0 e\n      (g2, es') <- statefulMapM resolveInstances g1 es\n      return (g2, AppS e' es')\n    f _ g0 (LamS vs e) = resolveInstances g0 e |>> second (LamS vs)\n    f _ g0 (LstS es) = statefulMapM resolveInstances g0 es |>> second LstS\n    f _ g0 (TupS es) = statefulMapM resolveInstances g0 es |>> second TupS\n    f _ g0 (NamS rs) = do\n      (g1, es') <- statefulMapM resolveInstances g0 (map snd rs)\n      return (g1, NamS (zip (map fst rs) es'))\n\n    -- let expressions\n    f _ g0 (LetBndS v) = return (g0, LetBndS v)\n    f _ g0 (LetS v e1 e2) = do\n      (g1, e1') <- resolveInstances g0 e1\n      (g2, e2') <- resolveInstances g1 e2\n      return (g2, LetS v e1' e2')\n    -- primitives\n    f _ g0 UniS = return (g0, UniS)\n    f _ g0 NullS = return (g0, NullS)\n    f _ g0 (BndS v) = return (g0, BndS v)\n    f _ g0 (CallS v) = return (g0, CallS v)\n    f _ g0 (RealS x) = return (g0, RealS x)\n    f _ g0 (IntS x) = return (g0, IntS x)\n    f _ g0 (LogS x) = return (g0, LogS x)\n    f _ g0 (StrS x) = return (g0, StrS x)\n    f _ g0 (ExeS x) = return (g0, ExeS x)\n    f _ g0 (DoBlockS e) = resolveInstances g0 e |>> second DoBlockS\n    f _ g0 (EvalS e) = resolveInstances g0 e |>> second EvalS\n    f _ g0 (CoerceS c e) = resolveInstances g0 e |>> second (CoerceS c)\n    f _ g0 (IfS c t e) = do\n      (g1, c') <- resolveInstances g0 c\n      (g2, t') <- resolveInstances g1 t\n      (g3, e') <- resolveInstances g2 e\n      return (g3, IfS c' t' e')\n    f _ g0 (IntrinsicS intr es) = do\n      (g1, es') <- statefulMapM resolveInstances g0 es\n      return (g1, IntrinsicS intr es')\n\n    -- When unique (single alias-equivalent group), propagate gamma normally.\n    -- When not unique (multiple distinct instances), skip failures and don't\n    -- propagate, since solving to one instance would break the others.\n    connectInstance :: Bool -> Gamma -> [AnnoS (Indexed TypeU) f c] -> MorlocMonad Gamma\n    connectInstance _ g0 [] = return g0\n    connectInstance singleGroup g0 (AnnoS (Idx i t) _ _ : es) = do\n      scope <- MM.getGeneralScope i\n      case subtype scope (stripCoercionWrappers gt) t g0 of\n        (Left _) -> connectInstance singleGroup g0 es\n        (Right g1)\n          | singleGroup -> connectInstance singleGroup g1 es\n          | otherwise   -> connectInstance singleGroup g0 es\n\n-- prepare a general, indexed typechecking error\nthrowTypeError :: Int -> MDoc -> MorlocMonad a\nthrowTypeError i msg = MM.throwSourcedError i (\"General type error:\" <+> msg)\n\ncheckG ::\n  Gamma ->\n  AnnoS Int ManyPoly Int ->\n  TypeU ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , AnnoS (Indexed TypeU) ManyPoly Int\n    )\ncheckG g (AnnoS i j e) t = do\n  annotation <- MM.gets stateAnnotations\n  (g', t', e') <- case Map.lookup j annotation of\n    Nothing -> checkE' i g e t\n    (Just annType) -> do\n      gAnn <- subtype' i annType t g\n      checkE' i gAnn e t\n  return (g', t', AnnoS (Idx i t') j e')\n\nsynthG ::\n  Gamma ->\n  AnnoS Int ManyPoly Int ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , AnnoS (Indexed TypeU) ManyPoly Int\n    )\nsynthG g (AnnoS gi ci e) = do\n  annotation <- MM.gets stateAnnotations\n  (g', t, e') <- case Map.lookup ci annotation of\n    Nothing -> synthE' gi g e\n    (Just annType) -> checkE' gi g e annType\n  return (g', t, AnnoS (Idx gi t) ci e')\n\nsynthE ::\n  Int ->\n  Gamma ->\n  ExprS Int ManyPoly Int ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , ExprS (Indexed TypeU) ManyPoly Int\n    )\nsynthE _ g UniS = return (g, BT.unitU, UniS)\nsynthE _ g NullS =\n  let (g1, v) = newvar \"nullType_\" g\n   in return (g1, OptionalU v, NullS)\nsynthE _ g (RealS x) = return (g, BT.realU, RealS x)\nsynthE _ g (IntS x) = return (g, BT.intU, IntS x)\nsynthE _ g (LogS x) = return (g, BT.boolU, LogS x)\nsynthE _ g (StrS x) = return (g, BT.strU, StrS x)\n-- Ensures pattern setting operations return the correct type.\n-- Without this case, patterns that change type will pass silently, but lead to\n-- corrupted data.\n-- Setter pattern lambda: (\\v -> .field v newVal) data\n-- The body applies a pattern to 2+ args (data + set values).\n-- Getters (1 arg) are NOT matched here and go through normal AppS.\nsynthE\n  _\n  g0\n  ( AppS\n      f0@( AnnoS\n            _\n            _\n            ( LamS\n                [_]\n                ( AnnoS\n                    _\n                    _\n                    ( AppS\n                        ((AnnoS _ _ (ExeS (PatCall (PatternStruct _)))))\n                        (_ : _ : _)\n                      )\n                  )\n              )\n          )\n      [x0]\n    ) = do\n    (g1, patternType, f1) <- synthG g0 f0\n    case patternType of\n      (FunU _ selectType) -> do\n        (g2, dataType, x1) <- checkG g1 x0 selectType\n        return (g2, dataType, AppS f1 [x1])\n      _ -> error \"This should be unreachable\"\n\n-- synthesize a string interpolation pattern\nsynthE i g (AppS f@(AnnoS _ _ (ExeS (PatCall (PatternText _ _)))) es) = do\n  (g1, _, f1) <- synthG g f\n  (g2, _, es1, _) <- zipCheck i g1 es (take (length es) (repeat BT.strU))\n  return (g2, BT.strU, AppS f1 es1)\n\n-- handle getter patterns\nsynthE _ _ (AppS (AnnoS _ _ (ExeS (PatCall (PatternStruct _)))) []) = error \"Unreachable application pattern to no data\"\nsynthE _ g0 (AppS (AnnoS fgidx fcidx (ExeS (PatCall (PatternStruct s)))) [e0]) = do\n  -- generate an existential type that contains the pattern\n  (g1, datType) <- selectorType g0 s\n\n  -- type returned from pattern (with one element for each extracted value)\n  retType <- return $ case selectorGetter datType s of\n    [] -> error \"Illegal empty selection\"\n    [t] -> t\n    ts -> BT.tupleU ts\n  let ft = FunU [datType] retType\n\n  -- use selector-derived type to update context and data expression\n  (g2, _, e') <- checkG g1 e0 datType\n\n  let f1 = (AnnoS (Idx fgidx ft) fcidx (ExeS (PatCall (PatternStruct s))))\n\n  return (g2, apply g2 retType, AppS f1 [e'])\n\n-- handle setter patterns\nsynthE _ g0 (AppS (AnnoS fgidx fcidx (ExeS (PatCall (PatternStruct s)))) (e0 : es0)) = do\n  (g1, (unzip -> (setTypes, es1))) <-\n    statefulMapM (\\s' e -> synthG s' e |>> (\\(a, b, c) -> (a, (b, c)))) g0 es0\n\n  -- generate an existential type that contains the pattern\n  (g2, outputType) <- selectorType g1 s |>> second (selectorSetter setTypes s)\n\n  (g3, datType, e1) <- checkG g2 e0 outputType\n\n  let patternType = apply g3 $ FunU (datType : setTypes) outputType\n      f1 = AnnoS (Idx fgidx patternType) fcidx (ExeS (PatCall (PatternStruct s)))\n\n  return (g3, apply g3 outputType, AppS f1 (e1 : es1))\nsynthE _ g (ExeS (PatCall (PatternText s ss@(length -> n)))) = do\n  let t = FunU (take n (repeat BT.strU)) BT.strU\n  return (g, t, ExeS (PatCall (PatternText s ss)))\n\n--   -->E0\nsynthE _ g (AppS f []) = do\n  (g1, t1, f1) <- synthG g f\n  return (g1, t1, AppS f1 [])\n\n--   -->E\nsynthE i g0 (AppS f xs0) = do\n  -- synthesize the type of the function\n  (g1, funType0, funExpr0) <- synthG g0 f\n\n  -- Resolve nat labels: if the function has labeled nat params (m:Int syntax)\n  -- and corresponding args are int literals, inject NatVarU solutions into gamma\n  let g1' = resolveNatLabels f funType0 xs0 g1\n\n  etaExpandSynthE i g1' funType0 funExpr0 f xs0\n\n-- -->I==>\n-- Synthesize lambda expressions. The key optimization here is to avoid\n-- re-synthesizing after eta expansion - we synthesize the body once with\n-- proper context, then construct the expanded form directly.\nsynthE parentIdx g0 (LamS vs x) = do\n  -- Create existentials for lambda-bound variables and add to context\n  let (g1, paramTypes) = statefulMap (\\g' v -> newvar (unEVar v <> \"_x\") g') g0 vs\n      g2 = g1 ++> zipWith AnnG vs paramTypes\n\n  -- Synthesize body ONCE with bound variables in context\n  (g3, bodyType, bodyExpr) <- synthG g2 x\n\n  -- Check if body returns a function (needs eta expansion)\n  let normalBody = normalizeType (apply g3 bodyType)\n  case normalBody of\n    FunU extraArgTypes retType -> do\n      -- Body returns a function: eta-expand WITHOUT re-synthesizing\n      -- Create new bound variables for the extra arguments\n      (g4, newVarsWithTypes) <-\n        statefulMapM\n          ( \\g' t -> do\n              let (g'', v) = evarname g' \"v\"\n              return (g'', (v, t))\n          )\n          g3\n          extraArgTypes\n\n      let newVars = map fst newVarsWithTypes\n          appliedExtraTypes = map (apply g4 . snd) newVarsWithTypes\n\n      -- Add type annotations for new bound variables\n      let g5 = g4 ++> zipWith AnnG newVars appliedExtraTypes\n\n      -- Create typed variable references for the new parameters\n      newVarExprs <-\n        mapM\n          ( \\(v, t) -> do\n              idx <- MM.getCounterWithPos parentIdx\n              return $ AnnoS (Idx idx t) idx (BndS v)\n          )\n          (zip newVars appliedExtraTypes)\n\n      -- Create the application of body to new variables\n      appIdx <- MM.getCounterWithPos parentIdx\n      let appliedRetType = apply g5 retType\n          appliedBodyExpr = AppS (applyGen g5 bodyExpr) newVarExprs\n          appliedBodyAnno = AnnoS (Idx appIdx appliedRetType) appIdx appliedBodyExpr\n\n      -- Construct the full function type\n      let allParamTypes = map (apply g5) paramTypes ++ appliedExtraTypes\n          fullType = FunU allParamTypes appliedRetType\n\n      return (g5, fullType, LamS (vs ++ newVars) appliedBodyAnno)\n    _ -> do\n      -- Body is not a function: just return the lambda as-is\n      let funType = apply g3 (FunU paramTypes bodyType)\n      return (g3, funType, LamS vs (applyGen g3 bodyExpr))\n\n--   List\nsynthE _ g (LstS []) =\n  let (g1, itemType) = newvar \"itemType_\" g\n      listType = BT.listU itemType\n   in return (g1, listType, LstS [])\nsynthE i g (LstS (e : es)) = do\n  (g1, itemType, itemExpr) <- synthG g e\n  (g2, listType, listExpr) <- checkE' i g1 (LstS es) (BT.listU itemType)\n  case listExpr of\n    (LstS es') -> return (g2, listType, LstS (itemExpr : es'))\n    _ -> error \"impossible\"\n\n--   Tuple\nsynthE _ g (TupS []) =\n  let t = BT.tupleU []\n   in return (g, t, TupS [])\nsynthE i g (TupS (e : es)) = do\n  -- synthesize head\n  (g1, itemType, itemExpr) <- synthG g e\n\n  -- synthesize tail\n  (g2, tupleType, tupleExpr) <- synthE' i g1 (TupS es)\n\n  -- merge the head and tail\n  t3 <- case tupleType of\n    (AppU _ ts) -> return $ BT.tupleU (apply g2 itemType : ts)\n    _ -> error \"impossible\" -- the general tuple will always be (AppU _ _)\n  xs' <- case tupleExpr of\n    (TupS xs') -> return xs'\n    _ -> error \"impossible\" -- synth does not change data constructors\n  return (g2, t3, TupS (itemExpr : xs'))\nsynthE _ g0 (NamS rs) = do\n  (g1, xs) <- statefulMapM (\\s v -> synthG s v |>> (\\(a, b, c) -> (a, (b, c)))) g0 (map snd rs)\n  let (ts, es) = unzip xs\n      ks = map fst rs\n      (g2, t) = newvarRich ([], Closed) (zip ks ts, Closed) \"record_\" g1\n      e = NamS (zip ks es)\n  return (g2, t, e)\n\n-- Any morloc variables should have been expanded by treeify. Any bound\n-- variables should be checked against. I think (this needs formalization).\nsynthE _ g0 (VarS v (MonomorphicExpr (Just t0) xs0)) = do\n  let (g1, t1) = rename g0 (etype t0)\n      g1' = g1 ++> [AnnG v t1]\n  (g2, t2, xs1) <- foldCheck g1' xs0 t1\n  let xs2 = applyCon g2 $ VarS v (MonomorphicExpr (Just t0) xs1)\n  return (g2, t2, xs2)\nsynthE _ g (VarS v (MonomorphicExpr Nothing (x : xs))) = do\n  let (g0', freshT) = newvar (unEVar v <> \"_rec\") g\n      g0'' = g0' ++> [AnnG v freshT]\n  (g', t', x') <- synthG g0'' x\n  (g'', t'', xs') <- foldCheck g' xs t'\n  let xs'' = applyCon g'' $ VarS v (MonomorphicExpr Nothing (x' : xs'))\n  return (g'', t'', xs'')\nsynthE _ g (VarS v (MonomorphicExpr Nothing [])) = do\n  let (g', t) = newvar (unEVar v <> \"_u\") g\n  return (g', t, VarS v (MonomorphicExpr Nothing []))\nsynthE i g0 (VarS v (PolymorphicExpr cls clsName t0 rs0)) = do\n  (g1, rsChecked) <- checkInstances g0 (etype t0) rs0\n  let (g2, t1) = rename g1 (etype t0)\n  return (g2, t1, VarS v (PolymorphicExpr cls clsName t0 rsChecked))\n  where\n    -- Check each instance independently. Reset gammaContext between instances\n    -- to prevent unbounded growth (each instance adds ~50-80 entries for\n    -- existentials and solved types that are not needed by subsequent checks).\n    checkInstances ::\n      Gamma ->\n      TypeU ->\n      [(EType, [AnnoS Int ManyPoly Int])] ->\n      MorlocMonad (Gamma, [(EType, [AnnoS (Indexed TypeU) ManyPoly Int])])\n    checkInstances g _ [] = return (g, [])\n    checkInstances g10 genType ((instType, es) : rs) = do\n      -- convert qualified terms in the general type to existentials\n      let (g11, genType') = toExistential g10 genType\n      -- rename the instance type\n      let (g12, instType') = rename g11 (etype instType)\n      -- subtype the renamed instance type against the existential general\n      g13 <- subtype' i instType' genType' g12\n      -- Record the slot counter AFTER subtype'. Both toExistential and subtype'\n      -- create ExistG entries that must be preserved: connectInstance (in\n      -- resolveInstances) needs them in gammaContext to solve via access1.\n      let slotAfterSubtype = gammaSlot g13\n      -- check all implementations for this instance\n      (g14, es') <- checkImplementations g13 genType' es\n\n      -- Trim context back to post-subtype state. This removes entries\n      -- from checkG (the main source of O(N^2) growth) while preserving\n      -- existentials from toExistential and subtype' needed downstream.\n      let gNext = gammaTrimAfter slotAfterSubtype g14\n\n      -- Use the ORIGINAL general type, not the existentialized one above.\n      -- Each instance gets its own existentials solved independently.\n      (g15, rs') <- checkInstances gNext genType rs\n\n      return (g15, (instType, es') : rs')\n\n    -- check each implementation within each instance\n    -- do not return modified Gamma state\n    checkImplementations ::\n      Gamma ->\n      TypeU ->\n      [AnnoS Int ManyPoly Int] ->\n      MorlocMonad (Gamma, [AnnoS (Indexed TypeU) ManyPoly Int])\n    checkImplementations g _ [] = return (g, [])\n    checkImplementations g10 t (e@(AnnoS implGi _ _) : es) = do\n      -- Temporarily remove any annotation that was propagated to this\n      -- implementation's index via copyState/reindexExprI. An annotation\n      -- like `mempty :: Str` on the usage site must not constrain checking\n      -- of each instance implementation (e.g. the List instance's `[]`).\n      implAnn <- MM.gets (Map.lookup implGi . stateAnnotations)\n      MM.modify (\\s -> s { stateAnnotations = Map.delete implGi (stateAnnotations s) })\n      (g11, _, e') <- checkG g10 e t\n      -- Restore\n      case implAnn of\n        Just ann' -> MM.modify (\\s -> s { stateAnnotations = Map.insert implGi ann' (stateAnnotations s) })\n        Nothing -> return ()\n\n      -- check all the remaining implementations\n      (g12, es') <- checkImplementations g11 t es\n\n      -- return the final context and the applied expressions\n      return (g12, applyGen g12 e' : es')\n\n-- bare selector pattern (e.g., .0 or .1 used as a function argument, not applied)\nsynthE _ g0 (ExeS (PatCall (PatternStruct s))) = do\n  (g1, datType) <- selectorType g0 s\n  retType <- return $ case selectorGetter datType s of\n    [] -> error \"Illegal empty selection\"\n    [t] -> t\n    ts -> BT.tupleU ts\n  let ft = FunU [datType] retType\n  return (g1, ft, ExeS (PatCall (PatternStruct s)))\n-- This case will only be encountered in check, the existential generated here\n-- will be subtyped against the type known from the VarS case.\nsynthE _ g (ExeS exe) = do\n  let (g', t) = newvar \"call_\" g\n  return (g', t, ExeS exe)\nsynthE _ g (BndS v) = do\n  (g', t') <- case lookupE v g of\n    (Just t) -> return (g, t)\n    Nothing -> return $ newvar (unEVar v <> \"_u\") g\n  return (g', t', BndS v)\nsynthE _ g (LetBndS v) = do\n  (g', t') <- case lookupE v g of\n    (Just t) -> return (g, t)\n    Nothing -> return $ newvar (unEVar v <> \"_u\") g\n  return (g', t', LetBndS v)\nsynthE _ g (CallS v) = do\n  (g', t') <- case lookupE v g of\n    (Just t) -> return (g, t)\n    Nothing -> return $ newvar (unEVar v <> \"_rec\") g\n  return (g', t', CallS v)\nsynthE _ g (LetS v e1 e2) = do\n  (g1, t1, e1') <- synthG g e1\n  let g2 = g1 ++> [AnnG v t1]\n      -- Track known constant values for nat label resolution\n      g2' = case tryEvalConst g2 (let AnnoS _ _ e = e1' in e) of\n        Just val' -> g2 { gammaIntVals = Map.insert v val' (gammaIntVals g2) }\n        Nothing -> g2\n  (g3, t2, e2') <- synthG g2' e2\n  return (g3, t2, LetS v e1' e2')\nsynthE i g (IfS cond thenE elseE) = do\n  (g1, condType, cond') <- synthG g cond\n  g2 <- subtype' i condType (VarU (TV \"Bool\")) g1\n  (g3, t2, thenE') <- synthG g2 thenE\n  (g4, t3, elseE') <- synthG g3 elseE\n  g5 <- subtype' i t3 t2 g4\n  return (g5, apply g5 t2, IfS cond' thenE' elseE')\nsynthE i g (DoBlockS e) = do\n  (g1, t1, e1) <- synthG g e\n  case apply g1 t1 of\n    EffectU _ iT -> do\n      -- Final expr is effectful: wrap it in EvalS so codegen forces the\n      -- thunk and collectDoEffects picks up its effects alongside the\n      -- non-final EvalS nodes.\n      e1' <- wrapFinalEvalS i iT e1\n      let collected = collectDoEffects e1'\n      return (g1, EffectU collected iT, DoBlockS e1')\n    bareT -> do\n      -- Pure final: leave body as-is; tryCoerce CoerceToEffect will lift\n      -- against an effectful expected type at the use site.\n      let collected = collectDoEffects e1\n      return (g1, EffectU collected bareT, DoBlockS e1)\nsynthE _ g (CoerceS coercion e) = do\n  (g1, t1, e1) <- synthG g e\n  return (g1, applyCoercion coercion t1, CoerceS coercion e1)\nsynthE i g (EvalS e) = do\n  (g1, t1, e1) <- synthG g e\n  let (g1', t1') = stripForallU g1 (apply g1 t1)\n  case t1' of\n    EffectU _ a -> return (g1', a, EvalS e1)\n    ExistU _ _ _ -> do\n      let (g2, bv) = tvarname g1' \"effectInner_\"\n          bt = ExistU bv ([], Open) ([], Open)\n          (g2b, ev) = tvarname g2 \"effectVar_\"\n          thunkT = EffectU (EffectVar ev) bt\n      g3 <- subtype' i (apply g2b t1') thunkT g2b\n      return (g3, apply g3 bt, EvalS (applyGen g3 e1))\n    t -> throwTypeError i $\n      \"Cannot force a non-effectful value (got type\" <+> pretty t <> \").\"\n      <+> \"The ! operator and non-final do-block statements require an effectful type <E> T.\"\n      <+> \"Pure expressions in a do-block should be bound via 'let' or moved to the final position.\"\nsynthE i g (IntrinsicS intr args) = do\n  (g', argTypes, args') <- synthArgs g args\n  g'' <- checkIntrinsicArgs i g' intr argTypes\n  let (g''', expectedType) = intrinsicTypeG g'' intr\n  return (g''', expectedType, IntrinsicS intr args')\n\n-- | Strip ForallU wrappers by instantiating bound variables as existentials.\n-- Follows the same pattern as `application` for ForallU.\nstripForallU :: Gamma -> TypeU -> (Gamma, TypeU)\nstripForallU g (ForallU v t) = stripForallU (g +> v) (substitute v t)\nstripForallU g t = (g, t)\n\n-- | Return type of a fully applied intrinsic, threading Gamma for fresh existentials\nintrinsicTypeG :: Gamma -> Intrinsic -> (Gamma, TypeU)\nintrinsicTypeG g IntrLoad =\n  let (g', loadType) = newvar \"load_\" g\n  in (g', EffectU ioEffectSet (OptionalU loadType))\nintrinsicTypeG g IntrRead =\n  let (g', readType) = newvar \"read_\" g\n  in (g', OptionalU readType)\nintrinsicTypeG g intr = (g, intrinsicType intr)\n\n-- | Return type of a fully applied intrinsic (for intrinsics without fresh vars)\nintrinsicType :: Intrinsic -> TypeU\nintrinsicType IntrSave = EffectU ioEffectSet BT.unitU\nintrinsicType IntrSaveM = EffectU ioEffectSet BT.unitU\nintrinsicType IntrSaveJ = EffectU ioEffectSet BT.unitU\nintrinsicType IntrLoad = EffectU ioEffectSet (OptionalU (ExistU (TV \"load_a\") ([], Open) ([], Open)))\nintrinsicType IntrHash = BT.strU\nintrinsicType IntrVersion = BT.strU\nintrinsicType IntrCompiled = BT.strU\nintrinsicType IntrLang = BT.strU\nintrinsicType IntrSchema = BT.strU\nintrinsicType IntrTypeof = BT.strU\nintrinsicType IntrShow = BT.strU\nintrinsicType IntrRead = OptionalU (ExistU (TV \"read_a\") ([], Open) ([], Open))\nintrinsicType IntrDatafile = BT.strU\n\n-- intrinsicArity is defined in Morloc.Namespace.Expr\n\n-- | Synthesize types for a list of arguments\nsynthArgs ::\n  Gamma ->\n  [AnnoS Int ManyPoly Int] ->\n  MorlocMonad (Gamma, [TypeU], [AnnoS (Indexed TypeU) ManyPoly Int])\nsynthArgs g [] = return (g, [], [])\nsynthArgs g (a:as) = do\n  (g1, t1, a') <- synthG g a\n  (g2, ts, as') <- synthArgs g1 as\n  return (g2, t1:ts, a':as')\n\n-- | Check intrinsic argument count and types\ncheckIntrinsicArgs ::\n  Int -> Gamma -> Intrinsic -> [TypeU] -> MorlocMonad Gamma\ncheckIntrinsicArgs i g intr argTypes = do\n  let expected = intrinsicArity intr\n      actual = length argTypes\n  if actual /= expected\n    then throwTypeError i $\n      \"@\" <> pretty (intrinsicName intr) <+> \"expects\" <+> pretty expected\n        <+> \"arguments but got\" <+> pretty actual\n    else do\n      -- Check specific argument types\n      case (intr, argTypes) of\n        -- @save/@savem/@savej: a -> Str -> {()}\n        (IntrSave, [_, pathT]) -> subtype' i pathT BT.strU g\n        (IntrSaveM, [_, pathT]) -> subtype' i pathT BT.strU g\n        (IntrSaveJ, [_, pathT]) -> subtype' i pathT BT.strU g\n        -- @load: Str -> {?a}\n        (IntrLoad, [pathT]) -> subtype' i pathT BT.strU g\n        -- @hash: a -> Str\n        (IntrHash, [_]) -> return g\n        -- @schema/@typeof: a -> Str (value ignored at runtime)\n        (IntrSchema, [_]) -> return g\n        (IntrTypeof, [_]) -> return g\n        -- @show: a -> Str (any type accepted)\n        (IntrShow, [_]) -> return g\n        -- @read: Str -> ?a (arg must be Str)\n        (IntrRead, [strT]) -> subtype' i strT BT.strU g\n        -- @datafile: Str -> Str (path must be string literal)\n        (IntrDatafile, [pathT]) -> subtype' i pathT BT.strU g\n        -- compile-time constants: no args\n        (IntrVersion, []) -> return g\n        (IntrCompiled, []) -> return g\n        (IntrLang, []) -> return g\n        _ -> return g\n\netaExpandSynthE ::\n  Int ->\n  Gamma ->\n  TypeU ->\n  AnnoS (Indexed TypeU) ManyPoly Int ->\n  AnnoS Int ManyPoly Int ->\n  [AnnoS Int ManyPoly Int] ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , ExprS (Indexed TypeU) ManyPoly Int\n    )\netaExpandSynthE i g1 funType0 funExpr0 _f xs0 = do\n  let normalType = normalizeType funType0\n      numArgs = length xs0\n\n  -- Check for arity errors before proceeding\n  case normalType of\n    FunU (length -> numParams) _\n      | numArgs > numParams ->\n          throwTypeError i $ \"Invalid function application of type:\\n  \" <> prettyTypeU funType0\n    _ -> return ()\n\n  -- Process available args through application (no re-synthesis)\n  (g2, funType1, inputExprs) <- application' i g1 xs0 normalType\n\n  MM.sayVVV $\n    \"  funType1:\" <+> pretty funType1\n      <> \"\\n  inputExprs:\" <+> list (map pretty inputExprs)\n\n  case funType1 of\n    FunU ts t -> case drop numArgs ts of\n      -- full application\n      [] -> return (g2, apply g2 t, AppS funExpr0 inputExprs)\n      -- partial application: eta-expand without re-synthesis\n      remainingParams -> do\n        (g3, newVarsWithTypes) <-\n          statefulMapM\n            ( \\g' tp -> do\n                let (g'', v) = evarname g' \"v\"\n                return (g'', (v, apply g2 tp))\n            )\n            g2\n            remainingParams\n\n        let newVars = map fst newVarsWithTypes\n            newTypes = map snd newVarsWithTypes\n            g4 = g3 ++> zipWith AnnG newVars newTypes\n\n        -- Create typed variable references for the new params\n        newVarExprs <-\n          mapM\n            ( \\(v, tp) -> do\n                idx <- MM.getCounterWithPos i\n                return $ AnnoS (Idx idx tp) idx (BndS v)\n            )\n            newVarsWithTypes\n\n        -- Build the application and lambda directly\n        appIdx <- MM.getCounterWithPos i\n        let retType = apply g4 t\n            bodyExpr = AppS funExpr0 (inputExprs ++ newVarExprs)\n            bodyAnno = AnnoS (Idx appIdx retType) appIdx bodyExpr\n            fullType = FunU newTypes retType\n        return (g4, fullType, LamS newVars bodyAnno)\n    _ -> error \"impossible\"\n\nexpand :: Int -> Int -> Gamma -> ExprS Int f Int -> MorlocMonad (Gamma, ExprS Int f Int)\nexpand _ 0 g x = return (g, x)\nexpand parentIdx n g e@(AppS _ _) = do\n  newIndex <- MM.getCounterWithPos parentIdx\n  let (g', v') = evarname g \"v\"\n  e' <- applyExistential parentIdx v' e\n  let x' = LamS [v'] (AnnoS newIndex newIndex e')\n  expand parentIdx (n - 1) g' x'\nexpand parentIdx n g (LamS vs' (AnnoS t ci e)) = do\n  let (g', v') = evarname g \"v\"\n  e' <- applyExistential parentIdx v' e\n  expand parentIdx (n - 1) g' (LamS (vs' <> [v']) (AnnoS t ci e'))\nexpand _ _ g x = return (g, x)\n\napplyExistential :: Int -> EVar -> ExprS Int f Int -> MorlocMonad (ExprS Int f Int)\napplyExistential parentIdx v' (AppS f xs') = do\n  newIndex <- MM.getCounterWithPos parentIdx\n  return $ AppS f (xs' <> [AnnoS newIndex newIndex (BndS v')])\n-- possibly illegal application, will type check after expansion\napplyExistential parentIdx v' e = do\n  appIndex <- MM.getCounterWithPos parentIdx\n  varIndex <- MM.getCounterWithPos parentIdx\n  return $ AppS (AnnoS appIndex appIndex e) [AnnoS varIndex varIndex (BndS v')]\n\napplication ::\n  Int ->\n  Gamma ->\n  [AnnoS Int ManyPoly Int] -> -- the expressions that are passed to the function\n  TypeU -> -- the function type\n  MorlocMonad\n    ( Gamma\n    , TypeU -- output function type\n    , [AnnoS (Indexed TypeU) ManyPoly Int] -- @e@, with type annotation\n    )\n--  g1 |- e <= A -| g2\n-- ----------------------------------------- -->App\n--  g1 |- A->C o e =>> C -| g2\napplication i g0 es0 (FunU as0 b0) = do\n  (g1, as1, es1, remainder) <- zipCheck i g0 es0 as0\n  let funType = apply g1 $ FunU (as1 <> remainder) b0\n  insetSay $ \"remainder:\" <+> vsep (map pretty remainder)\n  return (g1, funType, es1)\n\n--  g1,Ea |- [Ea/a]A o e =>> C -| g2\n-- ----------------------------------------- Forall App\n--  g1 |- Forall x.A o e =>> C -| g2\napplication i g0 es (ForallU v s) = application' i (g0 +> v) es (substitute v s)\n--  g1[Ea2, Ea1, Ea=Ea1->Ea2] |- e <= Ea1 -| g2\n-- ----------------------------------------- EaApp\n--  g1[Ea] |- Ea o e =>> Ea2 -| g2\napplication i g0 es (ExistU v@(TV s) ([], _) _) =\n  case access1 v g0 of\n    -- replace <t0> with <t0>:<ea1> -> <ea2>\n    Just _ -> do\n      let (g1, veas) = statefulMap (\\g _ -> tvarname g \"a_\") g0 es\n          (g2, vea) = tvarname g1 (s <> \"o_\")\n          eas = [ExistU v' ([], Open) ([], Open) | v' <- veas]\n          ea = ExistU vea ([], Open) ([], Open)\n          f = FunU eas ea\n      g3 <- case solveExistWith v f (map index eas ++ [index ea]) g2 of\n        Left err -> throwTypeError i err\n        Right Nothing -> return g2\n        Right (Just g') -> return g'\n      (g4, _, es', _) <- zipCheck i g3 es eas\n      return (g4, apply g4 f, es')\n    -- if the variable has already been solved, use solved value\n    Nothing -> case lookupU v g0 of\n      (Just (FunU ts t)) -> do\n        (g1, ts', es', _) <- zipCheck i g0 es ts\n        return (g1, apply g1 (FunU ts' t), es')\n      (Just t) -> throwTypeError i $ \"Application of term with non-functional type:\\n   \" <+> prettyTypeU t\n      Nothing -> throwTypeError i $ \"Expected function, but could not find type of term\\n   \" <+> pretty v\napplication i _ _ t =\n  throwTypeError i $\n    \"Application of non-functional expression of type:\" <+> prettyTypeU t\n\n-- Tip together the arguments passed to an application\nzipCheck ::\n  Int ->\n  Gamma ->\n  [AnnoS Int ManyPoly Int] ->\n  [TypeU] ->\n  MorlocMonad\n    ( Gamma\n    , [TypeU]\n    , [AnnoS (Indexed TypeU) ManyPoly Int]\n    , [TypeU] -- remainder\n    )\n-- check the first elements, cdr down the remaining values\nzipCheck i g0 (x0 : xs0) (t0 : ts0) = do\n  (g1, t1, x1) <- checkG g0 x0 t0\n  (g2, ts1, xs1, remainder) <- zipCheck i g1 xs0 ts0\n  return (g2, t1 : ts1, x1 : xs1, remainder)\n-- If there are fewer arguments than types, this may be OK, just partial application\nzipCheck _ g0 [] ts = return (g0, [], [], ts)\n-- If there are fewer types than arguments, then die\nzipCheck i _ _ [] = MM.throwSourcedError i \"Compiler bug (__FILE__:__LINE__): too many arguments in zipCheck\"\n\nfoldCheck ::\n  Gamma ->\n  [AnnoS Int ManyPoly Int] ->\n  TypeU ->\n  MorlocMonad (Gamma, TypeU, [AnnoS (Indexed TypeU) ManyPoly Int])\nfoldCheck g [] t = return (g, t, [])\nfoldCheck g (x : xs) t = do\n  (g', t', x') <- checkG g x t\n  (g'', t'', xs') <- foldCheck g' xs t'\n  return (g'', t'', x' : xs')\n\ncheckE ::\n  Int ->\n  Gamma ->\n  ExprS Int ManyPoly Int ->\n  TypeU ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , ExprS (Indexed TypeU) ManyPoly Int\n    )\ncheckE i g1 (LstS (e : es)) (AppU v [t]) = do\n  (g2, t2, e') <- checkG g1 e t\n  -- LstS [] will go to the normal Sub case\n  (g3, t3, LstS es') <- checkE' i g2 (LstS es) (AppU v [t2])\n  return (g3, t3, LstS (map (applyGen g3) (e' : es')))\ncheckE i g0 e0@(LamS vs body) t@(FunU as b)\n  | length vs == length as = do\n      let g1 = g0 ++> zipWith AnnG vs as\n      (g2, t2, e2) <- checkG g1 body b\n\n      let t3 = apply g2 (FunU as t2)\n          e3 = applyCon g2 (LamS vs e2)\n\n      return (g2, t3, e3)\n  | otherwise = do\n      (g', e') <- expand i (length as - length vs) g0 e0\n      checkE' i g' e' t\ncheckE i g1 e1 (ForallU v a) = do\n  checkE' i (g1 +> v) e1 (substitute v a)\ncheckE i g (IfS cond thenE elseE) t = do\n  (g1, condType, cond') <- synthG g cond\n  g2 <- subtype' i condType (VarU (TV \"Bool\")) g1\n  (g3, t2, thenE') <- checkG g2 thenE t\n  (g4, _, elseE') <- checkG g3 elseE (apply g3 t2)\n  return (g4, apply g4 t2, IfS cond' thenE' elseE')\n-- DoBlockS falls through to the general synth+subtype/coerce case (below).\n-- synthE DoBlockS produces a flattened EffectU, and subtype handles effectful\n-- finals via <E1> T <: <E2> T, while tryCoerce handles pure-final auto-lift.\ncheckE i g (EvalS e) t = do\n  -- Synthesize first to get concrete EffectSet in annotations,\n  -- then check the inner type against the expected type.\n  -- This avoids creating an EffectVar that is never solved.\n  (g1, t1, e1) <- synthG g e\n  let (g1', t1') = stripForallU g1 (apply g1 t1)\n  case t1' of\n    EffectU _ a -> do\n      g2 <- subtype' i a t g1'\n      return (g2, apply g2 t, EvalS e1)\n    ExistU _ _ _ -> do\n      let (g2, bv) = tvarname g1' \"effectInner_\"\n          bt = ExistU bv ([], Open) ([], Open)\n          (g2b, ev) = tvarname g2 \"effectVar_\"\n          thunkT = EffectU (EffectVar ev) bt\n      g3 <- subtype' i (apply g2b t1') thunkT g2b\n      g4 <- subtype' i (apply g3 bt) t g3\n      return (g4, apply g4 t, EvalS (applyGen g4 e1))\n    t' -> throwTypeError i $\n      \"Cannot force a non-effectful value (got type\" <+> pretty t' <> \").\"\n      <+> \"The ! operator and non-final do-block statements require an effectful type <E> T.\"\n      <+> \"Pure expressions in a do-block should be bound via 'let' or moved to the final position.\"\n\n-- Resolve solved existentials so specific handlers (LstS, TupS, etc.) can match\ncheckE i g e t@(ExistU v _ _)\n  | Just _ <- lookupU v g\n  = checkE' i g e (apply g t)\n--   Sub (with coercion fallback)\ncheckE i g1 e1 b = do\n  (g2, a, e2) <- synthE' i g1 e1\n  let a' = apply g2 a\n      b' = apply g2 b\n  scope <- MM.getGeneralScope i\n  case subtype scope a' b' g2 of\n    Right g3 -> return (g3, apply g3 b', e2)\n    Left err ->\n      case tryCoerce scope a' b' g2 of\n        Just (coercions, g3) -> do\n          (finalExpr, _) <- foldlM\n            (\\(expr, currentType) coercion -> do\n              idx <- MM.getCounterWithPos i\n              let wrappedAnno = AnnoS (Idx idx currentType) i expr\n              return (CoerceS coercion wrappedAnno, applyCoercion coercion currentType))\n            (e2, apply g3 a')\n            coercions\n          return (g3, apply g3 b', finalExpr)\n        Nothing -> MM.throwSourcedError i $\n          \"Type mismatch:\"\n          <> line <> \"  expected: \" <> prettyTypeU b'\n          <> line <> \"  inferred: \" <> prettyTypeU a'\n          <> line <> err\n\nsubtype' :: Int -> TypeU -> TypeU -> Gamma -> MorlocMonad Gamma\nsubtype' i a b g = do\n  scope <- MM.getGeneralScope i\n  insetSay $ parens (pretty a) <+> \"<:\" <+> parens (pretty b)\n  case subtype scope a b g of\n    (Left err') -> MM.throwSourcedError i err'\n    (Right g') -> do\n      let newDeferred = drop (length (gammaDeferred g)) (gammaDeferred g')\n      mapM_ (\\(t1, t2) ->\n        MM.sayV $ \"Warning: deferred Nat constraint:\" <+> prettyTypeU t1 <+> \"~\" <+> prettyTypeU t2\n        ) newDeferred\n      return g'\n\n-- | Try to find a coercion chain from type a to type b.\n-- Returns a list of coercions (inside-out) and the resulting gamma.\n-- Recursion terminates when the target is not OptionalU.\ntryCoerce :: Scope -> TypeU -> TypeU -> Gamma -> Maybe ([Coercion], Gamma)\ntryCoerce scope a (OptionalU b) g =\n  case subtype scope a b g of\n    Right g' -> Just ([CoerceToOptional], g')\n    Left _ -> case tryCoerce scope a b g of\n      Just (cs, g') -> Just (CoerceToOptional : cs, g')\n      Nothing -> Nothing\n-- Coerce a pure value to an effectful type: a -> <E> a\ntryCoerce scope a (EffectU effs b) g =\n  case subtype scope a b g of\n    Right g' -> Just ([CoerceToEffect (resolveEffectSet effs)], g')\n    Left _ -> case tryCoerce scope a b g of\n      Just (cs, g') -> Just (CoerceToEffect (resolveEffectSet effs) : cs, g')\n      Nothing -> Nothing\ntryCoerce _ _ _ _ = Nothing\n\n-- | Strip OptionalU wrappers that result from coercion.\n-- Used in instance resolution to match the underlying type.\nstripCoercionWrappers :: TypeU -> TypeU\nstripCoercionWrappers (OptionalU t) = stripCoercionWrappers t\nstripCoercionWrappers t = t\n\n-- | Wrap the innermost (final) expression of a do-block body in EvalS.\n-- Walks past LetS wrappers (introduced by desugarDo) and updates their type\n-- annotations to the unwrapped inner type. A fresh index is allocated for\n-- the new EvalS node so indexing metadata stays unique.\nwrapFinalEvalS\n  :: Int\n  -> TypeU\n  -> AnnoS (Indexed TypeU) ManyPoly Int\n  -> MorlocMonad (AnnoS (Indexed TypeU) ManyPoly Int)\nwrapFinalEvalS i iT (AnnoS (Idx gi _) ci (LetS v e1 e2)) = do\n  e2' <- wrapFinalEvalS i iT e2\n  return (AnnoS (Idx gi iT) ci (LetS v e1 e2'))\nwrapFinalEvalS i iT final = do\n  newIdx <- MM.getCounterWithPos i\n  return (AnnoS (Idx newIdx iT) (annoSCtx final) (EvalS final))\n  where\n    annoSCtx (AnnoS _ c _) = c\n\n-- | Collect effect labels from all EvalS nodes within a do-block body.\n-- Deeply traverses the full expression tree to find nested EvalS nodes\n-- (e.g., inside tuples, applications, let bindings).\ncollectDoEffects :: AnnoS (Indexed TypeU) f c -> EffectSet\ncollectDoEffects = go\n  where\n    go (AnnoS _ _ expr) = case expr of\n      EvalS e -> effectOfAnno e `effUnion` go e\n      LetS _ e1 e2 -> go e1 `effUnion` go e2\n      AppS f args -> unions (go f : map go args)\n      TupS es -> unions (map go es)\n      LstS es -> unions (map go es)\n      NamS rs -> unions (map (go . snd) rs)\n      LamS _ e -> go e\n      IfS c t e -> go c `effUnion` go t `effUnion` go e\n      DoBlockS e -> go e\n      CoerceS _ e -> go e\n      IntrinsicS _ es -> unions (map go es)\n      _ -> emptyEffectSet\n\n    effectOfAnno (AnnoS (Idx _ (EffectU effs _)) _ _) = effs\n    effectOfAnno _ = emptyEffectSet\n\n    unions = foldl effUnion emptyEffectSet\n\n    effUnion a b\n      | a == emptyEffectSet = b\n      | b == emptyEffectSet = a\n      | a == b = a\n      | otherwise = EffectUnion a b\n\n-- helpers\n\n-- apply context to a AnnoS\napplyGen ::\n  (Functor gf, Traversable f, Applicable g) =>\n  Gamma ->\n  AnnoS (gf g) f c ->\n  AnnoS (gf g) f c\napplyGen g = mapAnnoSG (fmap (apply g))\n\napplyCon ::\n  (Functor gf, Traversable f, Applicable g) =>\n  Gamma ->\n  ExprS (gf g) f c ->\n  ExprS (gf g) f c\napplyCon g = mapExprSG (fmap (apply g))\n\nevaluateAnnoSTypes ::\n  (Traversable f) => AnnoS (Indexed TypeU) f Int -> MorlocMonad (AnnoS (Indexed TypeU) f Int)\nevaluateAnnoSTypes = mapAnnoSGM resolve\n  where\n    resolve :: Indexed TypeU -> MorlocMonad (Indexed TypeU)\n    resolve (Idx m t) = do\n      scope <- getScope m\n      case TE.evaluateType scope t of\n        (Left (SystemError e)) -> MM.throwSourcedError m e\n        (Left e) -> MM.throwError e\n        (Right tu) -> return (Idx m tu)\n\n    getScope :: Int -> MorlocMonad Scope\n    getScope i = do\n      globalMap <- MM.gets stateGeneralTypedefs\n      case GMap.lookup i globalMap of\n        GMapNoFst -> return Map.empty\n        GMapNoSnd -> return Map.empty\n        GMapJust scope -> return scope\n\n---- debugging\n\nsynthE' ::\n  Int ->\n  Gamma ->\n  ExprS Int ManyPoly Int ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , ExprS (Indexed TypeU) ManyPoly Int\n    )\nsynthE' i g x = do\n  enter \"synthE\"\n  insetSay $ \"synthesize type for: \" <> peakSExpr x\n  r@(g', t, _) <- synthE i g x\n  leave \"synthE\"\n  seeGamma g'\n  insetSay $ \"synthesized type = \" <> pretty t\n  return r\n\ncheckE' ::\n  Int ->\n  Gamma ->\n  ExprS Int ManyPoly Int ->\n  TypeU ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , ExprS (Indexed TypeU) ManyPoly Int\n    )\ncheckE' i g x t = do\n  enter \"checkE\"\n  insetSay $ \"check if expr: \" <> peakSExpr x\n  insetSay $ \"matches type: \" <> pretty t\n  r@(g', t', _) <- checkE i g x t\n  leave \"checkE\"\n  seeGamma g'\n  seeType t'\n  return r\n\napplication' ::\n  Int ->\n  Gamma ->\n  [AnnoS Int ManyPoly Int] ->\n  TypeU ->\n  MorlocMonad\n    ( Gamma\n    , TypeU\n    , [AnnoS (Indexed TypeU) ManyPoly Int]\n    )\napplication' i g es t = do\n  enter \"application\"\n  seeType t\n  insetSay $ \"es:\" <+> list [peakSExpr e | (AnnoS _ _ e) <- es]\n  r@(g', t', _) <- application i g es t\n  leave \"application\"\n  seeGamma g'\n  seeType t'\n  return r\n\n-- | Try to reduce an expression to a compile-time constant. Handles:\n--   - Integer literals\n--   - Tuple literals (recursively)\n--   - Let-bound and lambda-bound variable references (via gammaIntVals)\n--   - Let expressions (with local constant propagation)\n--   - Index accessors on tuples: .0 (5, 6, 7) => 5\n-- Returns Nothing for anything involving foreign function calls,\n-- non-constant variables, or unsupported expression forms.\ntryEvalConst :: Gamma -> ExprS g f c -> Maybe ConstVal\ntryEvalConst _ (IntS n) = Just (ConstInt n)\ntryEvalConst g (LetBndS v) = Map.lookup v (gammaIntVals g)\ntryEvalConst g (BndS v) = Map.lookup v (gammaIntVals g)\ntryEvalConst g (TupS es) = ConstTup <$> mapM (\\(AnnoS _ _ e) -> tryEvalConst g e) es\ntryEvalConst g (LetS v (AnnoS _ _ e1) (AnnoS _ _ e2)) = do\n  val' <- tryEvalConst g e1\n  tryEvalConst (g { gammaIntVals = Map.insert v val' (gammaIntVals g) }) e2\n-- Index accessor on tuple literal or known tuple: .i (a, b, c) => element i\ntryEvalConst g (AppS (AnnoS _ _ (ExeS (PatCall (PatternStruct\n  (SelectorIdx (idx, SelectorEnd) []))))) [AnnoS _ _ inner])\n  = case tryEvalConst g inner of\n      Just (ConstTup vs) | idx >= 0, idx < length vs -> Just (vs !! idx)\n      _ -> Nothing\n-- Lambda application: (\\x -> body) arg => beta-reduce\ntryEvalConst g (AppS (AnnoS _ _ (LamS vs (AnnoS _ _ body))) args)\n  | length vs == length args = do\n    vals <- mapM (\\(AnnoS _ _ e) -> tryEvalConst g e) args\n    let g' = g { gammaIntVals = foldl (\\m (v', val') -> Map.insert v' val' m) (gammaIntVals g) (zip vs vals) }\n    tryEvalConst g' body\ntryEvalConst _ _ = Nothing\n\n-- | Try to reduce an expression to an integer constant.\ntryEvalInt :: Gamma -> ExprS g f c -> Maybe Integer\ntryEvalInt g e = case tryEvalConst g e of\n  Just (ConstInt n) -> Just n\n  _ -> Nothing\n\ntryExtractIntPre :: Gamma -> AnnoS Int ManyPoly Int -> Maybe Integer\ntryExtractIntPre g (AnnoS _ _ e) = tryEvalInt g e\n\n-- | Resolve nat labels from int literal arguments.\n-- When a function has labeled nat params (e.g., m:Int -> Tensor1 m Real)\n-- and the corresponding arguments are int literals or let-bound ints,\n-- inject NatVarU solutions into gamma so the return type gets concrete dimensions.\nresolveNatLabels ::\n  AnnoS Int ManyPoly Int ->  -- the function expression (pre-synthesis)\n  TypeU ->                    -- the synthesized (renamed) function type\n  [AnnoS Int ManyPoly Int] -> -- the arguments\n  Gamma -> Gamma\nresolveNatLabels (AnnoS _ _ (VarS _ (MonomorphicExpr (Just et) _))) funType args g\n  | not (Map.null labels) =\n    let origNvs = nub (collectNatVarNames (etype et))\n        renamedNvs = nub (collectNatVarNames funType)\n        renMap = Map.fromList (zip origNvs renamedNvs)\n        solutions = Map.fromList\n          [ (renamedVar, NatLitU n)\n          | (origVar, argIdx) <- Map.toList labels\n          , Just renamedVar <- [Map.lookup origVar renMap]\n          , argIdx < length args\n          , Just n <- [tryExtractIntPre g (args !! argIdx)]\n          ]\n    in g { gammaNatSubs = Map.union solutions (gammaNatSubs g) }\n  where\n    labels = enatLabels et\nresolveNatLabels _ _ _ g = g\n\npeakSExpr :: ExprS Int ManyPoly Int -> MDoc\npeakSExpr UniS = \"UniS\"\npeakSExpr NullS = \"NullS\"\npeakSExpr (VarS v (MonomorphicExpr mayT _)) = \"VarS\" <+> pretty v <+> \"::\" <+> maybe \"?\" pretty mayT\npeakSExpr (VarS v (PolymorphicExpr cls _ t _)) = \"VarS\" <+> pretty cls <+> \" => \" <+> pretty v <+> \"::\" <+> pretty t\npeakSExpr (BndS v) = \"BndS\" <+> pretty v\npeakSExpr (AppS _ xs) = \"AppS\" <+> \"nargs=\" <> pretty (length xs)\npeakSExpr (LamS vs _) = \"LamS\" <> tupled (map pretty vs)\npeakSExpr (LstS xs) = \"LstS\" <> \"n=\" <> pretty (length xs)\npeakSExpr (TupS xs) = \"TupS\" <> \"n=\" <> pretty (length xs)\npeakSExpr (NamS rs) = \"NamS\" <> encloseSep \"{\" \"}\" \",\" (map (pretty . fst) rs)\npeakSExpr (RealS x) = \"RealS\" <+> viaShow x\npeakSExpr (IntS x) = \"IntS\" <+> pretty x\npeakSExpr (LogS x) = \"LogS\" <+> pretty x\npeakSExpr (StrS x) = \"StrS\" <+> pretty x\npeakSExpr (ExeS exe) = \"ExeS\" <+> pretty exe\npeakSExpr (LetS v _ _) = \"LetS\" <+> pretty v\npeakSExpr (LetBndS v) = \"LetBndS\" <+> pretty v\npeakSExpr (CallS v) = \"CallS\" <+> pretty v\npeakSExpr (DoBlockS _) = \"DoBlockS\"\npeakSExpr (EvalS _) = \"EvalS\"\npeakSExpr (CoerceS _ _) = \"CoerceS\"\npeakSExpr (IfS _ _ _) = \"IfS\"\npeakSExpr (IntrinsicS intr _) = \"@\" <> pretty (intrinsicName intr)\n"
  },
  {
    "path": "library/Morloc/Frontend/Valuecheck.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Frontend.Valuecheck\nDescription : Detect contradictions between alternative implementations\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nWhen a term has multiple implementations (e.g. a declaration and a source,\nor multiple sources), this module checks that they are not provably\ncontradictory. For instance, two constant expressions that return different\nliteral values for the same type are flagged as errors.\n-}\nmodule Morloc.Frontend.Valuecheck (valuecheck, checkPair) where\n\nimport qualified Data.Set as Set\nimport qualified Data.Text as DT\nimport Morloc.Data.Doc\nimport Morloc.Frontend.Namespace\nimport qualified Morloc.Monad as MM\n\n-- Convert AnnoS objects to a simple intermediate type\ntoE :: AnnoS (Indexed Type) Many Int -> E\ntoE (AnnoS g _ UniS) = LitP g MUni\ntoE (AnnoS g _ NullS) = LitP g MNull\ntoE (AnnoS g _ (BndS v)) = BndP g v\ntoE (AnnoS g _ (VarS v (Many es))) = VarP g v (map toE es)\ntoE (AnnoS g _ (AppS e es)) = AppP g (toE e) (map toE es)\ntoE (AnnoS g _ (LamS vs e)) = LamP g vs (toE e)\ntoE (AnnoS g _ (LstS es)) = LstP g (map toE es)\ntoE (AnnoS g _ (TupS es)) = TupP g (map toE es)\ntoE (AnnoS g _ (NamS rs)) = NamP g (map (second toE) rs)\ntoE (AnnoS g _ (RealS x)) = LitP g (MNum x)\ntoE (AnnoS g _ (IntS x)) = LitP g (MInt x)\ntoE (AnnoS g _ (LogS x)) = LitP g (MLog x)\ntoE (AnnoS g _ (StrS x)) = LitP g (MStr x)\ntoE (AnnoS g _ (ExeS (SrcCall s))) = SrcP g s\ntoE (AnnoS g _ (ExeS (PatCall (PatternText s ss)))) =\n  LitP g (MStr (s <> DT.concat [\"#{}\" <> s' | s' <- ss]))\ntoE (AnnoS g _ (ExeS (PatCall (PatternStruct s)))) = PatP g s\ntoE (AnnoS g _ (LetBndS v)) = BndP g v\ntoE (AnnoS g _ (CallS v)) = BndP g v\ntoE (AnnoS _ _ (LetS _ _ body)) = toE body\ntoE (AnnoS g _ (IfS c t e)) = IfP g (toE c) (toE t) (toE e)\ntoE (AnnoS g _ (DoBlockS e)) = DoBlockP g (toE e)\ntoE (AnnoS g _ (EvalS e)) = EvalP g (toE e)\ntoE (AnnoS g _ (CoerceS c e)) = CoerceP c g (toE e)\ntoE (AnnoS g _ (IntrinsicS intr es)) = IntrinsicP g intr (map toE es)\n\nindexOfE :: E -> Int\nindexOfE (BndP (Idx i _) _) = i\nindexOfE (VarP (Idx i _) _ _) = i\nindexOfE (AppP (Idx i _) _ _) = i\nindexOfE (LamP (Idx i _) _ _) = i\nindexOfE (LstP (Idx i _) _) = i\nindexOfE (TupP (Idx i _) _) = i\nindexOfE (NamP (Idx i _) _) = i\nindexOfE (LitP (Idx i _) _) = i\nindexOfE (SrcP (Idx i _) _) = i\nindexOfE (PatP (Idx i _) _) = i\nindexOfE (IfP (Idx i _) _ _ _) = i\nindexOfE (DoBlockP (Idx i _) _) = i\nindexOfE (EvalP (Idx i _) _) = i\nindexOfE (CoerceP _ (Idx i _) _) = i\nindexOfE (IntrinsicP (Idx i _) _ _) = i\n\n-- Check the harmony of typed implementations.\n--\n-- A naive implementation of this functions (and mine is naive as heck) will run\n-- in exponential time in some cases. This can be avoided with a touch of\n-- memoization. But I will leave that as an exercise for my user (PR's accepted).\nvaluecheck :: AnnoS (Indexed Type) Many Int -> MorlocMonad (AnnoS (Indexed Type) Many Int)\nvaluecheck e0 = check (toE e0) >> return e0\n\n-- walk through a tree\n-- find the sets of implementations in VarS expressions\n-- compare all pairs of implementations\ncheck :: E -> MorlocMonad ()\ncheck (VarP (Idx i _) _ es) = mapM_ (uncurry (checkPair i)) (pairwise es)\n  where\n    -- find all unique pairs\n    pairwise :: [a] -> [(a, a)]\n    pairwise xs = [(xs !! i', xs !! j') | i' <- [0 .. length xs - 1], j' <- [0 .. length xs - 1], j' > i']\ncheck (AppP _ e es) = mapM_ check (e : es)\ncheck (LamP _ _ e) = check e\ncheck (LstP _ es) = mapM_ check es\ncheck (TupP _ es) = mapM_ check es\ncheck (NamP _ (map snd -> es)) = mapM_ check es\ncheck (IfP _ c t e) = mapM_ check [c, t, e]\ncheck (DoBlockP _ e) = check e\ncheck (EvalP _ e) = check e\ncheck (CoerceP _ _ e) = check e\ncheck _ = return ()\n\n-- check for contradictions in one pair of expressions\ncheckPair :: Int -> E -> E -> MorlocMonad ()\n-- These pass\n--   foo x y = (x, y)\n--   foo a b = (a, b)\n--\n-- These do not except in the special case where a=b\n--   foo x y = (x, y)\n--   foo a b = (b, a)\n--\n-- This requires unified names (see LamS case)\ncheckPair i e1@(BndP _ v1) e2@(BndP _ v2)\n  | v1 == v2 = return ()\n  | otherwise = valueError i e1 e2 \"Non-equivalent variable patterns\"\ncheckPair _ e1@(VarP g v1 es1) (VarP _ v2 es2)\n  -- Same term, so es1 and es2 must be identical\n  | v1 == v2 = check e1\n  -- If the terms are different all the instances must still be the same and the\n  -- type will be the same, so we can simply combine them.\n  | otherwise = check (VarP g v1 (es1 <> es2))\n-- evaluate all applications of lambdas\n--  case #1 remove an empty lambda\ncheckPair i (AppP _ (LamP _ [] e1) _) e2 = checkPair i e1 e2\n--  case #2 remove an empty application\ncheckPair i (AppP _ f@LamP {} []) e2 = checkPair i f e2\n--  case #3 substitute on argument into the lambda\ncheckPair i (AppP g1 (LamP g2 (v : vs) e1) (x : xs)) e2 =\n  let e1' = substituteExpr v x e1\n   in checkPair i (AppP g1 (LamP g2 vs e1') xs) e2\n--  if there is an applied lambda on the other side, reverse\ncheckPair i e1 e2@(AppP _ LamP {} _) = checkPair i e2 e1\n-- No value checking is possible between applications\n--\n-- If the function applied is not the same in both terms, we can\n-- say nothing without source analysis.\n--\n-- If the function applied is the same, we also need more information to make\n-- any decisions. For example:\n--\n-- foo x y\n-- foo y x\n--\n-- Here the applied function is the same, but the arguments are switched. But\n-- whether this causes the terms to be non-equal depends on whether the function\n-- commutes. For example, `add x y` == `add y x`.\n--\n-- In general, a function is free to map different inputs to the same\n-- output. Without further information, we can conclude nothing. So all\n-- applications must pass.\ncheckPair _ AppP {} AppP {} = return ()\ncheckPair i e1@AppP {} e2 = valueError i e1 e2 \"Cannot check beyond source boundary\"\ncheckPair i e1 e2@AppP {} = valueError i e1 e2 \"Cannot check beyond source boundary\"\n-- Not that SrcP is something sourced, not necessarily a function, it may be a\n-- constant.\ncheckPair i (SrcP (Idx _ t) src1) (SrcP _ src2) = compareForeignFunctions i t src1 src2\ncheckPair i e1@(SrcP _ _) e2 = checkPair i e2 e1\ncheckPair i e1 e2@SrcP {}\n  | isSimple e1 = valueError i e1 e2 \"Cannot compare source value to non-source expression\"\n  | otherwise = return ()\n  where\n    -- For VarP, simplicity of ANY instance indicates an error\n    isSimple (VarP _ _ es) = any isSimple es\n    -- For other expressions, only simplicity of ALL elements is error\n    isSimple (LstP _ es) = all isSimple es\n    isSimple (TupP _ es) = all isSimple es\n    isSimple (NamP _ (map snd -> es)) = all isSimple es\n    isSimple BndP {} = True\n    isSimple LitP {} = True\n    isSimple AppP {} = False\n    isSimple LamP {} = False\n    isSimple SrcP {} = False\n    isSimple PatP {} = False\n    isSimple (IfP _ _ _ _) = False\n    isSimple (DoBlockP _ e) = isSimple e\n    isSimple (EvalP _ e) = isSimple e\n    isSimple (CoerceP _ _ e) = isSimple e\n    isSimple (IntrinsicP _ _ _) = False\n\n-- reduce empty lambdas\n--\n-- -- initial\n-- \\x y -> (\\a b -> bar b a) x y\n-- \\k j -> bar k j\n--\n-- -- unify terms\n-- \\m n -> (\\a b -> bar b a) m n\n-- \\m n -> bar m n\n--\n-- -- apply if not in canonical form\n-- \\m n -> bar n m\n-- \\m n -> bar m n\n--\n-- -- compare calls\n-- bar\n-- bar -- same function is called, so their arguments must be the comparable\n--\n-- -- compare arguments, starting with first\n-- n\n-- m\ncheckPair i (LamP _ vs1 s1) (LamP _ vs2 s2) = checkPair i s1' s2'\n  where\n    used =\n      Set.unions\n        [ freeTerms s1\n        , freeTerms s2\n        , Set.fromList (vs1 <> vs2)\n        ]\n\n    -- list of original variable names\n    newvars =\n      filter\n        (\\v -> not $ Set.member v used)\n        [EV $ DT.pack (\"x\" <> show j) | j <- [(0 :: Int) ..]]\n\n    s1' = foldr (\\(v, r) s -> substituteEVar v r s) s1 (zip vs1 newvars)\n    s2' = foldr (\\(v, r) s -> substituteEVar v r s) s2 (zip vs2 newvars)\ncheckPair _ _ (LamP {}) = error \"Illegal empty lambda\"\ncheckPair _ (LamP {}) _ = error \"Illegal empty lambda\"\n-- check all container elements\n--  * their sizes must agree\n--  * their pairwise elements must agree\ncheckPair i e1@(LstP _ xs) e2@(LstP _ ys)\n  | length xs /= length ys = valueError i e1 e2 \"Containers of unequal length\"\n  | otherwise = mapM_ (uncurry (checkPair i)) (zip xs ys)\ncheckPair i (TupP _ xs) (TupP _ ys) =\n  mapM_ (uncurry (checkPair i)) (zip xs ys)\n-- check records, no assumption of order\ncheckPair _ (NamP _ []) (NamP _ _) = return ()\ncheckPair i (NamP g1 ((k, x) : rs1)) (NamP g2 rs2) =\n  case lookup k rs2 of\n    (Just y) -> checkPair i x y >> checkPair i (NamP g1 rs1) (NamP g2 rs2)\n    Nothing -> error \"Unreachable if typechecker has passed\"\n-- Primitives must be equal\ncheckPair i e1@(LitP _ x) e2@(LitP _ y)\n  | x == y = return ()\n  | otherwise =\n      valueError i e1 e2 $\n        \"Cannot equate non-equal primitives:\\n\"\n          <> \"a:\" <+> pretty x\n          <> \"b:\" <+> pretty y\n-- All other cases should fail.\n--\n-- Actually, all other cases should already have failed while typechecking.\n--\n-- It should not be possible to reach this case, should it?\ncheckPair i e1 e2 = valueError i e1 e2 \"Non-equivalent forms\"\n\n-- Currently we do not check the equivalence of sourced terms\n--\n-- This is the function to implement when we decided to start\n-- checking source code\n--\n-- This operation may be expensive (if we are doing it right)\ncompareForeignFunctions :: Int -> Type -> Source -> Source -> MorlocMonad ()\ncompareForeignFunctions _ _ _ _ = return ()\n\nvalueError :: Int -> E -> E -> MDoc -> MorlocMonad ()\nvalueError i e1 e2 msg = MM.throwUnificationError (indexOfE e1) (indexOfE e2) i (\"Error in value checker:\" <+> msg)\n\nsubstituteEVar :: EVar -> EVar -> E -> E\nsubstituteEVar oldVar newVar e0\n  | oldVar == newVar = e0\n  | otherwise = f usedVars 0 e0\n  where\n    -- list of free term variables\n    usedVars = Set.union (freeTerms e0) (Set.fromList [oldVar, newVar])\n\n    f :: Set.Set EVar -> Int -> E -> E\n    f _ _ e@(BndP g v)\n      | v == oldVar = BndP g newVar\n      | otherwise = e\n    f used idx (LamP g vs e) =\n      let (used', idx', vs', e') = relabelLam used idx vs e\n       in LamP g vs' (f used' idx' e')\n    f _ _ e@(VarP g v es)\n      | v == oldVar = VarP g newVar es\n      | otherwise = e\n    f used idx (AppP g e es) = AppP g (f used idx e) $ map (f used idx) es\n    f used idx (LstP g es) = LstP g $ map (f used idx) es\n    f used idx (TupP g es) = TupP g $ map (f used idx) es\n    f used idx (NamP g rs) = NamP g $ map (second (f used idx)) rs\n    f used idx (IfP g c t e) = IfP g (f used idx c) (f used idx t) (f used idx e)\n    f used idx (DoBlockP g e) = DoBlockP g (f used idx e)\n    f used idx (EvalP g e) = EvalP g (f used idx e)\n    f used idx (CoerceP c g e) = CoerceP c g (f used idx e)\n    f _ _ e = e\n\n    relabelLam :: Set.Set EVar -> Int -> [EVar] -> E -> (Set.Set EVar, Int, [EVar], E)\n    relabelLam used idx [] e = (used, idx, [], e)\n    relabelLam used idx (v : vs) e\n      | Set.member v used =\n          let (idx', v') = newvar used idx\n           in let (used', idx'', vs', e') = relabelLam (Set.insert v' used) idx' vs (substituteEVar v v' e)\n               in (used', idx'', v' : vs', e')\n      | otherwise =\n          let (used', idx', vs', e') = relabelLam used idx vs e\n           in (used', idx', v : vs', e')\n\n    newvar :: Set.Set EVar -> Int -> (Int, EVar)\n    newvar used i =\n      let x = EV (DT.pack $ \"x\" <> show i)\n       in if Set.member x used\n            then newvar used (i + 1)\n            else (i, x)\n\n-- Find all names in a term that are not bound under a lambda\nfreeTerms :: E -> Set.Set EVar\nfreeTerms = f Set.empty\n  where\n    f :: Set.Set EVar -> E -> Set.Set EVar\n    f boundterms (BndP _ v)\n      | Set.member v boundterms = Set.empty\n      | otherwise = Set.singleton v\n    f boundterms (VarP _ v es)\n      | Set.member v boundterms = error \"Bug found, somewhere Var and Bnd are getting mixed\"\n      | otherwise = Set.insert v . Set.unions . fmap (f boundterms) $ es\n    f boundterms (LamP _ vs e) =\n      let boundterms' = Set.union boundterms (Set.fromList vs)\n       in f boundterms' e\n    f boundterms (AppP _ e es) = Set.unions . map (f boundterms) $ (e : es)\n    f boundterms (LstP _ es) = Set.unions . map (f boundterms) $ es\n    f boundterms (TupP _ es) = Set.unions . map (f boundterms) $ es\n    f boundterms (NamP _ (map snd -> es)) = Set.unions . map (f boundterms) $ es\n    f boundterms (IfP _ c t e) = Set.unions [f boundterms c, f boundterms t, f boundterms e]\n    f boundterms (DoBlockP _ e) = f boundterms e\n    f boundterms (EvalP _ e) = f boundterms e\n    f boundterms (CoerceP _ _ e) = f boundterms e\n    f _ _ = Set.empty\n\nsubstituteExpr :: EVar -> E -> E -> E\nsubstituteExpr oldVar replacementExpr = f\n  where\n    f e@(BndP _ v)\n      | v == oldVar = replacementExpr\n      | otherwise = e\n    f e@(VarP _ v _)\n      | v == oldVar = replacementExpr\n      | otherwise = e\n    f e@(LamP g vs body)\n      | oldVar `elem` vs = e -- stop if term is shadowed\n      | otherwise = LamP g vs (f body)\n    f (AppP g e es) = AppP g (f e) (map f es)\n    f (LstP g es) = LstP g (map f es)\n    f (TupP g es) = TupP g (map f es)\n    f (NamP g rs) = NamP g (map (second f) rs)\n    f e@LitP {} = e\n    f e@SrcP {} = e\n    f e@PatP {} = e\n    f (IfP g c t e) = IfP g (f c) (f t) (f e)\n    f (DoBlockP g e) = DoBlockP g (f e)\n    f (EvalP g e) = EvalP g (f e)\n    f (CoerceP c g e) = CoerceP c g (f e)\n    f (IntrinsicP g intr es) = IntrinsicP g intr (map f es)\n"
  },
  {
    "path": "library/Morloc/Internal.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Internal\nDescription : Proto-prelude re-exporting common utilities\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nA project-wide prelude that re-exports commonly used modules (Data.Maybe,\nData.Either, Control.Monad, etc.) along with custom Bifunctor\\/Bifoldable\ntypeclasses and utility functions. Imported transitively by nearly every\nmodule via \"Morloc.Namespace.Prim\".\n\nThis module must NOT import anything from Morloc (other than Data.*) to\navoid circular dependencies, since the lexer depends on it.\n-}\nmodule Morloc.Internal\n  ( concatMapM\n  , unique\n  , duplicates\n  , module Data.Maybe\n  , module Data.Either\n  , module Data.List.Extra\n  , module Control.Monad\n  , module Control.Monad.IO.Class\n  , module Data.Monoid\n  , module Data.Traversable\n  , module Morloc.Data.Bifoldable\n  , module Morloc.Data.Bifunctor\n  , module Morloc.Data.Annotated\n  , isLower\n  , isUpper\n  , toLower\n\n    -- * Data.Foldable re-exports\n  , foldlM\n  , foldrM\n\n    -- * Tuple utilities\n  , return2\n\n    -- * Operators\n  , (|>>)\n  , (</>)\n  , (<|>)\n  , (&&&)\n  , (***)\n\n    -- * Safe re-exports\n  , module Safe\n\n    -- * Stateful mapping\n  , statefulMap\n  , statefulMapM\n  , filterApart\n\n    -- * Length-checked zips (fail on mismatched lengths, indicating compiler bugs)\n  , safeZip\n  , safeZipWith\n  , safeZipWithM\n  ) where\n\nimport Control.Applicative ((<|>))\nimport Control.Monad\nimport Control.Monad.IO.Class\nimport Data.Char (isLower, isUpper, toLower)\nimport Data.Either\nimport Data.Foldable (foldlM, foldrM)\nimport Data.List.Extra hiding (list)\nimport qualified Data.Map.Strict as Map\nimport Data.Maybe\nimport Data.Monoid\nimport qualified Data.Set as Set\nimport Data.Traversable\nimport Data.Tuple.Extra ((&&&), (***))\nimport Morloc.Data.Annotated\nimport Morloc.Data.Bifoldable\nimport Morloc.Data.Bifunctor\nimport Safe hiding (at, headDef, lastDef)\nimport System.FilePath\nimport Prelude hiding (mapM)\n\n-- | Lift a binary function into a monadic return\nreturn2 :: (Monad m) => (a -> b -> c) -> (a -> b -> m c)\nreturn2 f x y = return $ f x y\n\n-- | Concatenate the results of a monadic map\nconcatMapM :: (Monad m) => (a -> m [b]) -> [a] -> m [b]\nconcatMapM f = fmap concat . mapM f\n\n-- | Remove duplicate elements while preserving first-occurrence order\nunique :: (Ord a) => [a] -> [a]\nunique = unique' Set.empty\n  where\n    unique' _ [] = []\n    unique' set (x : xs)\n      | Set.member x set = unique' set xs\n      | otherwise = x : unique' (Set.insert x set) xs\n\n-- | Return elements that appear more than once, in first-occurrence order\nduplicates :: (Ord a) => [a] -> [a]\nduplicates xs = unique $ filter isDuplicated xs\n  where\n    countMap = Map.fromList . map (\\ks -> (head ks, length ks)) . group . sort $ xs\n    isDuplicated k = fromJust (Map.lookup k countMap) > 1\n\n-- | Map with threaded state\nstatefulMap :: (s -> a -> (s, b)) -> s -> [a] -> (s, [b])\nstatefulMap _ s [] = (s, [])\nstatefulMap f s0 (x : xs) =\n  let (s1, y) = f s0 x\n   in let (sn, ys) = statefulMap f s1 xs\n       in (sn, y : ys)\n\n-- | Monadic 'statefulMap'\nstatefulMapM :: (Monad m) => (s -> a -> m (s, b)) -> s -> [a] -> m (s, [b])\nstatefulMapM _ s [] = return (s, [])\nstatefulMapM f s (x : xs) = do\n  (s', x') <- f s x\n  (s'', xs') <- statefulMapM f s' xs\n  return (s'', x' : xs')\n\n-- | Extract the first element matching a predicate, returning it and the rest\nfilterApart :: (a -> Bool) -> [a] -> (Maybe a, [a])\nfilterApart _ [] = (Nothing, [])\nfilterApart f (x : xs)\n  | f x = (Just x, xs)\n  | otherwise = case filterApart f xs of\n      (r, xs') -> (r, x : xs')\n\n-- | Zip two lists, returning 'Nothing' if lengths differ\nsafeZip :: [a] -> [b] -> Maybe [(a, b)]\nsafeZip (x : xs) (y : ys) = (:) (x, y) <$> safeZip xs ys\nsafeZip [] [] = Just []\nsafeZip _ _ = Nothing\n\n-- | 'zipWith' returning 'Nothing' if lengths differ\nsafeZipWith :: (a -> b -> c) -> [a] -> [b] -> Maybe [c]\nsafeZipWith f xs ys\n  | length xs == length ys = Just $ zipWith f xs ys\n  | otherwise = Nothing\n\n-- | Monadic 'safeZipWith'\nsafeZipWithM :: (Monad m) => (a -> b -> m c) -> [a] -> [b] -> m (Maybe [c])\nsafeZipWithM f xs ys\n  | length xs == length ys = zipWithM f xs ys |>> Just\n  | otherwise = return Nothing\n\n-- | Piped fmap: @x |>> f == fmap f x@\ninfixl 1 |>>\n\n(|>>) :: (Functor f) => f a -> (a -> b) -> f b\n(|>>) = flip fmap\n"
  },
  {
    "path": "library/Morloc/LangRegistry.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.LangRegistry\nDescription : Language registry loaded from lang.yaml and languages.yaml\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProvides metadata about all supported languages. Built from embedded\nlang.yaml files at startup and extended with discovered plugin languages\nat parse time.\n-}\nmodule Morloc.LangRegistry\n  ( LangRegistry (..)\n  , LangRegistryEntry (..)\n  , emptyRegistry\n  , buildDefaultRegistry\n  , extendRegistry\n  , lookupLang\n  , lookupByAlias\n  , buildLangMap\n  , registryPairwiseCost\n  , registryLanguageCost\n  , registrySerialType\n  , registryIsCompiled\n  , registryRunCommand\n  , registryMakeExtension\n  , registryMakeExecutablePoolName\n  , registryMakeSourcePoolName\n  , parseLangYamlFile\n  ) where\n\nimport Data.Aeson ((.!=), (.:), (.:?))\nimport qualified Data.Aeson as Aeson\nimport qualified Data.ByteString as BS\nimport Data.Map.Strict (Map)\nimport qualified Data.Map.Strict as Map\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Data.Text.Encoding as TE\nimport qualified Data.Yaml as Y\nimport Morloc.Language (Lang (..))\n\ndata LangRegistry = LangRegistry\n  { lrEntries :: Map Text LangRegistryEntry -- keyed by canonical name\n  , lrAliases :: Map Text Text -- alias -> canonical name\n  , lrSameLangCosts :: Map Text Int\n  , lrCrossLangCosts :: Map Text Int\n  , lrOptimizedPairs :: Map (Text, Text) Int\n  , lrDefaultSameCost :: Int\n  , lrDefaultCrossCost :: Int\n  }\n  deriving (Show)\n\ndata LangRegistryEntry = LangRegistryEntry\n  { lreExtension :: !String\n  , lreIsCompiled :: !Bool\n  , lreRunCommand :: ![Text]\n  , lreSerialType :: !Text\n  , lreCost :: !Int\n  , lrePreamble :: ![Text]\n  }\n  deriving (Show)\n\nemptyRegistry :: LangRegistry\nemptyRegistry = LangRegistry Map.empty Map.empty Map.empty Map.empty Map.empty 10 10000\n\n-- | Build the default registry from embedded lang.yaml files and languages.yaml\nbuildDefaultRegistry :: [(String, Text)] -> Text -> Either String LangRegistry\nbuildDefaultRegistry langFiles languagesText = do\n  langs <- mapM parseLangYaml langFiles\n  costs <- parseLanguagesYaml languagesText\n  let entries = Map.fromList [(lymName ly, entryFromYaml ly) | ly <- langs]\n      aliases =\n        Map.fromList $\n          concatMap (\\ly -> [(a, lymName ly) | a <- lymAliases ly] ++ [(lymName ly, lymName ly)]) langs\n  return\n    costs\n      { lrEntries = entries\n      , lrAliases = aliases\n      }\n\n-- | Extend the registry with a new language entry (for plugins)\nextendRegistry :: Text -> LangRegistryEntry -> [Text] -> LangRegistry -> LangRegistry\nextendRegistry name entry newAliases reg =\n  reg\n    { lrEntries = Map.insert name entry (lrEntries reg)\n    , lrAliases = Map.union (Map.fromList $ (name, name) : [(a, name) | a <- newAliases]) (lrAliases reg)\n    }\n\nlookupLang :: Text -> LangRegistry -> Maybe LangRegistryEntry\nlookupLang name reg = Map.lookup name (lrEntries reg)\n\n-- | Look up by alias, returning (canonical name, entry)\nlookupByAlias :: Text -> LangRegistry -> Maybe (Text, LangRegistryEntry)\nlookupByAlias alias reg = do\n  canonical <- Map.lookup (T.toLower alias) (lrAliases reg)\n  entry <- Map.lookup canonical (lrEntries reg)\n  return (canonical, entry)\n\n{- | Build a map from all aliases (lowercased) to Lang values.\nUsed by the parser to resolve language names.\n-}\nbuildLangMap :: LangRegistry -> Map Text Lang\nbuildLangMap reg =\n  Map.fromList\n    [ (alias, Lang canonical (lreExtension entry))\n    | (alias, canonical) <- Map.toList (lrAliases reg)\n    , Just entry <- [Map.lookup canonical (lrEntries reg)]\n    ]\n\nregistryPairwiseCost :: LangRegistry -> Text -> Text -> Int\nregistryPairwiseCost reg from to\n  | from == to = case Map.lookup from (lrSameLangCosts reg) of\n      Just c -> c\n      Nothing -> lrDefaultSameCost reg\n  | otherwise = case Map.lookup (from, to) (lrOptimizedPairs reg) of\n      Just c -> c\n      Nothing -> case Map.lookup to (lrCrossLangCosts reg) of\n        Just c -> c\n        Nothing -> lrDefaultCrossCost reg\n\nregistryLanguageCost :: LangRegistry -> Text -> Int\nregistryLanguageCost reg name = case Map.lookup name (lrEntries reg) of\n  Just entry -> lreCost entry\n  Nothing -> 5\n\nregistrySerialType :: LangRegistry -> Text -> Text\nregistrySerialType reg name = case Map.lookup name (lrEntries reg) of\n  Just entry -> lreSerialType entry\n  Nothing -> \"bytes\"\n\nregistryIsCompiled :: LangRegistry -> Text -> Bool\nregistryIsCompiled reg name = case Map.lookup name (lrEntries reg) of\n  Just entry -> lreIsCompiled entry\n  Nothing -> False\n\nregistryRunCommand :: LangRegistry -> Text -> [Text]\nregistryRunCommand reg name = case Map.lookup name (lrEntries reg) of\n  Just entry -> lreRunCommand entry\n  Nothing -> []\n\nregistryMakeExtension :: LangRegistry -> Text -> String\nregistryMakeExtension reg name = case Map.lookup name (lrEntries reg) of\n  Just entry -> lreExtension entry\n  Nothing -> T.unpack name\n\nregistryMakeExecutablePoolName :: LangRegistry -> Text -> String -> String\nregistryMakeExecutablePoolName reg name base =\n  if registryIsCompiled reg name\n    then base <> \"-\" <> T.unpack name <> \".out\"\n    else base <> \".\" <> registryMakeExtension reg name\n\nregistryMakeSourcePoolName :: LangRegistry -> Text -> String -> String\nregistryMakeSourcePoolName reg name base =\n  base <> \".\" <> registryMakeExtension reg name\n\n-- internal YAML types\n\ndata LangYamlMeta = LangYamlMeta\n  { lymName :: Text\n  , lymExtension :: String\n  , lymAliases :: [Text]\n  , lymIsCompiled :: Bool\n  , lymRunCommand :: [Text]\n  , lymSerialType :: Text\n  , lymCost :: Int\n  , lymPreamble :: [Text]\n  }\n  deriving (Show)\n\ninstance Aeson.FromJSON LangYamlMeta where\n  parseJSON = Aeson.withObject \"LangYamlMeta\" $ \\o ->\n    LangYamlMeta\n      <$> o .: \"name\"\n      <*> o .: \"extension\"\n      <*> o .:? \"aliases\" .!= []\n      <*> o .:? \"is_compiled\" .!= False\n      <*> o .:? \"run_command\" .!= []\n      <*> o .:? \"serial_type\" .!= \"bytes\"\n      <*> o .:? \"cost\" .!= 5\n      <*> o .:? \"preamble\" .!= []\n\ndata LanguagesYaml = LanguagesYaml\n  { lysSameLangCosts :: Map Text Int\n  , lysCrossLangCosts :: Map Text Int\n  , lysOptimizedPairs :: [(Text, Text, Int)]\n  , lysDefaultSame :: Int\n  , lysDefaultCross :: Int\n  }\n  deriving (Show)\n\ninstance Aeson.FromJSON LanguagesYaml where\n  parseJSON = Aeson.withObject \"LanguagesYaml\" $ \\o -> do\n    same <- o .:? \"same_language_costs\" .!= Map.empty\n    cross <- o .:? \"cross_language_costs\" .!= Map.empty\n    pairs <- o .:? \"optimized_pairs\" .!= []\n    defSame <- o .:? \"default_same_language\" .!= 10\n    defCross <- o .:? \"default_cross_language\" .!= 10000\n    parsedPairs <- mapM parsePair pairs\n    return $ LanguagesYaml same cross parsedPairs defSame defCross\n    where\n      parsePair = Aeson.withObject \"OptimizedPair\" $ \\o ->\n        (,,) <$> o .: \"from\" <*> o .: \"to\" <*> o .: \"cost\"\n\nentryFromYaml :: LangYamlMeta -> LangRegistryEntry\nentryFromYaml ly =\n  LangRegistryEntry\n    { lreExtension = lymExtension ly\n    , lreIsCompiled = lymIsCompiled ly\n    , lreRunCommand = lymRunCommand ly\n    , lreSerialType = lymSerialType ly\n    , lreCost = lymCost ly\n    , lrePreamble = lymPreamble ly\n    }\n\n-- | Parse a lang.yaml file from the filesystem, returning (canonical name, extension)\nparseLangYamlFile :: FilePath -> IO (Either String (Text, String))\nparseLangYamlFile path = do\n  content <- TE.decodeUtf8 <$> BS.readFile path\n  case parseLangYaml (\"file:\" ++ path, content) of\n    Left err -> return (Left err)\n    Right meta -> return (Right (lymName meta, lymExtension meta))\n\nparseLangYaml :: (String, Text) -> Either String LangYamlMeta\nparseLangYaml (name, content) =\n  case Y.decodeEither' (TE.encodeUtf8 content) of\n    Left err -> Left $ \"Failed to parse lang.yaml for \" ++ name ++ \": \" ++ Y.prettyPrintParseException err\n    Right meta -> Right meta\n\nparseLanguagesYaml :: Text -> Either String LangRegistry\nparseLanguagesYaml content =\n  case Y.decodeEither' (TE.encodeUtf8 content) of\n    Left err -> Left $ \"Failed to parse languages.yaml: \" ++ Y.prettyPrintParseException err\n    Right lys ->\n      Right $\n        LangRegistry\n          { lrEntries = Map.empty\n          , lrAliases = Map.empty\n          , lrSameLangCosts = lysSameLangCosts lys\n          , lrCrossLangCosts = lysCrossLangCosts lys\n          , lrOptimizedPairs = Map.fromList [((f, t), c) | (f, t, c) <- lysOptimizedPairs lys]\n          , lrDefaultSameCost = lysDefaultSame lys\n          , lrDefaultCrossCost = lysDefaultCross lys\n          }\n"
  },
  {
    "path": "library/Morloc/Language.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Language\nDescription : Language type and utilities\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nThe Lang type is a simple name+extension record. All language metadata\nlives in the LangRegistry (loaded from lang.yaml files).\n-}\nmodule Morloc.Language\n  ( Lang (..)\n  , makeExtension\n  , showLangName\n  , makeExecutablePoolName\n  , makeSourcePoolName\n  , makeLang\n  ) where\n\nimport Data.Text (Text)\nimport Morloc.Data.Doc\n\n{- | A programming language in the Morloc ecosystem.\nIdentity is determined solely by the canonical name.\n-}\ndata Lang = Lang\n  { langName :: !Text -- canonical lowercase name: \"py\", \"r\", \"cpp\", etc.\n  , langExtension :: !String -- file extension: \"py\", \"R\", \"cpp\", etc.\n  }\n  deriving (Show)\n\ninstance Eq Lang where\n  a == b = langName a == langName b\n\ninstance Ord Lang where\n  compare a b = compare (langName a) (langName b)\n\ninstance Pretty Lang where\n  pretty = pretty . langName\n\n-- | Construct a Lang from canonical name and extension\nmakeLang :: Text -> String -> Lang\nmakeLang = Lang\n\n-- | Get the file extension for a language\nmakeExtension :: Lang -> String\nmakeExtension = langExtension\n\n-- | Get the canonical name of a language\nshowLangName :: Lang -> Text\nshowLangName = langName\n\nmakeSourceName :: Lang -> String -> String\nmakeSourceName lang base = base ++ \".\" ++ makeExtension lang\n\nmakeExecutableName :: Lang -> String -> String\nmakeExecutableName lang base\n  | langName lang == \"c\" = base <> \"-c.out\"\n  | langName lang == \"cpp\" = base <> \"-cpp.out\"\n  | otherwise = makeSourceName lang base\n\nmakeExecutablePoolName :: Lang -> String\nmakeExecutablePoolName lang = makeExecutableName lang \"pool\"\n\nmakeSourcePoolName :: Lang -> String\nmakeSourcePoolName lang = makeSourceName lang \"pool\"\n"
  },
  {
    "path": "library/Morloc/Module.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Module\nDescription : Module discovery, metadata loading, and installation\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nHandles all aspects of the morloc module system:\n\n * finding modules on the local filesystem (by name or path)\n * loading package YAML metadata\n * finding headers and shared libraries required by modules\n * installing modules from GitHub via @morloc install@\n-}\nmodule Morloc.Module\n  ( findModule\n  , loadModuleMetadata\n  , findMainLocFile\n\n    -- * Module installation\n  , OverwriteProtocol (..)\n  , GitProtocol (..)\n  , InstallReason (..)\n  , TypecheckFn\n  , installModule\n  , extractMorlocDeps\n  , extractModuleName\n  ) where\n\nimport Control.Applicative (optional)\nimport Control.Exception (onException)\nimport Text.Parsec (Parsec, try, parse, many, many1)\nimport Text.Parsec.Char (char, string, alphaNum, digit, satisfy)\nimport Text.Parsec.Text ()\n\nimport qualified Data.Aeson as Aeson\nimport qualified Data.Aeson.KeyMap as KM\nimport qualified Data.ByteString.Char8 as BS8\nimport qualified Data.ByteString.Lazy as BL\nimport qualified Data.Vector as V  -- from aeson's transitive dependency\nimport qualified Data.Char as DC\nimport qualified Data.Map as Map\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Data.Text.IO as TIO\nimport qualified Data.Time.Clock.POSIX as Time\nimport qualified Data.Yaml.Config as YC\nimport qualified Morloc.Config as Config\nimport Morloc.Data.Doc\nimport Morloc.Data.Json\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport qualified Morloc.ProgramBuilder.Install as Install\nimport qualified Morloc.System as MS\nimport qualified Network.HTTP.Simple as HTTP\nimport System.Directory\nimport System.Process (callProcess, readProcess)\n\ndata InstallReason = ExplicitInstall | AutoDependency\n  deriving (Show, Eq)\n\nmoduleInstallError :: MDoc -> MorlocMonad a\nmoduleInstallError msg = MM.throwSystemError $ \"Failed to install module:\" <+> msg\n\n-- | Is this a local (.dot-prefixed) import?\nisLocalImport :: MVar -> Bool\nisLocalImport (MV x) = \".\" `MT.isPrefixOf` x\n\n-- | Look for a morloc module: local (.dot-prefixed) or bare (system/plane).\nfindModule :: (Maybe Path, MVar) -> MVar -> MorlocMonad Path\nfindModule (_, currentModule) importModule\n  | isLocalImport importModule = findLocalModule currentModule importModule\n  | otherwise = findBareModule currentModule importModule\n\n-- | Resolve a local import from the project root.\n-- e.g., .foo.bar -> <root>/foo/bar.loc or <root>/foo/bar/main.loc\nfindLocalModule :: MVar -> MVar -> MorlocMonad Path\nfindLocalModule currentModule importModule = do\n  projectRoot <- MM.gets stateProjectRoot\n  case projectRoot of\n    Nothing -> MM.throwSystemError $\n      \"Cannot resolve local import\" <+> squotes (pretty importModule)\n        <+> \"without a project root (are you reading from stdin?)\"\n    Just root -> do\n      let MV importText = importModule\n          nameParts = map MT.unpack $ MT.splitOn \".\" (MT.drop 1 importText)\n          candidates =\n            [ MS.joinPath (root : init nameParts ++ [last nameParts ++ \".loc\"])\n            , MS.joinPath (root : nameParts ++ [\"main.loc\"])\n            ]\n      existingPaths <- liftIO . fmap catMaybes . mapM getFile $ candidates\n      case existingPaths of\n        (x : _) -> return x\n        [] -> MM.throwSystemError $\n          \"Within module\" <+> squotes (pretty currentModule)\n            <> \",\" <+> \"failed to import local module\" <+> squotes (pretty importModule)\n            <> \"\\nThe following paths were searched:\\n\"\n            <+> indent 4 (vsep (map pretty candidates))\n\n-- | Resolve a bare (non-dot-prefixed) import: search system/plane paths,\n-- with deprecated fallback to local paths (project root).\n-- Supports namespaced imports: \"owner/name\" searches lib/plane/owner/name/\n-- Bare imports: \"foo\" searches lib/plane/morloclib/foo/ first, then lib/plane/foo/\nfindBareModule :: MVar -> MVar -> MorlocMonad Path\nfindBareModule currentModule importModule = do\n  config <- MM.ask\n  projectRoot <- MM.gets stateProjectRoot\n  let lib = Config.configLibrary config\n      plane = Config.configPlane config\n      planeCore = Config.configPlaneCore config\n      namePath = splitModuleName importModule\n      -- Check if this is a namespaced import (contains \"/\")\n      MV importText = importModule\n      isNamespaced = \"/\" `MT.isInfixOf` importText\n      -- For namespaced: \"owner/name\" -> [owner, name] as filesystem path\n      -- For bare: \"foo\" -> search morloclib/foo first, then foo\n      namespacedPath = case MT.splitOn \"/\" importText of\n        [owner, name] -> [MT.unpack owner, MT.unpack name]\n        _ -> namePath  -- fallback\n      systemPaths\n        | isNamespaced =\n            -- Namespaced import: only search the explicit namespace path\n            [ MS.joinPath ([lib, plane] <> namespacedPath <> [\"main.loc\"])\n            , MS.joinPath ([lib, plane] <> init namespacedPath <> [last namespacedPath <> \".loc\"])\n            ]\n        | otherwise =\n            -- Bare import: search core org namespace first, then flat paths\n            [ MS.joinPath ([lib, plane, planeCore] <> namePath <> [\"main.loc\"])\n            , MS.joinPath ([lib, plane, planeCore] <> init namePath <> [last namePath <> \".loc\"])\n            , MS.joinPath ([lib, plane] <> init namePath <> [last namePath <> \".loc\"])\n            , MS.joinPath ([lib, plane] <> namePath <> [\"main.loc\"])\n            , MS.joinPath (lib : init namePath <> [last namePath <> \".loc\"])\n            , MS.joinPath (lib : namePath <> [\"main.loc\"])\n            ]\n      localPaths\n        | isNamespaced = []  -- namespaced imports are never local\n        | otherwise = case projectRoot of\n            Just root ->\n              [ MS.joinPath (root : init namePath <> [last namePath <> \".loc\"])\n              , MS.joinPath (root : namePath <> [\"main.loc\"])\n              ]\n            Nothing ->\n              [ MS.joinPath (init namePath <> [last namePath <> \".loc\"])\n              , MS.joinPath (namePath <> [\"main.loc\"])\n              ]\n  existingSystem <- liftIO . fmap catMaybes . mapM getFile $ systemPaths\n  existingLocal <- liftIO . fmap catMaybes . mapM getFile $ localPaths\n  case (existingSystem, existingLocal) of\n    (s:_, l:_) ->\n      MM.throwSystemError $\n        \"Ambiguous import\" <+> squotes (pretty importModule)\n          <+> \"from module\" <+> squotes (pretty currentModule)\n          <> \"\\nFound in system:\" <+> pretty s\n          <> \"\\nFound locally:\" <+> pretty l\n          <> \"\\nUse\" <+> squotes (pretty (\"import .\" <> unMVar importModule))\n          <+> \"for local or ensure the system module is installed\"\n    (x:_, []) -> return x\n    ([], x:_) -> do\n      MM.say $\n        \"WARNING: bare import\" <+> squotes (pretty importModule)\n          <+> \"resolved locally.\"\n          <+> \"Use\" <+> squotes (pretty (\"import .\" <> unMVar importModule))\n          <+> \"for explicit local imports.\"\n      return x\n    ([], []) -> do\n      let allPaths = systemPaths <> localPaths\n          nameNameLoc = namePath <> [last namePath <> \".loc\"]\n          hintPaths =\n            map\n              MS.joinPath\n              [ nameNameLoc\n              , lib : nameNameLoc\n              , [lib, plane] <> nameNameLoc\n              ]\n      existingHints <- liftIO . fmap catMaybes . mapM getFile $ hintPaths\n      let hintMsg = case existingHints of\n            (found : _) ->\n              let expected = MS.combine (MS.takeDirectory found) \"main.loc\"\n               in \"\\n\\nFound\"\n                    <+> squotes (pretty found)\n                    <+> \"but expected\"\n                    <+> squotes (pretty expected)\n                    <> \"\\n  Rename the entry point: mv\"\n                      <+> pretty found\n                      <+> pretty expected\n            [] -> mempty\n      MM.throwSystemError $\n        \"Within module\" <+> squotes (pretty currentModule)\n          <> \",\"\n            <+> \"failed to import module\"\n            <+> squotes (pretty importModule)\n          <> \"\\n\"\n          <> \"The following paths were searched:\\n\"\n            <+> indent 4 (vsep (map pretty allPaths))\n          <> \"\\nMaybe try running: morloc install\" <+> pretty importModule\n          <> hintMsg\n\n{- | Give a module path (e.g. \"/your/path/foo.loc\") find the package metadata.\nIt currently only looks for a file named \"package.yaml\" in the same folder\nas the main \"*.loc\" file.\n-}\nfindModuleMetadata :: Path -> IO (Maybe Path)\nfindModuleMetadata mainFile =\n  getFile $ MS.combine (MS.takeDirectory mainFile) \"package.yaml\"\n\nloadModuleMetadata :: Path -> MorlocMonad ()\nloadModuleMetadata main = do\n  maybef <- liftIO $ findModuleMetadata main\n  meta <-\n    case maybef of\n      (Just f) -> liftIO $ YC.loadYamlSettings [f] [] YC.ignoreEnv\n      Nothing -> return defaultValue\n  -- Reject include entries that escape the package directory. Absolute\n  -- paths and `..` traversals are not allowed because they would break\n  -- reproducibility and tie installs to ambient filesystem layout.\n  case packageInclude meta of\n    Just pats -> liftIO $ Install.validateIncludeScope pats\n    Nothing -> return ()\n  state <- MM.get\n  MM.put (appendMeta meta state)\n  where\n    appendMeta :: PackageMeta -> MorlocState -> MorlocState\n    appendMeta m s = s {statePackageMeta = m : statePackageMeta s}\n\nsplitModuleName :: MVar -> [String]\nsplitModuleName (MV x) = map MT.unpack $ MT.splitOn \".\" x\n\n\ngetFile :: Path -> IO (Maybe Path)\ngetFile x = do\n  exists <- MS.doesFileExist x\n  return $\n    if exists\n      then Just x\n      else Nothing\n\n-- {{{ definitions\n\ndata GitProtocol = SshProtocol | HttpsProtocol\n  deriving (Show, Eq, Ord)\n\ndata OverwriteProtocol\n  = ForceOverwrite\n  | DoNotOverwrite\n  deriving (Show, Eq, Ord)\n\ndata RemoteSource\n  = RemoteGithub\n  | RemoteGitlab\n  | RemoteBitbucket\n  | RemoteCodeberg\n  | RemoteAzure\n  deriving (Show, Eq, Ord)\n\ndata GitSnapshotSelector\n  = LatestDefaultBranch\n  | LatestOnBranch Text\n  | CommitHash Text\n  | ReleaseTag Text\n  deriving (Show, Eq, Ord)\n\ndata GitRemote = GitRemote\n  { gitRemoteSource :: RemoteSource\n  , gitReference :: GitSnapshotSelector\n  , gitUsername :: Text\n  , gitReponame :: Text\n  }\n  deriving (Show, Eq, Ord)\n\n-- | Specify where a module is located\ndata ModuleSource\n  = -- | Module in a local directory (may or may not be a git repo)\n    ModuleSourceLocal Text (Maybe GitSnapshotSelector)\n  | -- | A module stored in an arbitrary users github repo, e.g., (GithubRepo \"weena\" \"math\")\n    ModuleSourceRemoteGit GitRemote\n  | -- | A module from the morloc registry (owner, name)\n    ModuleSourceRegistry Text Text\n  deriving (Show, Eq, Ord)\n\n-- }}}\n\n-- | Check that a resolved module name is a valid identifier\nvalidateModuleName :: Text -> Text -> Either Text Text\nvalidateModuleName modstr name\n  | MT.null name =\n      Left $ \"Could not determine module name from '\" <> modstr <> \"'\"\n  | not (DC.isAlphaNum (MT.head name)) =\n      Left $ \"Module name '\" <> name <> \"' (from '\" <> modstr <> \"') must start with an alphanumeric character\"\n  | MT.any (\\c -> not (DC.isAlphaNum c) && c /= '-') name =\n      Left $ \"Module name '\" <> name <> \"' (from '\" <> modstr <> \"') contains invalid characters (only alphanumeric and hyphens allowed)\"\n  | otherwise = Right name\n\n{- | Extract the module name from an install string.\nFor \"github:user/repo\" -> \"repo\", for \"math\" -> \"math\",\nfor \"./path/to/foo\" -> \"foo\", for \".\" -> current directory name\n-}\nextractModuleName :: Text -> IO Text\nextractModuleName modstr = do\n  name <- case parse (moduleInstallParser \"morloclib\") \"\" modstr of\n    Right (Right (ModuleSourceLocal path _)) ->\n      MT.pack . MS.takeFileName <$> (MS.makeAbsolute . MS.dropTrailingPathSeparator . MT.unpack $ path)\n    Right (Right (ModuleSourceRemoteGit remote)) ->\n      return $ gitReponame remote\n    Right (Right (ModuleSourceRegistry _ n)) ->\n      return n\n    _ -> return modstr\n  case validateModuleName modstr name of\n    Left err -> ioError . userError $ MT.unpack err\n    Right n -> return n\n\n{- | Typecheck callback: takes a filepath, returns list of (name, type) exports.\nPassed in from the executable layer to avoid circular imports.\n-}\ntype TypecheckFn = FilePath -> MorlocMonad [(Text, Text)]\n\ninstallModule ::\n  -- | How should overwrites be handled\n  OverwriteProtocol ->\n  -- | Remote Git download protocol (HTTPS by default)\n  GitProtocol ->\n  -- | Absolute path to folder where modules are installed for the given plane\n  Path ->\n  -- | Default github org for the given plane for pulling core modules\n  Path ->\n  -- | Optional typecheck callback (Nothing = skip typecheck)\n  Maybe TypecheckFn ->\n  -- | User-specified module sources from the CLI batch (name -> install-string)\n  Map.Map Text Text ->\n  -- | Modules currently being installed (cycle detection)\n  Set.Set Text ->\n  -- | Why this module is being installed\n  InstallReason ->\n  -- | Installation string, such as \"github:weena/math@version:0.1.0\"\n  Text ->\n  MorlocMonad ()\ninstallModule overwrite gitprot libpath coreorg mayTypecheck userSources inProgress reason modstr = do\n  config <- MM.ask\n  let registry = Config.configRegistry config\n  -- Try registry first for bare names when a registry is configured\n  case (registry, tryParseRegistryModule (MT.pack coreorg) modstr) of\n    (Just _, Just (owner, name)) -> do\n      let targetDir = libpath </> MT.unpack owner </> MT.unpack name\n      if Set.member name inProgress\n        then return ()\n        else do\n          targetExists <- liftIO $ doesDirectoryExist targetDir\n          case (targetExists, overwrite) of\n            (True, DoNotOverwrite) -> do\n              case reason of\n                ExplicitInstall ->\n                  MM.say $ \"Module\" <+> pretty name <+> \"is already installed, use --force to reinstall\"\n                AutoDependency ->\n                  MM.sayVVV $ \"Module\" <+> pretty name <+> \"already installed, skipping\"\n              return ()\n            (True, ForceOverwrite) -> do\n              liftIO $ removeDirectoryRecursive targetDir\n              doInstall (ModuleSourceRegistry owner name) name targetDir\n            (False, _) ->\n              doInstall (ModuleSourceRegistry owner name) name targetDir\n    _ -> installModuleClassic\n  where\n    installModuleClassic = case parse (moduleInstallParser (MT.pack coreorg)) \"\" modstr of\n      (Left errstr) -> moduleInstallError (pretty . show $ errstr)\n      (Right (Left errstr)) -> moduleInstallError $ pretty errstr\n      (Right (Right source)) -> do\n        rawName <- case source of\n              ModuleSourceLocal path _ ->\n                liftIO $ MT.pack . MS.takeFileName <$> (MS.makeAbsolute . MS.dropTrailingPathSeparator . MT.unpack $ path)\n              ModuleSourceRemoteGit remote -> return $ gitReponame remote\n              ModuleSourceRegistry _ n -> return n\n        name <- case validateModuleName modstr rawName of\n          Left err -> moduleInstallError $ pretty err\n          Right n -> return n\n        let targetDir = libpath </> MT.unpack name\n\n        if Set.member name inProgress\n          then return ()\n          else do\n            -- Check if already installed\n            targetExists <- liftIO $ doesDirectoryExist targetDir\n            case (targetExists, overwrite) of\n              (True, DoNotOverwrite) -> do\n                case reason of\n                  ExplicitInstall ->\n                    MM.say $ \"Module\" <+> pretty name <+> \"is already installed, use --force to reinstall\"\n                  AutoDependency ->\n                    MM.sayVVV $ \"Module\" <+> pretty name <+> \"already installed, skipping\"\n                return ()\n              (True, ForceOverwrite) -> do\n                liftIO $ removeDirectoryRecursive targetDir\n                doInstall source name targetDir\n              (False, _) ->\n                doInstall source name targetDir\n\n\n    doInstall :: ModuleSource -> Text -> FilePath -> MorlocMonad ()\n    doInstall source name targetDir = do\n      let inProgress' = Set.insert name inProgress\n\n      -- create the library path if it is missing\n      liftIO $ createDirectoryIfMissing True libpath\n\n      -- Copy/clone files, with cleanup on exception\n      liftIO $ createDirectoryIfMissing True (MS.takeDirectory targetDir)\n      config' <- MM.ask\n      let ioAction = case source of\n            ModuleSourceLocal path selector ->\n              installLocalIO targetDir selector path\n            ModuleSourceRemoteGit remote ->\n              installRemoteIO gitprot targetDir remote\n            ModuleSourceRegistry owner' modName ->\n              case Config.configRegistry config' of\n                Just regUrl -> installFromRegistry regUrl owner' modName targetDir\n                Nothing -> ioError $ userError \"Registry URL not configured\"\n          cleanup = do\n            exists <- doesDirectoryExist targetDir\n            when exists $ removeDirectoryRecursive targetDir\n      liftIO $ ioAction `onException` cleanup\n\n      -- Read package.yaml for metadata and dependencies\n      meta <- liftIO $ do\n        let pkgYaml = targetDir </> \"package.yaml\"\n        exists <- doesFileExist pkgYaml\n        if exists\n          then YC.loadYamlSettings [pkgYaml] [] YC.ignoreEnv\n          else return defaultValue\n\n      -- Determine morloc dependencies by scanning .loc imports\n      morlocDeps <- do\n        mainFile <- liftIO $ findMainLocFile targetDir (MT.unpack name)\n        case mainFile of\n          Nothing -> return []\n          Just f -> liftIO $ extractMorlocDeps f\n\n      -- Recursively install dependencies\n      forM_ morlocDeps $ \\dep -> do\n        let depDir = libpath </> MT.unpack dep\n            depDirNs = libpath </> coreorg </> MT.unpack dep\n        depExists <- liftIO $ (||) <$> doesDirectoryExist depDir <*> doesDirectoryExist depDirNs\n        unless (depExists || Set.member dep inProgress') $ do\n          let depModstr = case Map.lookup dep userSources of\n                Just s -> s\n                Nothing -> dep\n          MM.say $ \"Auto-installing dependency:\" <+> pretty dep\n          installModule\n            DoNotOverwrite\n            gitprot\n            libpath\n            coreorg\n            mayTypecheck\n            userSources\n            inProgress'\n            AutoDependency\n            depModstr\n\n      -- Typecheck the module (if callback provided)\n      exports <- case mayTypecheck of\n        Nothing -> return []\n        Just typecheckFn -> do\n          mainFile <- liftIO $ findMainLocFile targetDir (MT.unpack name)\n          case mainFile of\n            Nothing -> return []\n            Just f -> typecheckFn f\n\n      -- Write module manifest to fdb/\n      config <- MM.ask\n      let fdbDir = Config.configHome config </> \"fdb\"\n      liftIO $ createDirectoryIfMissing True fdbDir\n      installTime <- liftIO $ floor <$> Time.getPOSIXTime\n      let manifestPath = fdbDir </> MT.unpack name ++ \".module\"\n          manifestJson =\n            buildModuleManifest\n              meta\n              name\n              morlocDeps\n              exports\n              targetDir\n              modstr\n              reason\n              installTime\n      liftIO $ TIO.writeFile manifestPath manifestJson\n      MM.say $ \"Installed module\" <+> squotes (pretty name)\n\n-- | Find the main .loc file in a module directory\nfindMainLocFile :: FilePath -> String -> IO (Maybe FilePath)\nfindMainLocFile dir name = do\n  dirExists <- doesDirectoryExist dir\n  if not dirExists\n    then return Nothing\n    else do\n      let mainLoc = dir </> \"main.loc\"\n          nameLoc = dir </> name ++ \".loc\"\n      mainExists <- doesFileExist mainLoc\n      if mainExists\n        then return (Just mainLoc)\n        else do\n          nameExists <- doesFileExist nameLoc\n          return $ if nameExists then Just nameLoc else Nothing\n\n-- | Build a module manifest JSON string\nbuildModuleManifest ::\n  PackageMeta ->\n  Text ->\n  [Text] ->\n  [(Text, Text)] ->\n  FilePath ->\n  Text ->\n  InstallReason ->\n  Int ->\n  Text\nbuildModuleManifest meta name morlocDeps exports installPath installSource reason installTime =\n  jsonObj\n    [ (\"kind\", jsonStr \"module\")\n    , (\"name\", jsonStr name)\n    , (\"version\", jsonStr (packageVersion meta))\n    , (\"synopsis\", jsonStr (packageSynopsis meta))\n    , (\"author\", jsonStr (packageAuthor meta))\n    , (\"license\", jsonStr (packageLicense meta))\n    , (\"homepage\", jsonStr (packageHomepage meta))\n    , (\"c_dependencies\", jsonStrArr (packageDependencies meta))\n    , (\"morloc_dependencies\", jsonStrArr morlocDeps)\n    ,\n      ( \"exports\"\n      , jsonArr\n          [ jsonObj [(\"name\", jsonStr n), (\"type\", jsonStr t)]\n          | (n, t) <- exports\n          ]\n      )\n    , (\"install_path\", jsonStr (MT.pack installPath))\n    , (\"install_source\", jsonStr installSource)\n    , (\"install_reason\", jsonStr (reasonText reason))\n    , (\"install_time\", jsonInt installTime)\n    ]\n  where\n    reasonText ExplicitInstall = \"explicit\"\n    reasonText AutoDependency = \"auto\"\n\n{- | Extract morloc module dependencies by scanning a .loc file for import statements.\nThis is a lightweight text scan, not using the full parser.\n-}\nextractMorlocDeps :: FilePath -> IO [Text]\nextractMorlocDeps path = do\n  content <- TIO.readFile path\n  let ls = MT.lines content\n      imports = concatMap extractImport (removeComments ls)\n  return (unique imports)\n  where\n    extractImport :: Text -> [Text]\n    extractImport ln =\n      let stripped = MT.stripStart ln\n       in case MT.stripPrefix \"import \" stripped of\n            Nothing -> []\n            Just rest ->\n              let modName = MT.strip (MT.takeWhile (\\c -> c /= '(' && c /= ' ') rest)\n               in if \".\" `MT.isPrefixOf` modName\n                    then [] -- skip local (.dot-prefixed) imports\n                    else if \"/\" `MT.isInfixOf` modName\n                      then [modName]  -- namespaced import: keep \"owner/name\" as-is\n                      else case MT.splitOn \".\" modName of\n                             (topLevel : _) | not (MT.null topLevel) -> [topLevel]\n                             _ -> []\n\n    removeComments :: [Text] -> [Text]\n    removeComments = go False\n      where\n        go _ [] = []\n        go True (l : ls)\n          | MT.isInfixOf \"-}\" l = go False ls\n          | otherwise = go True ls\n        go False (l : ls)\n          | MT.isPrefixOf \"--\" (MT.stripStart l) = go False ls\n          | MT.isPrefixOf \"{-\" (MT.stripStart l) = go True ls\n          | otherwise = l : go False ls\n\n-- {{{ parse module source\n\ntype Parser = Parsec Text ()\n\ndecimal :: Parser Int\ndecimal = read <$> many1 digit\n\ndata ModulePath\n  = ModulePathCore Text\n  | ModulePathGit Text Text\n  | ModulePathLocal Text\n  deriving (Show, Eq, Ord)\n\ndata RefForm = RefHash | RefBranch | RefVersion\n\nmoduleInstallParser :: Text -> Parser (Either Text ModuleSource)\nmoduleInstallParser coreorg = do\n  maySrcform <- optional (try parseSrcForm)\n  modPath <- parseModname maySrcform\n  ref <- optional (try parseRef)\n  return $ makeModuleSource maySrcform modPath ref\n  where\n    makeModuleSource ::\n      Maybe RemoteSource -> ModulePath -> Maybe GitSnapshotSelector -> Either Text ModuleSource\n    makeModuleSource mayRemote (ModulePathCore modname) selector\n      | mayRemote == Just RemoteGithub || mayRemote == Nothing =\n          return . ModuleSourceRemoteGit $\n            GitRemote\n              { gitRemoteSource = RemoteGithub\n              , gitReference = fromMaybe LatestDefaultBranch selector\n              , gitUsername = coreorg\n              , gitReponame = modname\n              }\n      | otherwise = Left \"Core modules are only imported from github\"\n    makeModuleSource (Just _) (ModulePathLocal _) _ =\n      Left \"Invalid mix of local and remote import names\"\n    makeModuleSource (maybe RemoteGithub id -> remote) (ModulePathGit user repo) selector =\n      return . ModuleSourceRemoteGit $\n        GitRemote\n          { gitRemoteSource = remote\n          , gitReference = fromMaybe LatestDefaultBranch selector\n          , gitUsername = user\n          , gitReponame = repo\n          }\n    makeModuleSource Nothing (ModulePathLocal path) selector = return $ ModuleSourceLocal path selector\n\n-- codeberg:weena/calendar@version:1.0.0\n-- --------\nparseSrcForm :: Parser RemoteSource\nparseSrcForm = do\n  remote <-\n    try (string \"github\" >> return RemoteGithub)\n      <|> try (string \"gitlab\" >> return RemoteGitlab)\n      <|> try (string \"bitbucket\" >> return RemoteBitbucket)\n      <|> try (string \"codeberg\" >> return RemoteCodeberg)\n      <|> (string \"azure\" >> return RemoteAzure)\n  _ <- char ':'\n  return remote\n\n-- codeberg:???????@version:1.0.0\n--          -------\nparseModname :: Maybe RemoteSource -> Parser ModulePath\nparseModname (Just _) = try parseRemoteModule <|> parseCoreModule\nparseModname Nothing =\n  parseCoreModule -- must start with letter\n    <|> parseLocalModule -- must start with [.~/]\n\n-- codeberg:weena/calendar@version:1.0.0\n--          --------------\nparseRemoteModule :: Parser ModulePath\nparseRemoteModule = do\n  user <- parseModuleSegment\n  _ <- char '/'\n  repo <- parseModuleSegment\n  return $ ModulePathGit user repo\n\n-- root@version:1.0.0\n-- ----\nparseCoreModule :: Parser ModulePath\nparseCoreModule = ModulePathCore <$> parseModuleSegment\n\nparseModuleSegment :: Parser Text\nparseModuleSegment = do\n  firstChar <- alphaNum\n  rest <- many (alphaNum <|> char '-')\n  case rest of\n    [] -> return (MT.pack [firstChar])\n    _ | last rest == '-' -> fail \"Module name cannot end with a dash\"\n      | otherwise -> return (MT.pack (firstChar : rest))\n\n-- parse a local file\n--   .\n--   ./my/morloc/dir\n--   ~/my/mod\nparseLocalModule :: Parser ModulePath\nparseLocalModule = do\n  fstChar <- char '.' <|> char '/' <|> char '~'\n  remaining <- MT.pack <$> many (satisfy (/= '@'))\n  return $ ModulePathLocal (MT.cons fstChar remaining)\n\n-- codeberg:weena/calendar@version:1.0.0\n--                         -------------\nparseRef :: Parser GitSnapshotSelector\nparseRef = do\n  char '@'\n  mayForm <- optional (try parseRefForm)\n  parseRefStr mayForm\n\n-- codeberg:weena/calendar@version:1.0.0\n--                         -------\nparseRefForm :: Parser RefForm\nparseRefForm = do\n  form <-\n    try (string \"hash\" >> return RefHash)\n      <|> try (string \"branch\" >> return RefBranch)\n      <|> try (string \"version\" >> return RefVersion)\n      <|> try (string \"tag\" >> return RefVersion) -- same diff\n  char ':'\n  return form\n\n-- codeberg:weena/calendar@version:1.0.0\n--                                 -----\nparseRefStr :: Maybe RefForm -> Parser GitSnapshotSelector\nparseRefStr Nothing =\n  try parseHash\n    <|> try parseVersion\n    <|> try parseBranch\nparseRefStr (Just RefHash) = parseHash\nparseRefStr (Just RefVersion) = parseVersion\nparseRefStr (Just RefBranch) = parseBranch\n\n-- match hexadecimal characters of 7 characters or more\nparseHash :: Parser GitSnapshotSelector\nparseHash = do\n  hash <- MT.pack <$> many1 (satisfy isHexDigit)\n  if MT.length hash >= 7\n    then return $ CommitHash hash\n    else fail \"Hash must be at least 7 characters\"\n  where\n    isHexDigit c = (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')\n\n-- match semantic version (with option of omitting patch, so v1.0 is legal)\nparseVersion :: Parser GitSnapshotSelector\nparseVersion = do\n  version <- versionParser\n  return $ ReleaseTag version\n  where\n    versionParser = do\n      -- Optional 'v' prefix\n      v <- optional (MT.pack <$> string \"v\")\n\n      -- Parse major.minor\n      major <- MT.show' <$> decimal\n      _ <- char '.'\n      minor <- MT.show' <$> decimal\n\n      -- Optional .patch\n      patchMay <- optional . try $ do\n        _ <- char '.'\n        MT.show' <$> decimal\n\n      -- Optional pre-release (after '-')\n      preRelease <- optional . try $ do\n        _ <- char '-'\n        MT.pack <$> many1 (satisfy (\\c -> DC.isAlphaNum c || c == '.'))\n\n      -- Optional build metadata (after '+')\n      buildMeta <- optional . try $ do\n        _ <- char '+'\n        MT.pack <$> many1 (satisfy (\\c -> DC.isAlphaNum c || c == '.'))\n\n      -- Reconstruct the full version string\n      return $\n        mconcat\n          [ fromMaybe \"\" v\n          , major\n          , \".\"\n          , minor\n          , maybe \"\" (\".\" <>) patchMay\n          , maybe \"\" (\"-\" <>) preRelease\n          , maybe \"\" (\"+\" <>) buildMeta\n          ]\n\n{- | Parse a legal git branch name according to git-check-ref-format\n\nGit reference naming rules (from git-check-ref-format man page):\n1. Cannot begin or end with slash '/'\n2. Cannot contain two consecutive dots '..'\n3. Cannot contain ASCII control characters (< 0x20), space, ~, ^, :, ?, *, [\n4. Cannot end with '.lock'\n5. Cannot end with a dot '.'\n6. Cannot contain a backslash '\\'\n7. Cannot contain '@{' sequence (reflog syntax)\n8. Cannot be a single '@' character\n9. Components between slashes cannot begin with a dot '.'\n10. Cannot contain multiple consecutive slashes '//'\n\nCurrent implementation deviations from spec:\n- Missing: check for '@{' sequence (reflog syntax - rule 7)\n- Missing: check for '@' as sole character (rule 8)\n- Missing: check that components don't start with '.' (rule 9)\n- Missing: check for consecutive slashes '//' (rule 10)\n- Missing: check for ASCII control characters (< 0x20)\n- Incomplete: allows '@' freely, but spec restricts it in certain contexts\n-}\nparseBranch :: Parser GitSnapshotSelector\nparseBranch = do\n  branch <- MT.pack <$> many1 (satisfy isBranchChar)\n\n  if isValidBranchName branch\n    then return $ LatestOnBranch branch\n    else fail $ \"Invalid git branch name: \" ++ MT.unpack branch\n  where\n    -- Characters allowed in branch names\n    -- Note: We're permissive in parsing, strict in validation\n    isBranchChar c =\n      c > '\\x1F' -- No ASCII control characters (0x00-0x1F)\n        && c /= ' '\n        && c /= '~'\n        && c /= '^'\n        && c /= ':'\n        && c /= '?'\n        && c /= '*'\n        && c /= '['\n        && c /= '\\\\'\n        && c /= '\\DEL' -- Also exclude DEL (0x7F)\n\n    -- Comprehensive validation according to git-check-ref-format\n    isValidBranchName branchName =\n      not (MT.null branchName) -- Must have content\n        && branchName /= \"@\" -- Cannot be just '@'\n        && not (MT.isPrefixOf \"/\" branchName) -- Cannot start with /\n        && not (MT.isSuffixOf \"/\" branchName) -- Cannot end with /\n        && not (MT.isSuffixOf \".\" branchName) -- Cannot end with .\n        && not (MT.isSuffixOf \".lock\" branchName) -- Cannot end with .lock\n        && not (\"..\" `MT.isInfixOf` branchName) -- No consecutive dots\n        && not (\"@{\" `MT.isInfixOf` branchName) -- No reflog syntax\n        && not (\"//\" `MT.isInfixOf` branchName) -- No consecutive slashes\n        && not (hasComponentStartingWithDot branchName) -- Components can't start with .\n        && not (MT.any isInvalidChar branchName) -- Final safety check\n\n    -- Check if any path component starts with a dot\n    hasComponentStartingWithDot txt =\n      let components = MT.splitOn \"/\" txt\n       in any (MT.isPrefixOf \".\") components\n\n    -- Characters that should never appear (belt-and-suspenders check)\n    isInvalidChar c =\n      c <= '\\x1F' -- Control characters\n        || c == '\\DEL'\n        || c `elem` (\" ~^:?*[\\\\\" :: String)\n\n-- }}}\n\n-- | Try to parse a module string as a registry-resolvable module.\n-- Bare names like \"foo\" -> (coreorg, \"foo\").\n-- Namespaced names like \"user/foo\" -> (\"user\", \"foo\").\n-- Returns Nothing for local paths, explicit remote sources, or git refs.\ntryParseRegistryModule :: Text -> Text -> Maybe (Text, Text)\ntryParseRegistryModule coreorg modstr\n  -- Reject anything with remote source prefixes, git ref selectors, or local paths\n  | MT.any (\\c -> c == ':' || c == '@' || c == '.' || c == '~') modstr = Nothing\n  -- Namespaced: \"owner/name\"\n  | \"/\" `MT.isInfixOf` modstr =\n      case MT.splitOn \"/\" modstr of\n        [owner, name] | isValidSegment owner && isValidSegment name -> Just (owner, name)\n        _ -> Nothing\n  -- Bare name: resolve to core org\n  | isValidSegment modstr = Just (coreorg, modstr)\n  | otherwise = Nothing\n  where\n    isValidSegment t =\n      not (MT.null t)\n        && DC.isAlphaNum (MT.head t)\n        && MT.all (\\c -> DC.isAlphaNum c || c == '-') t\n\n-- | Install a module from the morloc registry by downloading its tarball.\ninstallFromRegistry :: Text -> Text -> Text -> FilePath -> IO ()\ninstallFromRegistry registryUrl owner name targetDir = do\n  -- Get latest version metadata\n  let metaUrl = MT.unpack registryUrl <> \"/api/registry/\" <> MT.unpack owner <> \"/\" <> MT.unpack name\n  metaReq <- HTTP.parseRequest metaUrl\n  metaResp <- HTTP.httpLBS metaReq\n  let metaStatus = HTTP.getResponseStatusCode metaResp\n  when (metaStatus /= 200) $\n    ioError . userError $\n      \"Registry lookup failed for \" <> MT.unpack owner <> \"/\" <> MT.unpack name\n        <> \" (HTTP \" <> show metaStatus <> \")\"\n\n  -- Extract latest version from the response\n  let metaBody = HTTP.getResponseBody metaResp\n  latestVersion <- case Aeson.decode metaBody of\n    Just (Aeson.Object obj) -> case KM.lookup \"versions\" obj of\n      Just (Aeson.Array arr) | not (V.null arr) -> case V.head arr of\n        Aeson.Object vobj -> case KM.lookup \"version\" vobj of\n          Just (Aeson.String v) -> return v\n          _ -> ioError $ userError \"Could not parse version from registry response\"\n        _ -> ioError $ userError \"Could not parse version from registry response\"\n      _ -> ioError $ userError \"No versions found for module\"\n    _ -> ioError $ userError \"Could not parse registry response\"\n\n  -- Download tarball\n  let tarUrl = metaUrl <> \"/\" <> MT.unpack latestVersion <> \"/tarball\"\n  tarReq <- HTTP.parseRequest tarUrl\n  tarResp <- HTTP.httpLBS tarReq\n  let tarStatus = HTTP.getResponseStatusCode tarResp\n  when (tarStatus /= 200) $\n    ioError . userError $\n      \"Tarball download failed for \" <> MT.unpack owner <> \"/\" <> MT.unpack name\n        <> \"@\" <> MT.unpack latestVersion <> \" (HTTP \" <> show tarStatus <> \")\"\n\n  let tarball = HTTP.getResponseBody tarResp\n\n  -- Extract tarball to target directory\n  createDirectoryIfMissing True targetDir\n  let tarballPath = targetDir <> \".tar.gz\"\n  BL.writeFile tarballPath tarball\n\n  -- Verify SHA-256 if provided by the server\n  case HTTP.getResponseHeader \"X-Checksum-Sha256\" tarResp of\n    (expectedHash:_) -> do\n      output <- readProcess \"sha256sum\" [tarballPath] \"\"\n      let actualHash = takeWhile (/= ' ') output\n          expected = BS8.unpack expectedHash\n      when (actualHash /= expected) $ do\n        removeFile tarballPath\n        ioError . userError $\n          \"SHA-256 mismatch for \" <> MT.unpack owner <> \"/\" <> MT.unpack name\n    [] -> return ()\n\n  readProcess \"tar\" [\"xzf\", tarballPath, \"-C\", targetDir] \"\"\n  removeFile tarballPath\n\n-- {{{ install from module source (IO-level helpers)\n\n-- | Install from a local source (pure IO, no MorlocMonad)\ninstallLocalIO ::\n  FilePath -> Maybe GitSnapshotSelector -> Text -> IO ()\ninstallLocalIO targetDir maySelector modulePath = do\n  sourceDir <- MS.makeAbsolute . MS.dropTrailingPathSeparator . MT.unpack $ modulePath\n\n  sourceExists <- doesDirectoryExist sourceDir\n  unless sourceExists $\n    ioError $ userError $\n      \"Source directory does not exist: \" ++ sourceDir\n\n  let gitDir = sourceDir </> \".git\"\n  isGitRepo <- doesDirectoryExist gitDir\n\n  if isGitRepo\n    then installLocalGitRepoIO sourceDir targetDir (fromMaybe LatestDefaultBranch maySelector)\n    else Install.copyAllFiltered sourceDir targetDir\n\ninstallLocalGitRepoIO :: FilePath -> FilePath -> GitSnapshotSelector -> IO ()\ninstallLocalGitRepoIO sourceDir targetDir selector = do\n  case selector of\n    LatestDefaultBranch ->\n      -- Copy the working tree, filtering out .git and ignored files\n      Install.copyAllFiltered sourceDir targetDir\n    LatestOnBranch branch -> do\n      callProcess \"git\" [\"clone\", \"-q\", sourceDir, targetDir]\n      callProcess \"git\" [\"-C\", targetDir, \"checkout\", \"refs/heads/\" ++ MT.unpack branch]\n    CommitHash hash -> do\n      callProcess \"git\" [\"clone\", \"-q\", sourceDir, targetDir]\n      callProcess \"git\" [\"-C\", targetDir, \"checkout\", \"--detach\", MT.unpack hash]\n    ReleaseTag tag -> do\n      callProcess \"git\" [\"clone\", \"-q\", sourceDir, targetDir]\n      callProcess \"git\" [\"-C\", targetDir, \"checkout\", \"refs/tags/\" ++ MT.unpack tag]\n\n  -- Remove .git and ignored files from installed copy\n  Install.cleanIgnoredFiles targetDir\n\n-- | Install from a remote git source (pure IO)\ninstallRemoteIO :: GitProtocol -> FilePath -> GitRemote -> IO ()\ninstallRemoteIO gitprot targetDir remote = do\n  let gitUrl = buildGitUrl gitprot remote\n  callProcess \"git\" [\"clone\", \"-q\", gitUrl, targetDir]\n  checkoutRefIO targetDir (gitReference remote)\n  Install.cleanIgnoredFiles targetDir\n\n-- | Build a git URL from protocol and remote info\nbuildGitUrl :: GitProtocol -> GitRemote -> String\nbuildGitUrl protocol remote =\n  let username = MT.unpack $ gitUsername remote\n      reponame = MT.unpack $ gitReponame remote\n      source = gitRemoteSource remote\n      baseUrl = getBaseUrl source\n   in case protocol of\n        HttpsProtocol -> \"https://\" ++ baseUrl ++ \"/\" ++ username ++ \"/\" ++ reponame\n        SshProtocol -> \"git@\" ++ baseUrl ++ \":\" ++ username ++ \"/\" ++ reponame ++ \".git\"\n\n-- | Get base URL for each remote source\ngetBaseUrl :: RemoteSource -> String\ngetBaseUrl RemoteGithub = \"github.com\"\ngetBaseUrl RemoteGitlab = \"gitlab.com\"\ngetBaseUrl RemoteBitbucket = \"bitbucket.org\"\ngetBaseUrl RemoteCodeberg = \"codeberg.org\"\ngetBaseUrl RemoteAzure = \"dev.azure.com\"\n\n-- | Checkout a specific git reference (pure IO)\ncheckoutRefIO :: FilePath -> GitSnapshotSelector -> IO ()\ncheckoutRefIO targetDir selector = case selector of\n  LatestDefaultBranch -> return ()\n  LatestOnBranch branch -> do\n    callProcess \"git\" [\"-C\", targetDir, \"checkout\", MT.unpack branch]\n    callProcess \"git\" [\"-C\", targetDir, \"pull\"]\n  CommitHash hash ->\n    callProcess \"git\" [\"-C\", targetDir, \"checkout\", MT.unpack hash]\n  ReleaseTag tag ->\n    callProcess \"git\" [\"-C\", targetDir, \"checkout\", \"tags/\" ++ MT.unpack tag]\n\n-- }}}\n"
  },
  {
    "path": "library/Morloc/Monad.hs",
    "content": "{-# LANGUAGE FlexibleContexts #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Monad\nDescription : Compiler monad runner and accessors\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\n'MorlocMonad' is the main effect stack used throughout the compiler:\n\n@Reader Config (Except MorlocError (Writer [Text] (State MorlocState IO)))@\n\nThis module provides the runner ('runMorlocMonad'), error formatting\n('makeMorlocError' with source snippets), state accessors (counter,\ndepth, metadata lookups), logging ('say'\\/'sayV'\\/'sayVV'), system call\nwrappers, and an independent 'Index' monad for local re-indexing passes.\n-}\nmodule Morloc.Monad\n  ( MorlocReturn\n  , runMorlocMonad\n  , writeMorlocReturn\n  , makeMorlocError\n  , runCommand\n  , runCommandWith\n  , logFile\n  , logFileWith\n  , readLang\n\n    -- * re-exports\n  , module Control.Monad.Trans\n  , module Control.Monad.Except\n  , module Control.Monad.Reader\n  , module Control.Monad.State\n  , module Control.Monad.Writer\n  , module Control.Monad.Identity\n\n    -- * reusable counter\n  , startCounter\n  , getCounter\n  , getCounterWithPos\n  , setCounter\n  , takeFromCounter\n\n    -- * metadata accessors\n  , metaSources\n  , metaName\n  , getDocStrings\n  , getConcreteScope\n  , getGeneralScope\n  , getConcreteUniversalScope\n  , getGeneralUniversalScope\n\n    -- * handling tree depth\n  , incDepth\n  , getDepth\n  , decDepth\n  , setDepth\n\n    -- * messages\n  , say\n  , sayV\n  , sayVV\n  , sayVVV\n\n    -- * throwing errors\n  , throwSystemError\n  , throwSourcedError\n  , throwUnificationError\n\n    -- * naming helpers\n  , getModuleName\n  , getOutfileName\n\n    -- * Indexing monad\n  , Index\n  , IndexState (..)\n  , runIndex\n  , newIndex\n  , getIndex\n  , setIndex\n  ) where\n\nimport Control.Monad.Except\nimport Control.Monad.Identity\nimport Control.Monad.Reader\nimport Control.Monad.State\nimport Control.Monad.Trans\nimport Control.Monad.Writer\nimport Data.Text (Text)\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.GMap as GMap\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.DataFiles as DF\nimport qualified Morloc.LangRegistry as LR\nimport qualified Morloc.Language as ML\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport Morloc.Namespace.Type\nimport qualified Morloc.System as MS\nimport qualified System.Exit as SE\nimport System.IO (stderr)\nimport qualified System.Process as SP\n\nrunMorlocMonad ::\n  Maybe Path -> Int -> Config -> BuildConfig -> MorlocMonad a -> IO (MorlocReturn a)\nrunMorlocMonad outfile v config buildConfig ev = do\n  let langFiles = [(n, DF.embededFileText f) | (n, f) <- DF.langRegistryFiles]\n      languagesText = DF.embededFileText DF.languagesYaml\n      registry = case LR.buildDefaultRegistry langFiles languagesText of\n        Right r -> r\n        Left err -> error $ \"Failed to build language registry: \" ++ err\n      state0 = emptyState outfile v\n      state1 =\n        state0\n          { stateBuildConfig = buildConfig\n          , stateLangRegistry = registry\n          }\n  runStateT (runWriterT (runExceptT (runReaderT ev config))) state1\n\nemptyState :: Maybe Path -> Int -> MorlocState\nemptyState path v =\n  defaultValue\n    { stateVerbosity = v\n    , stateOutfile = path\n    }\n\nstartCounter :: MorlocMonad ()\nstartCounter = do\n  s <- get\n  put $ s {stateCounter = 0}\n\ngetCounter :: MorlocMonad Int\ngetCounter = do\n  s <- get\n  let i = stateCounter s\n  put $ s {stateCounter = stateCounter s + 1}\n  return i\n\n-- | Create a new index that inherits the source position of a parent index\ngetCounterWithPos :: Int -> MorlocMonad Int\ngetCounterWithPos parentIdx = do\n  i <- getCounter\n  s <- get\n  case Map.lookup parentIdx (stateSourceMap s) of\n    Just loc -> put $ s {stateSourceMap = Map.insert i loc (stateSourceMap s)}\n    Nothing -> return ()\n  return i\n\ntakeFromCounter :: Int -> MorlocMonad [Int]\ntakeFromCounter 0 = return []\ntakeFromCounter i = do\n  x <- getCounter\n  xs <- takeFromCounter (i - 1)\n  return (x : xs)\n\nsetCounter :: Int -> MorlocMonad ()\nsetCounter i = do\n  s <- get\n  put $ s {stateCounter = i}\n  return ()\n\nincDepth :: MorlocMonad Int\nincDepth = do\n  s <- get\n  let i = stateDepth s + 1\n  put $ s {stateDepth = i}\n  return i\n\ngetDepth :: MorlocMonad Int\ngetDepth = gets stateDepth\n\ndecDepth :: MorlocMonad Int\ndecDepth = do\n  s <- get\n  let i = stateDepth s - 1\n  put $ s {stateDepth = i}\n  return i\n\nsetDepth :: Int -> MorlocMonad ()\nsetDepth i = do\n  s <- get\n  put $ s {stateDepth = i}\n  return ()\n\nwriteMorlocReturn :: MorlocReturn a -> IO Bool\nwriteMorlocReturn ((Left err', msgs), st) = do\n  writeMessages\n  MT.hPutStrLn stderr (render $ makeMorlocError st err')\n  return False\n  where\n    writeMessages\n      | length msgs > 0 = MT.hPutStrLn stderr (MT.unlines msgs)\n      | otherwise = return ()\nwriteMorlocReturn ((Right _, _), _) = return True\n\nmakeMorlocError :: MorlocState -> MorlocError -> MDoc\nmakeMorlocError st (SourcedError i msg) =\n  case Map.lookup i (stateSourceMap st) of\n    Just loc -> pretty loc <> \": error:\" <> line <> msg <> snippet st loc\n    Nothing -> \"Compiler bug, broken index\" <+> pretty i <+> \"with attached error:\" <+> msg\nmakeMorlocError _ (SystemError msg) = msg\nmakeMorlocError st (UnificationError lhs rhs context msg) =\n  case (Map.lookup lhs srcMap, Map.lookup rhs srcMap, Map.lookup context srcMap) of\n    (Just lhsLoc, rhsLoc, contextLoc) ->\n      \"Unification error:\" <+> msg\n        <> line\n        <> \"Found while unifying\" <+> maybe mempty pretty contextLoc\n        <> line\n        <> \"With values\"\n        <> line\n        <> snippet st lhsLoc\n        <> maybe mempty (\\l -> \"and\" <> line <> snippet st l) rhsLoc\n    _ -> \"Compiler bug, broken indices\" <+> pretty (lhs, rhs, context) <+> \"with attached error:\" <+> msg\n  where\n    srcMap = stateSourceMap st\n\n{- | Render a source code snippet with error location markers.\nFor single-line spans: ^~~~^ underline from start to end column.\nFor multi-line spans: Elm-style vertical bar in the gutter with ^ at start and end.\nSpans > 10 lines are truncated to first 5 and last 5 lines.\n-}\nsnippet :: MorlocState -> SrcLoc -> MDoc\nsnippet st (SrcLoc path ln col endLn endCol) =\n  case path >>= \\p -> Map.lookup p (stateSourceText st) of\n    Nothing -> mempty\n    Just src ->\n      let srcLines = MT.lines src\n          n = length srcLines\n       in if n == 0 || ln < 1\n            then mempty\n            else\n              let startLine = min ln n\n                  finishLine = min endLn n\n                  gw = length (show finishLine)\n                  gutter = pretty (MT.replicate gw \" \")\n                  fmtLineNum num =\n                    let s = MT.show' num\n                     in pretty (MT.replicate (gw - MT.length s) \" \") <> pretty s\n               in if startLine == finishLine\n                    then snippetSingleLine srcLines startLine col endCol gutter fmtLineNum\n                    else snippetMultiLine srcLines startLine col finishLine endCol gutter fmtLineNum\n  where\n    snippetSingleLine srcLines lineNum startCol eCol gutter fmtNum =\n      let errLine = srcLines !! (lineNum - 1)\n          sc = max 1 (min startCol (MT.length errLine + 1))\n          ec = max sc (min eCol (MT.length errLine + 1))\n          pointer\n            | sc == ec = pretty (MT.replicate (sc - 1) \" \") <> \"^\"\n            | ec > sc + 1 =\n                pretty (MT.replicate (sc - 1) \" \")\n                  <> \"^\"\n                  <> pretty (MT.replicate (ec - sc - 2) \"~\")\n                  <> \"^\"\n            | otherwise = pretty (MT.replicate (sc - 1) \" \") <> \"^^\"\n       in line\n            <> gutter <+> \"|\"\n            <> line\n            <> fmtNum lineNum <+> \"|\" <+> pretty errLine\n            <> line\n            <> gutter <+> \"|\" <+> pointer\n            <> line\n\n    snippetMultiLine srcLines startLine startCol finishLine eCol gutter fmtNum =\n      let totalLines = finishLine - startLine + 1\n          lineNums\n            | totalLines <= 10 = [startLine .. finishLine]\n            | otherwise = [startLine .. startLine + 4] ++ [finishLine - 4 .. finishLine]\n          needsElision = totalLines > 10\n          elisionPoint = startLine + 5\n\n          renderLine num =\n            let srcLine = srcLines !! (num - 1)\n             in fmtNum num <+> \"| |\" <+> pretty srcLine\n\n          renderStartPointer =\n            let sc = max 1 startCol\n             in gutter <+> \"| |\" <+> pretty (MT.replicate (sc - 1) \" \") <> \"^\"\n\n          renderEndPointer =\n            let endLine = srcLines !! (finishLine - 1)\n                ec = max 1 (min eCol (MT.length endLine + 1))\n             in gutter <+> \"| |\" <+> pretty (MT.replicate (ec - 1) \" \") <> \"^\"\n\n          elisionLine = gutter <+> \"  ...\"\n\n          renderLines [] = mempty\n          renderLines (num : rest)\n            | needsElision && num == elisionPoint =\n                elisionLine <> line <> renderLines (dropWhile (< finishLine - 4) rest)\n            | num == startLine =\n                renderLine num\n                  <> line\n                  <> renderStartPointer\n                  <> line\n                  <> renderLines rest\n            | otherwise =\n                renderLine num\n                  <> line\n                  <> renderLines rest\n       in line\n            <> gutter <+> \"|\"\n            <> line\n            <> renderLines lineNums\n            <> renderEndPointer\n            <> line\n\nthrowSystemError :: (MonadError MorlocError m) => MDoc -> m a\nthrowSystemError = throwError . SystemError\n\nthrowSourcedError :: (MonadError MorlocError m) => Int -> MDoc -> m a\nthrowSourcedError i = throwError . SourcedError i\n\nthrowUnificationError :: (MonadError MorlocError m) => Int -> Int -> Int -> MDoc -> m a\nthrowUnificationError lhs rhs context msg = throwError $ UnificationError lhs rhs context msg\n\nsystemCallError :: Text -> Text -> String -> MorlocMonad a\nsystemCallError cmd loc msg =\n  throwSystemError $\n    \"System call failed at (\"\n      <> pretty loc\n      <> \"):\\n\"\n      <> \" cmd> \"\n      <> pretty cmd\n      <> \"\\n\"\n      <> \" msg>\\n\"\n      <> pretty msg\n\n-- | Execute a system call\nrunCommand ::\n  Text -> -- function making the call (used only in debugging messages on error)\n  Text -> -- system command\n  MorlocMonad ()\nrunCommand loc cmd = do\n  liftIO $ MT.hPutStrLn stderr (\"$ \" <> cmd)\n  (exitCode, _, err') <-\n    liftIO $ SP.readCreateProcessWithExitCode (SP.shell . MT.unpack $ cmd) []\n  case exitCode of\n    SE.ExitSuccess -> tell [MT.pack err']\n    _ -> systemCallError cmd loc err'\n\nsayIf :: Int -> MDoc -> MorlocMonad ()\nsayIf i d = do\n  verbosity <- gets stateVerbosity\n  when (verbosity >= i) $ liftIO $ MT.hPutStrLn stderr (render d)\n\n-- print anytime\nsay :: MDoc -> MorlocMonad ()\nsay = sayIf 0\n\n-- print for verbose level 1\n-- messages that may be of interest to the user\nsayV :: MDoc -> MorlocMonad ()\nsayV = sayIf 1\n\n-- print for verbose level 2\n-- messages for the programmer\nsayVV :: MDoc -> MorlocMonad ()\nsayVV = sayIf 2\n\n-- print for verbose level 3\n-- really boring shit that probably no one wants to ever hear, but we spent a\n-- lot of time working on it and don't want to delete it.\nsayVVV :: MDoc -> MorlocMonad ()\nsayVVV = sayIf 3\n\n-- | Execute a system call and return a function of the STDOUT\nrunCommandWith ::\n  Text -> -- function making the call (used only in debugging messages on error)\n\n  -- | A function of the output (run on success)\n  (Text -> a) ->\n  -- | System command\n  Text ->\n  MorlocMonad a\nrunCommandWith loc f cmd = do\n  liftIO $ MT.hPutStrLn stderr (\"$ \" <> cmd)\n  (exitCode, out, err') <-\n    liftIO $ SP.readCreateProcessWithExitCode (SP.shell . MT.unpack $ cmd) []\n  case exitCode of\n    SE.ExitSuccess -> return $ f (MT.pack out)\n    _ -> systemCallError cmd loc err'\n\n-- | Write a object to a file in the Morloc temporary directory\nlogFile ::\n  (Show a) =>\n  -- | A filename\n  String ->\n  a ->\n  MorlocMonad a\nlogFile s m = do\n  tmpdir <- asks configTmpDir\n  liftIO $ MS.createDirectoryIfMissing True tmpdir\n  let path = MS.combine tmpdir s\n  liftIO $ MT.writeFile path (MT.pretty m)\n  return m\n\n-- | Write a object to a file in the Morloc temporary directory\nlogFileWith ::\n  (Show b) =>\n  -- | A filename\n  String ->\n  -- | A function to convert a to something presentable\n  (a -> b) ->\n  a ->\n  MorlocMonad a\nlogFileWith s f m = do\n  tmpdir <- asks configTmpDir\n  liftIO $ MS.createDirectoryIfMissing True tmpdir\n  let path = MS.combine tmpdir s\n  liftIO $ MT.writeFile path (MT.pretty (f m))\n  return m\n\n-- | Look up a language by name or alias using the registry.\nreadLang :: Text -> MorlocMonad Lang\nreadLang langStr = do\n  reg <- gets stateLangRegistry\n  case LR.lookupByAlias langStr reg of\n    Just (name, entry) -> return (ML.makeLang name (LR.lreExtension entry))\n    Nothing -> throwSystemError $ \"Unknown language\" <> squotes (pretty langStr)\n\n{- | Return sources for constructing an object. These are used by `NamE NamObject`\nexpressions. Sources here includes some that are not linked to signatures, such\nas language-specific imports of object constructors. So this supersets the\nstateSignatures field's sources.\n-}\nmetaSources :: Int -> MorlocMonad [Source]\nmetaSources i = do\n  s <- gets stateSources\n  case GMap.lookup i s of\n    GMapNoFst -> return []\n    GMapNoSnd -> error \"Compiler bug: Internal GMap key missing\"\n    (GMapJust srcs) -> return srcs\n\n----- TODO: metaName should no longer be required - remove\n\n{- | The name of a morloc composition. These names are stored in the monad\nafter they are resolved away. For example in:\n  import math\n  export foo\n  bar x y = add x (inc y)\n  foo x = add (bar x 5) 1\n`foo` and `bar` are morloc composition. `foo` will be resolved to\n  add (add x (inc 5) 1\nThe terms \"foo\" and \"bar\" have disappeared. They aren't technically needed\nanymore. However, the nexus needs a subcommand name to give the user for\ncalling \"foo\". In the generated code and in error messages, it is also nice\nto keep the label \"bar\" attached to the second `add` function. `metaName`\ncan retrieve these names based on the index of the CallS expressions that\nwrap the two `add` functions.\n\nThe name is linked to the SAnno general data structure.\n-}\nmetaName :: Int -> MorlocMonad (Maybe EVar)\nmetaName i = gets (Map.lookup i . stateName)\n\n-- Get the docstrings associated with an item\ngetDocStrings ::\n  Int -> -- expression index\n  MorlocMonad ArgDoc\ngetDocStrings i = do\n  sgmap <- gets stateSignatures\n  case GMap.lookup i sgmap of\n    (GMapJust (Monomorphic (TermTypes (Just e) _ _))) -> return $ edocs e\n    (GMapJust (Polymorphic _ _ e _)) -> return $ edocs e\n    GMapNoSnd -> error \"Compiler bug: Internal GMap key missing\"\n    _ -> error \"Compiler bug: No entry found for index in stateSignatures\"\n\ngetConcreteScope :: Int -> Lang -> MorlocMonad Scope\ngetConcreteScope i lang = do\n  p <- gets stateConcreteTypedefs\n  return $ case GMap.lookup i p of\n    (GMapJust langmap) -> case Map.lookup lang langmap of\n      (Just scope) -> scope\n      Nothing -> Map.empty\n    _ -> Map.empty\n\ngetGeneralScope :: Int -> MorlocMonad Scope\ngetGeneralScope i = do\n  p <- gets stateGeneralTypedefs\n  return $ case GMap.lookup i p of\n    (GMapJust scope) -> scope\n    _ -> Map.empty\n\ngetConcreteUniversalScope :: Lang -> MorlocMonad Scope\ngetConcreteUniversalScope lang = do\n  scopeMap <- gets stateUniversalConcreteTypedefs\n  case Map.lookup lang scopeMap of\n    (Just scope) -> return scope\n    Nothing -> return Map.empty\n\ngetGeneralUniversalScope :: MorlocMonad Scope\ngetGeneralUniversalScope = gets stateUniversalGeneralTypedefs\n\n{- | Get the module name from state, falling back to \"nexus\" if unset.\nThis is the canonical name for pool subdirectories and manifest references.\n-}\ngetModuleName :: MorlocMonad String\ngetModuleName = do\n  st <- get\n  return $ case stateModuleName st of\n    Just (MV n) -> MT.unpack n\n    Nothing -> \"nexus\"\n\n{- | Get the output file name: the -o value if given, else the module name.\nThis controls only the wrapper script filename.\n-}\ngetOutfileName :: MorlocMonad String\ngetOutfileName = do\n  st <- get\n  case stateOutfile st of\n    Just name -> return name\n    Nothing -> getModuleName\n\nnewtype IndexState = IndexState {index :: Int}\ntype Index a = StateT IndexState Identity a\n\nrunIndex :: Int -> Index a -> a\nrunIndex i x = evalState x (IndexState i)\n\nnewIndex :: Index Int\nnewIndex = do\n  s <- get\n  let i = index s\n  put $ s {index = index s + 1}\n  return i\n\ngetIndex :: Index Int\ngetIndex = gets index\n\nsetIndex :: Int -> Index ()\nsetIndex i = do\n  s <- get\n  put $ s {index = i}\n  return ()\n"
  },
  {
    "path": "library/Morloc/Namespace/Expr.hs",
    "content": "{-# LANGUAGE DeriveGeneric #-}\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Namespace.Expr\nDescription : Frontend AST, source/config types, post-typecheck tree\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nExpression types at three stages of the compiler pipeline:\n\n* 'Expr' \\/ 'ExprI' -- parser output (untyped AST with integer indices)\n* 'E' -- intermediate typed tree used during typechecking\n* 'AnnoS' \\/ 'ExprS' -- post-typecheck annotated tree passed to code generation\n\nAlso defines 'Source' (foreign function binding), config types\n('ManifoldConfig', 'ModuleConfig', 'BuildConfig'), and mapping combinators\nover the 'AnnoS' tree.\n-}\nmodule Morloc.Namespace.Expr\n  ( -- * Source and config types\n    Source (..)\n  , RemoteResources (..)\n  , ManifoldConfig (..)\n  , ModuleConfig (..)\n  , BuildConfig (..)\n\n    -- * Mostly frontend expressions\n  , Symbol (..)\n  , AliasedSymbol (..)\n  , Signature (..)\n  , Typeclass (..)\n  , Selector (..)\n  , ungroup\n  , Pattern (..)\n  , Intrinsic (..)\n  , intrinsicName\n  , intrinsicArity\n  , parseIntrinsic\n  , Expr (..)\n  , ExprI (..)\n  , E (..)\n  , Lit (..)\n  , Import (..)\n  , Export (..)\n  , ExportGroup (..)\n  , Fixity (..)\n  , Associativity (..)\n\n    -- * Post-typechecking tree\n  , ExecutableExpr (..)\n  , AnnoS (..)\n  , ExprS (..)\n  , Coercion (..)\n  , applyCoercion\n  , unapplyCoercion\n  , ManyPoly (..)\n  , mapAnnoSM\n  , mapExprSM\n  , mapAnnoS\n  , mapExprS\n  , mapAnnoSC\n  , mapAnnoSCM\n  , mapAnnoSG\n  , mapAnnoSGM\n  , mapExprSC\n  , mapExprSCM\n  , mapExprSG\n  , mapExprSGM\n\n    -- * JSON helpers\n  , stripPrefixAndKebabCase\n  , convertToKebabCase\n  ) where\n\nimport Control.Monad.Identity (runIdentity)\nimport Data.Aeson (FromJSON (..))\nimport qualified Data.Aeson as Aeson\nimport Data.Aeson.Types (Options (..), defaultOptions)\nimport Data.Foldable (toList)\nimport qualified Data.Map as Map\nimport Data.Scientific (Scientific)\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport GHC.Generics (Generic)\nimport Morloc.Data.Doc\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.Type\n\n---- Source and config types\n\ndata Source\n  = Source\n  { srcName :: SrcName\n  , srcLang :: Lang\n  , srcPath :: Maybe Path\n  , srcAlias :: EVar\n  , srcLabel :: Maybe Label\n  , srcRsize :: [Int]\n  , srcNote :: [Text]\n  , srcInline :: !Bool\n  , srcOperator :: !Bool\n  }\n  deriving (Ord, Eq, Show)\n\ndata RemoteResources = RemoteResources\n  { remoteResourcesThreads :: Maybe Int\n  , remoteResourcesMemory :: Maybe Int\n  , remoteResourcesTime :: Maybe TimeInSeconds\n  , remoteResourcesGpus :: Maybe Int\n  }\n  deriving (Show, Ord, Eq, Generic)\n\ndata ManifoldConfig = ManifoldConfig\n  { manifoldConfigCache :: Maybe Bool\n  , manifoldConfigBenchmark :: Maybe Bool\n  , manifoldConfigRemote :: Maybe RemoteResources\n  }\n  deriving (Show, Ord, Eq, Generic)\n\ndata ModuleConfig = ModuleConfig\n  { moduleConfigDefaultGroup :: Maybe ManifoldConfig\n  , moduleConfigLabeledGroups :: Map.Map Text ManifoldConfig\n  }\n  deriving (Show, Generic)\n\ndata BuildConfig = BuildConfig\n  { buildConfigSlurmSupport :: Maybe Bool\n  , buildConfigSanitize :: Maybe Bool\n  }\n  deriving (Show, Generic)\n\n---- Expressions\n\ndata Symbol\n  = TypeSymbol TVar\n  | TermSymbol EVar\n  | ClassSymbol ClassName\n  deriving (Show, Ord, Eq)\n\ndata ExportGroup = ExportGroup\n  { exportGroupName :: !Text\n  , exportGroupDesc :: [Text]\n  , exportGroupMembers :: Set.Set (Int, Symbol)\n  }\n  deriving (Show, Ord, Eq)\n\ndata Export\n  = ExportMany (Set.Set (Int, Symbol)) [ExportGroup]\n  | ExportAll\n  deriving (Show, Ord, Eq)\n\ndata AliasedSymbol\n  = AliasedType TVar TVar\n  | AliasedTerm EVar EVar\n  | AliasedClass ClassName\n  deriving (Show, Ord, Eq)\n\ndata Signature = Signature EVar (Maybe Label) EType\n  deriving (Show, Ord, Eq)\n\ndata Typeclass a = Typeclass [Constraint] ClassName [TVar] [a]\n  deriving (Show, Ord, Eq)\n\ndata Selector\n  = SelectorKey (Text, Selector) [(Text, Selector)]\n  | SelectorIdx (Int, Selector) [(Int, Selector)]\n  | SelectorEnd\n  deriving (Show, Ord, Eq)\n\nungroup :: Selector -> [[Either Int Text]]\nungroup SelectorEnd = [[]]\nungroup (SelectorKey (k, SelectorEnd) []) = [[Right k]]\nungroup (SelectorIdx (i, SelectorEnd) []) = [[Left i]]\nungroup (SelectorKey x xs) = concat [map ((:) (Right k)) (ungroup s) | (k, s) <- (x : xs)]\nungroup (SelectorIdx x xs) = concat [map ((:) (Left i)) (ungroup s) | (i, s) <- (x : xs)]\n\ndata Pattern\n  = PatternText Text [Text]\n  | PatternStruct Selector\n  deriving (Show, Ord, Eq)\n\n-- | Compiler intrinsics: functions the compiler generates specialized code for.\ndata Intrinsic\n  = IntrSave      -- ^ @save   :: a -> Str -> {()}   -- voidstar format\n  | IntrSaveM     -- ^ @savem  :: a -> Str -> {()}   -- msgpack format\n  | IntrSaveJ     -- ^ @savej  :: a -> Str -> {()}   -- JSON format\n  | IntrLoad      -- ^ @load   :: Str -> {?a}        -- auto-detect format\n  | IntrHash      -- ^ @hash   :: a -> Str           -- xxhash, hex string\n  | IntrVersion   -- ^ @version :: Str               -- compiler version\n  | IntrCompiled  -- ^ @compiled :: Str              -- compile timestamp\n  | IntrLang      -- ^ @lang    :: Str               -- current pool language\n  | IntrSchema    -- ^ @schema  :: a -> Str          -- schema string\n  | IntrTypeof    -- ^ @typeof  :: a -> Str          -- concrete type name\n  | IntrShow      -- ^ @show   :: a -> Str           -- serialize to JSON string\n  | IntrRead      -- ^ @read   :: Str -> ?a          -- deserialize from JSON string\n  | IntrDatafile  -- ^ @datafile :: Str -> Str       -- resolve installed data file path\n  deriving (Show, Ord, Eq)\n\n-- | Map intrinsic to its canonical name\nintrinsicName :: Intrinsic -> Text\nintrinsicName IntrSave = \"save\"\nintrinsicName IntrSaveM = \"savem\"\nintrinsicName IntrSaveJ = \"savej\"\nintrinsicName IntrLoad = \"load\"\nintrinsicName IntrHash = \"hash\"\nintrinsicName IntrVersion = \"version\"\nintrinsicName IntrCompiled = \"compiled\"\nintrinsicName IntrLang = \"lang\"\nintrinsicName IntrSchema = \"schema\"\nintrinsicName IntrTypeof = \"typeof\"\nintrinsicName IntrShow = \"show\"\nintrinsicName IntrRead = \"read\"\nintrinsicName IntrDatafile = \"datafile\"\n\n-- | Parse a name to an intrinsic (Nothing if not a known intrinsic)\nparseIntrinsic :: Text -> Maybe Intrinsic\nparseIntrinsic \"save\" = Just IntrSave\nparseIntrinsic \"savem\" = Just IntrSaveM\nparseIntrinsic \"savej\" = Just IntrSaveJ\nparseIntrinsic \"load\" = Just IntrLoad\nparseIntrinsic \"hash\" = Just IntrHash\nparseIntrinsic \"version\" = Just IntrVersion\nparseIntrinsic \"compiled\" = Just IntrCompiled\nparseIntrinsic \"lang\" = Just IntrLang\nparseIntrinsic \"schema\" = Just IntrSchema\nparseIntrinsic \"typeof\" = Just IntrTypeof\nparseIntrinsic \"show\" = Just IntrShow\nparseIntrinsic \"read\" = Just IntrRead\nparseIntrinsic \"datafile\" = Just IntrDatafile\nparseIntrinsic _ = Nothing\n\n-- | Expected number of arguments for each intrinsic\nintrinsicArity :: Intrinsic -> Int\nintrinsicArity IntrSave = 2\nintrinsicArity IntrSaveM = 2\nintrinsicArity IntrSaveJ = 2\nintrinsicArity IntrLoad = 1\nintrinsicArity IntrHash = 1\nintrinsicArity IntrVersion = 0\nintrinsicArity IntrCompiled = 0\nintrinsicArity IntrLang = 0\nintrinsicArity IntrSchema = 1\nintrinsicArity IntrTypeof = 1\nintrinsicArity IntrShow = 1\nintrinsicArity IntrRead = 1\nintrinsicArity IntrDatafile = 1\n\ndata ExprI = ExprI Int Expr\n  deriving (Show, Ord, Eq)\n\ndata Expr\n  = ModE MVar [ExprI]\n  | ClsE (Typeclass Signature)\n  | IstE ClassName [TypeU] [ExprI]\n  | TypE ExprTypeE\n  | ImpE Import\n  | ExpE Export\n  | SrcE Source\n  | SigE Signature\n  | AssE EVar ExprI [ExprI]\n  | FixE Fixity\n  | BopE ExprI Int EVar ExprI\n  | UniE\n  | NullE\n  | VarE ManifoldConfig EVar\n  | HolE\n  | LstE [ExprI]\n  | TupE [ExprI]\n  | NamE [(Key, ExprI)]\n  | AppE ExprI [ExprI]\n  | LamE [EVar] ExprI\n  | AnnE ExprI TypeU\n  | LetE [(EVar, ExprI)] ExprI\n  | RealE Scientific\n  | IntE Integer\n  | LogE Bool\n  | StrE Text\n  | PatE Pattern\n  | IfE ExprI ExprI ExprI\n  | DoBlockE ExprI\n  | EvalE ExprI\n  | IntrinsicE Intrinsic [ExprI]\n  | ParenE !ExprI  -- ^ transient parenthesization wrapper (eliminated by handleBinops)\n  deriving (Show, Ord, Eq)\n\ndata Import\n  = Import\n  { importModuleName :: MVar\n  , importInclude :: Maybe [AliasedSymbol]\n  , importExclude :: [Symbol]\n  , importNamespace :: Maybe EVar\n  }\n  deriving (Ord, Eq, Show)\n\ndata Associativity\n  = InfixL\n  | InfixR\n  | InfixN\n  deriving (Show, Ord, Eq, Enum)\n\ndata Fixity = Fixity\n  { fixityAssoc :: Associativity\n  , fixityPrecedence :: Int\n  , fixityOperators :: [EVar]\n  }\n  deriving (Show, Ord, Eq)\n\ndata Lit\n  = MNum Scientific\n  | MInt Integer\n  | MLog Bool\n  | MStr Text\n  | MUni\n  | MNull\n  deriving (Ord, Eq, Show)\n\ndata E\n  = BndP (Indexed Type) EVar\n  | VarP (Indexed Type) EVar [E]\n  | AppP (Indexed Type) E [E]\n  | LamP (Indexed Type) [EVar] E\n  | LstP (Indexed Type) [E]\n  | TupP (Indexed Type) [E]\n  | NamP (Indexed Type) [(Key, E)]\n  | LitP (Indexed Type) Lit\n  | SrcP (Indexed Type) Source\n  | PatP (Indexed Type) Selector\n  | IfP (Indexed Type) E E E\n  | DoBlockP (Indexed Type) E\n  | EvalP (Indexed Type) E\n  | CoerceP Coercion (Indexed Type) E\n  | IntrinsicP (Indexed Type) Intrinsic [E]\n  deriving (Ord, Eq, Show)\n\n-- | Coercion tag for implicit type conversions inserted by the typechecker.\n-- Extensible: future coercions (e.g., numeric widening) add constructors here.\ndata Coercion\n  = CoerceToOptional\n  | CoerceToEffect (Set.Set EffectLabel)\n  deriving (Show, Eq, Ord)\n\n-- | Apply a coercion to a type, returning the coerced type.\napplyCoercion :: Coercion -> TypeU -> TypeU\napplyCoercion CoerceToOptional t = OptionalU t\napplyCoercion (CoerceToEffect effs) t = EffectU (EffectSet effs) t\n\n-- | Invert a coercion on a resolved Type.\nunapplyCoercion :: Coercion -> Type -> Type\nunapplyCoercion CoerceToOptional (OptionalT t) = t\nunapplyCoercion CoerceToOptional t = t  -- defensive fallback\nunapplyCoercion (CoerceToEffect _) (EffectT _ t) = t\nunapplyCoercion (CoerceToEffect _) t = t  -- defensive fallback\n\ndata ExecutableExpr = SrcCall Source | PatCall Pattern\n  deriving (Ord, Eq, Show)\n\ndata AnnoS g f c = AnnoS g c (ExprS g f c)\n\ndata ExprS g f c\n  = UniS\n  | NullS\n  | BndS EVar\n  | VarS EVar (f (AnnoS g f c))\n  | AppS (AnnoS g f c) [AnnoS g f c]\n  | LamS [EVar] (AnnoS g f c)\n  | LstS [AnnoS g f c]\n  | TupS [AnnoS g f c]\n  | NamS [(Key, AnnoS g f c)]\n  | RealS Scientific\n  | IntS Integer\n  | LogS Bool\n  | StrS Text\n  | ExeS ExecutableExpr\n  | LetS EVar (AnnoS g f c) (AnnoS g f c)\n  | LetBndS EVar\n  | CallS EVar  -- recursive call back-edge\n  | IfS (AnnoS g f c) (AnnoS g f c) (AnnoS g f c)\n  | DoBlockS (AnnoS g f c)\n  | EvalS (AnnoS g f c)\n  | CoerceS Coercion (AnnoS g f c)\n  | IntrinsicS Intrinsic [AnnoS g f c]\n\ndata ManyPoly a = MonomorphicExpr (Maybe EType) [a] | PolymorphicExpr ClassName EVar EType [(EType, [a])]\n  deriving (Show, Eq, Ord)\n\n---- Class instances\n\ninstance HasOneLanguage Source where\n  langOf s = Just (srcLang s)\n  langOf' s = srcLang s\n\ninstance Functor ManyPoly where\n  fmap f (MonomorphicExpr t xs) = MonomorphicExpr t (map f xs)\n  fmap f (PolymorphicExpr cls v t xs) = PolymorphicExpr cls v t (map (second (map f)) xs)\n\ninstance Traversable ManyPoly where\n  traverse f (MonomorphicExpr t xs) = MonomorphicExpr t <$> traverse f xs\n  traverse f (PolymorphicExpr cls v t xs) = PolymorphicExpr cls v t <$> traverse f2 xs\n    where\n      f2 (t', x) = (,) t' <$> traverse f x\n\ninstance Foldable ManyPoly where\n  foldr f b (MonomorphicExpr _ xs) = foldr f b xs\n  foldr f b (PolymorphicExpr _ _ _ (concatMap snd -> xs)) = foldr f b xs\n\ninstance Defaultable ModuleConfig where\n  defaultValue =\n    ModuleConfig\n      { moduleConfigDefaultGroup = Nothing\n      , moduleConfigLabeledGroups = Map.empty\n      }\n\ninstance Defaultable BuildConfig where\n  defaultValue =\n    BuildConfig\n      { buildConfigSlurmSupport = Nothing\n      , buildConfigSanitize = Nothing\n      }\n\ninstance Defaultable RemoteResources where\n  defaultValue =\n    RemoteResources\n      { remoteResourcesThreads = Nothing\n      , remoteResourcesMemory = Nothing\n      , remoteResourcesTime = Nothing\n      , remoteResourcesGpus = Nothing\n      }\n\ninstance Defaultable ManifoldConfig where\n  defaultValue =\n    ManifoldConfig\n      { manifoldConfigCache = Just False\n      , manifoldConfigBenchmark = Just False\n      , manifoldConfigRemote = Nothing\n      }\n\ninstance FromJSON ModuleConfig where\n  parseJSON =\n    Aeson.genericParseJSON $\n      defaultOptions {fieldLabelModifier = stripPrefixAndKebabCase \"moduleConfig\"}\n\ninstance FromJSON ManifoldConfig where\n  parseJSON =\n    Aeson.genericParseJSON $\n      defaultOptions {fieldLabelModifier = stripPrefixAndKebabCase \"manifoldConfig\"}\n\ninstance FromJSON RemoteResources where\n  parseJSON =\n    Aeson.genericParseJSON $\n      defaultOptions {fieldLabelModifier = stripPrefixAndKebabCase \"remoteResources\"}\n\ninstance FromJSON BuildConfig where\n  parseJSON =\n    Aeson.genericParseJSON $\n      defaultOptions {fieldLabelModifier = stripPrefixAndKebabCase \"buildConfig\"}\n\n---- JSON helpers\n\n-- Helper function to strip prefixes and convert to kebab-case\nstripPrefixAndKebabCase :: String -> String -> String\nstripPrefixAndKebabCase prefix str =\n  let stripped = drop (length prefix) str\n   in case stripped of\n        [] -> []\n        (x : xs) -> toLower x : convertToKebabCase xs\n\n-- Convert remaining characters to kebab-case\nconvertToKebabCase :: String -> String\nconvertToKebabCase [] = []\nconvertToKebabCase (x : xs)\n  | isUpper x = '-' : toLower x : convertToKebabCase xs\n  | otherwise = x : convertToKebabCase xs\n\n---- Helper functions for AnnoS/ExprS\n\nmapExprSM ::\n  (Traversable f, Monad m) => (AnnoS g f c -> m (AnnoS g' f c')) -> ExprS g f c -> m (ExprS g' f c')\nmapExprSM f (VarS v xs) = VarS v <$> traverse f xs\nmapExprSM f (AppS x xs) = AppS <$> f x <*> mapM f xs\nmapExprSM f (LamS vs x) = LamS vs <$> f x\nmapExprSM f (LstS xs) = LstS <$> mapM f xs\nmapExprSM f (TupS xs) = TupS <$> mapM f xs\nmapExprSM f (NamS rs) = NamS <$> mapM (secondM f) rs\nmapExprSM _ UniS = return UniS\nmapExprSM _ NullS = return NullS\nmapExprSM _ (BndS v) = return $ BndS v\nmapExprSM _ (RealS x) = return $ RealS x\nmapExprSM _ (IntS x) = return $ IntS x\nmapExprSM _ (LogS x) = return $ LogS x\nmapExprSM _ (StrS x) = return $ StrS x\nmapExprSM _ (ExeS x) = return $ ExeS x\nmapExprSM f (LetS v e1 e2) = LetS v <$> f e1 <*> f e2\nmapExprSM _ (LetBndS v) = return $ LetBndS v\nmapExprSM _ (CallS v) = return $ CallS v\nmapExprSM f (IfS c t e) = IfS <$> f c <*> f t <*> f e\nmapExprSM f (DoBlockS e) = DoBlockS <$> f e\nmapExprSM f (EvalS e) = EvalS <$> f e\nmapExprSM f (CoerceS c e) = CoerceS c <$> f e\nmapExprSM f (IntrinsicS intr es) = IntrinsicS intr <$> mapM f es\n\nmapAnnoSM ::\n  (Traversable f, Monad m) =>\n  (ExprS g f c -> g -> c -> m (g', c')) ->\n  AnnoS g f c ->\n  m (AnnoS g' f c')\nmapAnnoSM fun (AnnoS g c e) = do\n  e' <- mapExprSM (mapAnnoSM fun) e\n  (g', c') <- fun e g c\n  return (AnnoS g' c' e')\n\nmapAnnoS :: (Traversable f) => (ExprS g f c -> g -> c -> (g', c')) -> AnnoS g f c -> AnnoS g' f c'\nmapAnnoS fun = runIdentity . mapAnnoSM (\\x g c -> return (fun x g c))\n\nmapExprS :: (Traversable f) => (AnnoS g f c -> AnnoS g' f c') -> ExprS g f c -> ExprS g' f c'\nmapExprS fun = runIdentity . mapExprSM (return . fun)\n\nmapAnnoSGM :: (Traversable f, Monad m) => (g -> m g') -> AnnoS g f c -> m (AnnoS g' f c)\nmapAnnoSGM f = mapAnnoSM (\\_ gi ci -> (,) <$> f gi <*> pure ci)\n\nmapAnnoSCM :: (Traversable f, Monad m) => (c -> m c') -> AnnoS g f c -> m (AnnoS g f c')\nmapAnnoSCM f = mapAnnoSM (\\_ gi ci -> (,) gi <$> f ci)\n\nmapAnnoSG :: (Traversable f) => (g -> g') -> AnnoS g f c -> AnnoS g' f c\nmapAnnoSG f = mapAnnoS (\\_ gi ci -> (f gi, ci))\n\nmapAnnoSC :: (Traversable f) => (c -> c') -> AnnoS g f c -> AnnoS g f c'\nmapAnnoSC f = mapAnnoS (\\_ gi ci -> (gi, f ci))\n\nmapExprSGM :: (Traversable f, Monad m) => (g -> m g') -> ExprS g f c -> m (ExprS g' f c)\nmapExprSGM f = mapExprSM (\\(AnnoS gi ci e) -> AnnoS <$> f gi <*> pure ci <*> mapExprSGM f e)\n\nmapExprSCM :: (Traversable f, Monad m) => (c -> m c') -> ExprS g f c -> m (ExprS g f c')\nmapExprSCM f = mapExprSM (\\(AnnoS gi ci e) -> AnnoS gi <$> f ci <*> mapExprSCM f e)\n\nmapExprSG :: (Traversable f) => (g -> g') -> ExprS g f c -> ExprS g' f c\nmapExprSG f = mapExprS (\\(AnnoS gi ci e) -> AnnoS (f gi) ci (mapExprSG f e))\n\nmapExprSC :: (Traversable f) => (c -> c') -> ExprS g f c -> ExprS g f c'\nmapExprSC f = mapExprS (\\(AnnoS gi ci e) -> AnnoS gi (f ci) (mapExprSC f e))\n\n----- Pretty instances -------------------------------------------------------\n\ninstance Pretty Lit where\n  pretty (MNum x) = viaShow x\n  pretty (MInt x) = pretty x\n  pretty (MLog x) = pretty x\n  pretty (MStr x) = pretty x\n  pretty MUni = \"Unit\"\n  pretty MNull = \"Null\"\n\ninstance Pretty E where\n  pretty (BndP _ v) = pretty v\n  pretty (VarP _ v _) = pretty v\n  pretty (AppP _ e es) = pretty e <+> hsep (map f es)\n    where\n      f x@AppP {} = parens (pretty x)\n      f x@LamP {} = parens (pretty x)\n      f x@SrcP {} = parens (pretty x)\n      f x = pretty x\n  pretty (LamP _ vs e) = \"\\\\\" <+> hsep (map pretty vs) <+> \"->\" <+> pretty e\n  pretty (LstP _ es) = list (map pretty es)\n  pretty (TupP _ es) = tupled (map pretty es)\n  pretty (NamP _ rs) = encloseSep \"{\" \"}\" \",\" [pretty k <+> \"=\" <+> pretty e | (k, e) <- rs]\n  pretty (LitP _ l) = pretty l\n  pretty (SrcP _ src) = pretty src\n  pretty (PatP _ s) = pretty (PatternStruct s)\n  pretty (IfP _ c t e) = \"if\" <+> pretty c <+> \"then\" <+> pretty t <+> \"else\" <+> pretty e\n  pretty (DoBlockP _ e) = \"{\" <> pretty e <> \"}\"\n  pretty (EvalP _ e) = \"!\" <> pretty e\n  pretty (CoerceP _ _ e) = \"coerce(\" <> pretty e <> \")\"\n  pretty (IntrinsicP _ intr args) = \"@\" <> pretty (intrinsicName intr) <+> hsep (map pretty args)\n\ninstance Pretty Source where\n  pretty s =\n    \"source\" <+> pretty (srcLang s)\n      <> maybe \"\" (\\path -> \" from\" <+> dquotes (pretty path)) (srcPath s)\n        <+> dquotes (pretty (srcName s))\n        <+> \"as\"\n        <+> pretty (srcAlias s)\n      <> maybe \"\" (\\t -> \":\" <> pretty t) (srcLabel s)\n\ninstance Pretty ExportGroup where\n  pretty (ExportGroup name desc members) =\n    \"--*\" <+> pretty name\n      <> maybe \"\" (\\d -> \":\" <+> pretty d) (listToMaybe desc)\n        <+> tupled (map pretty (Set.toList members))\n\ninstance Pretty Symbol where\n  pretty (TypeSymbol x) = pretty x\n  pretty (TermSymbol x) = pretty x\n  pretty (ClassSymbol x) = pretty x\n\ninstance Pretty AliasedSymbol where\n  pretty (AliasedType x alias)\n    | x == alias = pretty x\n    | otherwise = pretty x <+> \"as\" <+> pretty alias\n  pretty (AliasedTerm x alias)\n    | x == alias = pretty x\n    | otherwise = pretty x <+> \"as\" <+> pretty alias\n  pretty (AliasedClass x) = pretty x\n\ninstance Pretty ExprI where\n  pretty (ExprI i e) = parens (pretty e) <> \":\" <> pretty i\n\ninstance Pretty Pattern where\n  pretty (PatternText s ss) = dquotes $ hcat (pretty s : [\"#{}\" <> pretty s' | s' <- ss])\n  pretty (PatternStruct s) = pretty s\n\ninstance Pretty Selector where\n  pretty SelectorEnd = \"\"\n  pretty (SelectorKey (k, s) []) = \".\" <> pretty k <> pretty s\n  pretty (SelectorIdx (i, s) []) = \".\" <> pretty i <> pretty s\n  pretty (SelectorKey r rs) = \".\" <> tupled [\".\" <> pretty k <> pretty s | (k, s) <- (r : rs)]\n  pretty (SelectorIdx r rs) = \".\" <> tupled [\".\" <> pretty i <> pretty s | (i, s) <- (r : rs)]\n\ninstance Pretty Expr where\n  pretty HolE = \"_\"\n  pretty (PatE pat) = \"pattern:\" <+> pretty pat\n  pretty UniE = \"()\"\n  pretty (ModE v es) = align . vsep $ (\"module\" <+> pretty v) : map pretty es\n  pretty (ClsE (Typeclass constraints cls vs sigs)) =\n    \"class\" <+> consStr <> pretty cls <+> hsep (map pretty vs) <> (align . vsep . map pretty) sigs\n    where\n      consStr = case constraints of\n        [] -> \"\"\n        [c] -> pretty c <+> \"=> \"\n        _ -> tupled (map pretty constraints) <+> \"=> \"\n  pretty (IstE cls ts es) = \"instance\" <+> pretty cls <+> hsep (map (parens . pretty) ts) <> (align . vsep . map pretty) es\n  pretty (TypE (ExprTypeE lang v vs t _)) =\n    \"type\" <+> pretty lang\n      <> \"@\"\n      <> pretty v\n        <+> sep (map (either pretty (parens . pretty)) vs)\n        <+> \"=\"\n        <+> pretty t\n  pretty (ImpE (Import m Nothing _ _)) = \"import\" <+> pretty m\n  pretty (ImpE (Import m (Just xs) _ _)) = \"import\" <+> pretty m <+> tupled (map pretty xs)\n  pretty (ExpE ExportAll) = \"export *\"\n  pretty (ExpE (ExportMany symbols groups)) =\n    \"export\"\n      <+> tupled\n        ( map pretty (Set.toList symbols)\n            ++ [pretty g | g <- groups]\n        )\n  pretty (VarE _ s) = pretty s\n  pretty (LamE v e) = \"\\\\\" <+> pretty v <+> \"->\" <+> pretty e\n  pretty (AnnE e t) = parens (pretty e <+> \"::\" <+> pretty t)\n  pretty (LstE es) = encloseSep \"[\" \"]\" \",\" (map pretty es)\n  pretty (TupE es) = encloseSep \"[\" \"]\" \",\" (map pretty es)\n  pretty (AppE f es) = vsep (map pretty (f : es))\n  pretty (NamE rs) = block 4 \"<RECORD>\" (vsep [pretty k <+> \"::\" <+> pretty x | (k, x) <- rs])\n  pretty (RealE x) = pretty (show x)\n  pretty (IntE x) = pretty (show x)\n  pretty (StrE x) = dquotes (pretty x)\n  pretty (LogE x) = pretty x\n  pretty (LetE bindings body) = vsep [pretty v <+> \"=\" <+> pretty e | (v, e) <- bindings] <+> \"in\" <+> pretty body\n  pretty (AssE v e es) = pretty v <+> \"=\" <+> pretty e <+> \"where\" <+> (align . vsep . map pretty) es\n  pretty (SrcE (Source srcname lang file' alias _ rsizes _ _ _)) =\n    \"source\"\n      <+> viaShow lang\n      <> maybe \"\" (\\f -> \"from\" <+> pretty f) file'\n        <+> \"where\\n\"\n      <> indent\n        2\n        ( vsep\n            [ \"--' srcname: \" <> pretty srcname\n            , \"--' rsize: \" <> encloseSep \"\" \"\" \" \" (map pretty rsizes)\n            , pretty alias\n            ]\n        )\n  pretty (SigE (Signature v _ e)) =\n    pretty v <+> \"::\" <+> pretty e\n  pretty (FixE (Fixity assoc prec ops)) =\n    assocStr <+> pretty prec <+> hsep (map pretty ops)\n    where\n      assocStr :: Doc ann\n      assocStr = case assoc of\n        InfixL -> \"infixl\"\n        InfixR -> \"infixr\"\n        InfixN -> \"infix\"\n  pretty (ParenE e) = parens (pretty e)\n  pretty (BopE e1 _ v e2) = pretty e1 <+> pretty v <+> pretty e2\n  pretty (IfE c t e) = \"if\" <+> pretty c <+> \"then\" <+> pretty t <+> \"else\" <+> pretty e\n  pretty NullE = \"Null\"\n  pretty (DoBlockE e) = \"{\" <> pretty e <> \"}\"\n  pretty (EvalE e) = \"!\" <> pretty e\n  pretty (IntrinsicE intr args) = \"@\" <> pretty (intrinsicName intr) <+> hsep (map pretty args)\n\ninstance (Foldable f) => Pretty (AnnoS a f b) where\n  pretty (AnnoS _ _ e) = pretty e\n\ninstance (Foldable f) => Pretty (ExprS a f b) where\n  pretty (AppS e es) = \"(AppS\" <+> list (map pretty (e : es)) <> \")\"\n  pretty (VarS v res) = \"(VarS\" <+> pretty v <+> \"=\" <+> list (map pretty (toList res)) <> \")\"\n  pretty (LamS vs e) = \"(LamS\" <+> list (map pretty vs) <+> \"->\" <+> pretty e <> \")\"\n  pretty (LstS es) = \"(LstS\" <+> list (map pretty es) <> \")\"\n  pretty (TupS es) = \"(TupS\" <+> list (map pretty es) <> \")\"\n  pretty (NamS rs) = \"(NamS\" <+> list [pretty k <> \"=\" <> pretty v | (k, v) <- rs] <> \")\"\n  pretty UniS = \"UniS\"\n  pretty NullS = \"NullS\"\n  pretty (BndS x) = \"(BndS\" <+> pretty x <> \")\"\n  pretty (RealS x) = viaShow x\n  pretty (IntS x) = viaShow x\n  pretty (LogS x) = viaShow x\n  pretty (StrS x) = viaShow x\n  pretty (ExeS x) = pretty x\n  pretty (LetS v e1 e2) = \"(LetS\" <+> pretty v <+> \"=\" <+> pretty e1 <+> \"in\" <+> pretty e2 <> \")\"\n  pretty (LetBndS x) = \"(LetBndS\" <+> pretty x <> \")\"\n  pretty (CallS v) = \"(CallS\" <+> pretty v <> \")\"\n  pretty (IfS c t e) = \"(IfS\" <+> pretty c <+> pretty t <+> pretty e <> \")\"\n  pretty (DoBlockS e) = \"(DoBlockS\" <+> pretty e <> \")\"\n  pretty (EvalS e) = \"(EvalS\" <+> pretty e <> \")\"\n  pretty (CoerceS c e) = \"(CoerceS\" <+> viaShow c <+> pretty e <> \")\"\n  pretty (IntrinsicS intr es) = \"(IntrinsicS\" <+> viaShow intr <+> list (map pretty es) <> \")\"\n\ninstance Pretty ExecutableExpr where\n  pretty (SrcCall src) = pretty src\n  pretty (PatCall pat) = pretty pat\n\ninstance Pretty Signature where\n  pretty (Signature v _ e) = pretty v <+> \"::\" <+> pretty (etype e)\n"
  },
  {
    "path": "library/Morloc/Namespace/Prim.hs",
    "content": "{-# LANGUAGE DeriveGeneric #-}\n{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Namespace.Prim\nDescription : Foundation types with zero dependency on other Namespace modules\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nPrimitive types (newtypes for names, keys, paths) and small data structures\n(DAG, GMap, Or, None, etc.) used across the compiler. Re-exports\n\"Morloc.Internal\" so that downstream modules get the proto-prelude for free.\n-}\nmodule Morloc.Namespace.Prim\n  ( -- ** re-exports\n    module Morloc.Internal\n\n    -- ** Synonyms\n  , MDoc\n  , DAG\n\n    -- ** Other functors\n  , None (..)\n  , One (..)\n  , Or (..)\n  , Many (..)\n\n    -- ** Other classes\n  , Three (..)\n  , Defaultable (..)\n\n    -- ** Indexed\n  , IndexedGeneral (..)\n  , Indexed\n\n    -- ** Newtypes\n  , MVar (..)\n  , EVar (..)\n  , TVar (..)\n  , ClassName (..)\n  , CVar (..)\n  , Key (..)\n  , Label (..)\n  , SrcName (..)\n  , Path\n  , Code (..)\n  , TimeInSeconds (..)\n  , DirTree (File, Dir, Failed)\n  , AnchoredDirTree ((:/))\n  , failures\n  , writeDirectoryWith\n\n    -- ** Data\n  , GMap (..)\n  , GMapRet (..)\n\n    -- ** Source locations\n  , SrcLoc (..)\n\n    -- ** Typeclasses\n  , HasOneLanguage (..)\n\n    -- ** Language\n  , Lang (..)\n\n    -- ** Kinds\n  , Kind (..)\n  ) where\n\nimport Data.Aeson (FromJSON (..))\nimport qualified Data.Aeson as Aeson\nimport Data.Binary (Binary)\nimport Data.Map.Strict (Map)\nimport qualified Data.Map.Strict as Map\nimport Data.Text (Text)\nimport qualified Data.Text as DT\nimport GHC.Generics (Generic)\nimport Morloc.Data.Doc\nimport Morloc.Internal\nimport Morloc.Language (Lang (..))\nimport System.Directory.Tree (AnchoredDirTree ((:/)), DirTree (Dir, Failed, File), failures, writeDirectoryWith)\nimport Text.Read (readMaybe)\n\n---- Kinds\n\n-- | Kind of a type variable: either a proper type or a natural number (for dimensions)\ndata Kind = KindType | KindNat\n  deriving (Show, Ord, Eq)\n\n---- Typeclasses\n\n-- | Types that are associated with exactly one language\nclass HasOneLanguage a where\n  langOf :: a -> Maybe Lang\n  langOf' :: a -> Lang\n\n  langOf x = Just (langOf' x)\n  langOf' x = fromJust (langOf x)\n\n-- | Types with a sensible default (used for initial state, config, etc.)\nclass Defaultable a where\n  defaultValue :: a\n\n---- Type definitions\n\n-- | Unannotated pretty-printer document\ntype MDoc = Doc ()\n\n{- | A directed acyclic graph. Each key maps to a node value and a list of\n@(child-key, edge-data)@ pairs. The structure is not enforced to be acyclic;\n'Morloc.Data.DAG.synthesize' detects cycles at runtime.\n-}\ntype DAG key edge node = Map key (node, [(key, edge)])\n\n{- | A two-level map: outer keys -> inner keys -> values. Multiple outer keys\nmay share the same inner key. See \"Morloc.Data.GMap\" for operations.\n-}\ndata GMap a b c = GMap (Map a b) (Map b c)\n  deriving (Show, Ord, Eq)\n\n-- | Result of a 'GMap' lookup\ndata GMapRet c\n  = -- | Outer key not found\n    GMapNoFst\n  | -- | Inner key not found (possible bug)\n    GMapNoSnd\n  | -- | Successful lookup\n    GMapJust c\n  deriving (Show, Ord, Eq)\n\n-- | Source location span for error reporting\ndata SrcLoc = SrcLoc\n  { srcLocPath :: Maybe Path\n  , srcLocLine :: Int\n  , srcLocCol :: Int\n  , srcLocEndLine :: Int\n  , srcLocEndCol :: Int\n  }\n  deriving (Show, Ord, Eq)\n\n-- | Module name (e.g., @\\\"math\\\"@, @\\\"bio.algo\\\"@)\nnewtype MVar = MV {unMVar :: Text} deriving (Show, Eq, Ord)\n\n-- | Term\\/expression variable name\nnewtype EVar = EV {unEVar :: Text} deriving (Show, Eq, Ord)\n\n-- | Type variable name\nnewtype TVar = TV {unTVar :: Text} deriving (Show, Eq, Ord)\n\n-- | Typeclass name\nnewtype ClassName = ClassName {unClassName :: Text} deriving (Show, Eq, Ord)\n\n-- | Concrete type name (language-specific)\nnewtype CVar = CV {unCVar :: Text} deriving (Show, Eq, Ord)\n\n-- | Record field key\nnewtype Key = Key {unKey :: Text} deriving (Show, Eq, Ord, Generic)\n\ninstance Binary Key\n\n-- | Source label for grouping manifold configurations\nnewtype Label = Label {unLabel :: Text} deriving (Show, Eq, Ord)\n\n{- | Name of a foreign function (may contain characters illegal in morloc,\ne.g., the R function @file.exists@)\n-}\nnewtype SrcName = SrcName {unSrcName :: Text} deriving (Show, Eq, Ord)\n\n-- | A blob of generated code\nnewtype Code = Code {unCode :: Text} deriving (Show, Eq, Ord)\n\n-- | Duration in seconds (parsed from SLURM time format)\nnewtype TimeInSeconds = TimeInSeconds {unTimeInSeconds :: Int} deriving (Show, Eq, Ord)\n\n-- | Filesystem path (String because filepath libraries expect String)\ntype Path = String\n\n-- | A three-way sum type\ndata Three a b c = A a | B b | C c\n  deriving (Ord, Eq, Show)\n\n-- | A type carrying no information (used as a placeholder edge type in DAGs)\ndata None = None\n  deriving (Show)\n\n-- | A single wrapped value\nnewtype One a = One {unOne :: a}\n  deriving (Show)\n\n-- | A wrapped list\nnewtype Many a = Many {unMany :: [a]}\n  deriving (Show)\n\n-- | A value that may be left, right, or both\ndata Or a b = L a | R b | LR a b\n  deriving (Ord, Eq, Show)\n\n-- | 'IndexedGeneral' specialized to 'Int' keys\ntype Indexed = IndexedGeneral Int\n\n-- | A value paired with an index\ndata IndexedGeneral k a = Idx k a\n  deriving (Show, Ord, Eq)\n\n---- Fundamental class instances\n\ninstance Functor (IndexedGeneral k) where\n  fmap f (Idx i x) = Idx i (f x)\n\ninstance Functor One where\n  fmap f (One x) = One (f x)\n\ninstance Functor Many where\n  fmap f (Many x) = Many (map f x)\n\ninstance Traversable One where\n  traverse f (One x) = One <$> f x\n\ninstance Traversable Many where\n  traverse f (Many xs) = Many <$> traverse f xs\n\ninstance Foldable One where\n  foldr f b (One a) = f a b\n\ninstance Foldable Many where\n  foldr f b (Many xs) = foldr f b xs\n\ninstance Bifunctor Or where\n  bimapM f _ (L a) = L <$> f a\n  bimapM _ g (R a) = R <$> g a\n  bimapM f g (LR a b) = LR <$> f a <*> g b\n\ninstance Bifoldable Or where\n  bilistM f _ (L a) = f a |>> return\n  bilistM _ g (R b) = g b |>> return\n  bilistM f g (LR a b) = do\n    c1 <- f a\n    c2 <- g b\n    return [c1, c2]\n\ninstance Annotated IndexedGeneral where\n  val (Idx _ x) = x\n  ann (Idx i _) = i\n  annotate i x = Idx i x\n\n-- Custom FromJSON instance for TimeInSeconds\ninstance FromJSON TimeInSeconds where\n  parseJSON (Aeson.String t) = case parseSlurmTime (DT.unpack t) of\n    Just seconds -> return $ TimeInSeconds seconds\n    Nothing -> fail $ \"Invalid SLURM time format: \" ++ DT.unpack t\n  parseJSON _ = fail \"Expected a string for SLURM time\"\n\n-- Convert SLURM time string (e.g., \"01-00:00:00\") to seconds\nparseSlurmTime :: String -> Maybe Int\nparseSlurmTime str = case splitOn \"-\" str of\n  [days, hms] -> do\n    d <- readMaybe days :: Maybe Int\n    s <- parseHMS hms\n    return $ d * 86400 + s\n  [hms] -> parseHMS hms -- No days specified\n  _ -> Nothing\n\n-- Helper to parse \"HH:MM:SS\" into seconds\nparseHMS :: String -> Maybe Int\nparseHMS hms = case splitOn \":\" hms of\n  [hours, minutes, seconds] -> do\n    h <- readMaybe hours :: Maybe Int\n    m <- readMaybe minutes :: Maybe Int\n    s <- readMaybe seconds :: Maybe Int\n    return $ h * 3600 + m * 60 + s\n  _ -> Nothing\n\n----- Pretty instances -------------------------------------------------------\n\ninstance Pretty Kind where\n  pretty KindType = \"Type\"\n  pretty KindNat = \"Nat\"\n\ninstance Pretty SrcLoc where\n  pretty (SrcLoc path ln col endLn endCol)\n    | ln == endLn && col == endCol =\n        maybe \"<unknown>\" pretty path <> \":\" <> pretty ln <> \":\" <> pretty col\n    | ln == endLn =\n        maybe \"<unknown>\" pretty path <> \":\" <> pretty ln <> \":\" <> pretty col <> \"-\" <> pretty endCol\n    | otherwise =\n        maybe \"<unknown>\" pretty path\n          <> \":\"\n          <> pretty ln\n          <> \":\"\n          <> pretty col\n          <> \"-\"\n          <> pretty endLn\n          <> \":\"\n          <> pretty endCol\n\ninstance (Pretty a, Pretty b) => Pretty (Or a b) where\n  pretty (L x) = parens (\"L\" <+> pretty x)\n  pretty (R x) = parens (\"R\" <+> pretty x)\n  pretty (LR x y) = parens (\"LR\" <+> pretty x <> \",\" <+> pretty y)\n\ninstance Pretty EVar where\n  pretty (EV v) = pretty v\n\ninstance Pretty MVar where\n  pretty = pretty . unMVar\n\ninstance Pretty TimeInSeconds where\n  pretty = pretty . unTimeInSeconds\n\ninstance Pretty TVar where\n  pretty (TV v) = pretty v\n\ninstance Pretty ClassName where\n  pretty = pretty . unClassName\n\ninstance Pretty Key where\n  pretty (Key v) = pretty v\n\ninstance Pretty CVar where\n  pretty v = pretty (unCVar v)\n\ninstance Pretty Label where\n  pretty (Label v) = pretty v\n\ninstance Pretty SrcName where\n  pretty = pretty . unSrcName\n\ninstance Pretty Code where\n  pretty = pretty . unCode\n\ninstance Pretty None where\n  pretty None = \"()\"\n\ninstance (Pretty a) => Pretty (One a) where\n  pretty (One x) = pretty x\n\ninstance (Pretty a) => Pretty (Many a) where\n  pretty (Many xs) = list $ map pretty xs\n\ninstance (Pretty k, Pretty a) => Pretty (IndexedGeneral k a) where\n  pretty (Idx i x) = parens (pretty i <> \":\" <+> pretty x)\n\ninstance (Pretty k1, Pretty k2, Pretty v) => Pretty (GMap k1 k2 v) where\n  pretty (GMap m1 m2) = \"GMap\" <+> (align . vsep $ [pretty (Map.toList m1), pretty (Map.toList m2)])\n"
  },
  {
    "path": "library/Morloc/Namespace/State.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Namespace.State\nDescription : Compiler state, monad stack, config, errors\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nThe compiler monad ('MorlocMonad') and its components:\n\n* 'Config' -- read-only configuration loaded from @~\\/.local\\/share\\/morloc\\/config@\n* 'MorlocError' -- all compiler error types\n* 'MorlocState' -- mutable state threading type info, sources, and metadata\n  through the pipeline\n* 'Gamma' \\/ 'GammaIndex' -- typechecking context (ordered list of assumptions)\n* 'Script' -- a generated pool file with its build commands\n-}\nmodule Morloc.Namespace.State\n  ( -- * Morloc monad\n    MorlocMonadGen\n  , MorlocMonad\n  , MorlocReturn\n  , MorlocState (..)\n  , SignatureSet (..)\n  , Instance (..)\n  , TermTypes (..)\n\n    -- * Error handling\n  , MorlocError (..)\n\n    -- * Configuration\n  , Config (..)\n\n    -- * Package metadata\n  , PackageMeta (..)\n\n    -- * Typechecking\n  , Gamma (..)\n  , GammaIndex (..)\n  , ConstVal (..)\n\n    -- * Data files\n  , NexusSource (..)\n\n    -- * Sockets\n  , Socket (..)\n\n    -- * System\n  , SysCommand (..)\n  , Script (..)\n\n    -- * Language registry\n  , LangRegistry (..)\n  , LangRegistryEntry (..)\n  ) where\n\nimport Control.Monad.Except (ExceptT)\nimport Control.Monad.Reader (ReaderT)\nimport Control.Monad.State (StateT)\nimport Control.Monad.Writer (WriterT)\nimport Data.Aeson (FromJSON (..), (.!=), (.:?))\nimport qualified Data.Aeson as Aeson\nimport qualified Data.IntMap.Strict as IntMap\nimport qualified Data.Map as Map\nimport Data.Map.Strict (Map)\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport Morloc.Data.Doc\nimport Morloc.LangRegistry (LangRegistry (..), LangRegistryEntry (..))\nimport qualified Morloc.LangRegistry as LR\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.Type\n\n---- Monad types\n\n{- | The general monad transformer stack: Reader for config, Except for errors,\nWriter for log messages, State for mutable compiler state, over IO.\n-}\ntype MorlocMonadGen c e l s a =\n  ReaderT c (ExceptT e (WriterT l (StateT s IO))) a\n\n-- | The full result of running a MorlocMonad computation\ntype MorlocReturn a = ((Either MorlocError a, [Text]), MorlocState)\n\n-- | The concrete compiler monad used throughout the pipeline\ntype MorlocMonad a = MorlocMonadGen Config MorlocError [Text] MorlocState a\n\n---- State\n\n{- | Mutable compiler state threaded through the entire pipeline.\nAccumulates type signatures, source bindings, typedefs, and metadata\nas modules are parsed, linked, and typechecked.\n-}\ndata MorlocState = MorlocState\n  { statePackageMeta :: [PackageMeta]\n  , stateVerbosity :: Int\n  , stateCounter :: Int\n  , stateDepth :: Int\n  , stateSignatures :: GMap Int Int SignatureSet\n  , stateTypeclasses :: Map.Map EVar Instance\n  , stateConcreteTypedefs :: GMap Int MVar (Map Lang Scope)\n  , stateGeneralTypedefs :: GMap Int MVar Scope\n  , stateUniversalGeneralTypedefs :: Scope\n  , stateUniversalConcreteTypedefs :: Map Lang Scope\n  , stateSources :: GMap Int MVar [Source]\n  , stateAnnotations :: Map Int TypeU\n  , stateOutfile :: Maybe Path\n  , stateExports :: [Int]\n  , stateName :: Map Int EVar\n  , stateTermDocs :: Map.Map EVar [Text]\n  -- ^ Declaration-level docstrings keyed by term name. Takes precedence over\n  -- signature docstrings for the command-level description.\n  , stateManifoldConfig :: Map Int ManifoldConfig\n  , stateSourceMap :: Map Int SrcLoc\n  , stateSourceText :: Map Path Text\n  , stateBuildConfig :: BuildConfig\n  , stateModuleName :: Maybe MVar\n  , stateInstall :: Bool\n  , stateInstallForce :: Bool\n  , stateInstallDir :: Maybe Path\n  , stateClassDefs :: Map ClassName [Constraint]\n  , stateLangRegistry :: LangRegistry\n  , stateExportGroups :: Map Text ([Text], [Int])\n  -- ^ Map from group name to (description lines, member export indices)\n  , stateManifoldLang :: Map Int Lang\n  -- ^ Map from export manifold ID to its pool language\n  , stateManifoldEffects :: Map Int (Set.Set EffectLabel)\n  -- ^ Map from export manifold ID to its original return effect labels\n  , stateProjectRoot :: Maybe Path\n  -- ^ Project root directory (directory of the entry-point file)\n  , stateEvalMode :: Bool\n  -- ^ True when running in eval mode (restricts source/class/instance)\n  , stateModuleDoc :: [Text]\n  -- ^ Module-level description lines (from docstrings before module declaration)\n  , stateModuleEpilogues :: [[Text]]\n  -- ^ Epilogue blocks for the top-level help output\n  }\n  deriving (Show)\n\ndata SignatureSet\n  = Monomorphic TermTypes\n  | Polymorphic\n      ClassName\n      EVar\n      EType\n      [TermTypes]\n  deriving (Show)\n\ndata Instance = Instance\n  { className :: ClassName\n  , classVars :: [TVar]\n  , classType :: EType\n  , instanceTerms :: [TermTypes]\n  }\n  deriving (Show, Ord, Eq)\n\ndata TermTypes = TermTypes\n  { termGeneral :: Maybe EType\n  , termConcrete :: [(MVar, Indexed Source)]\n  , termDecl :: [ExprI]\n  }\n  deriving (Show, Ord, Eq)\n\n---- Error types\n\n-- | All compiler errors\ndata MorlocError\n  = -- | Error tied to a specific AST node index\n    SourcedError Int MDoc\n  | -- | Internal compiler error (bug)\n    SystemError MDoc\n  | -- | Type unification failure\n    UnificationError Int Int Int MDoc\n  deriving (Show)\n\n---- Configuration\n\n-- | Read-only configuration loaded from the morloc config file\ndata Config\n  = Config\n  { configHome :: !Path\n  , configLibrary :: !Path\n  , configPlane :: !Path\n  , configPlaneCore :: !Path\n  , configTmpDir :: !Path\n  , configBuildConfig :: !Path\n  , configLangOverrides :: !(Map Text [Text])\n  , configRegistry :: !(Maybe Text)\n  }\n  deriving (Show, Ord, Eq)\n\n---- Package metadata\n\ndata PackageMeta\n  = PackageMeta\n  { packageName :: !Text\n  , packageVersion :: !Text\n  , packageHomepage :: !Text\n  , packageSynopsis :: !Text\n  , packageDescription :: !Text\n  , packageCategory :: !Text\n  , packageLicense :: !Text\n  , packageAuthor :: !Text\n  , packageMaintainer :: !Text\n  , packageGithub :: !Text\n  , packageBugReports :: !Text\n  , packageCppVersion :: !Int\n  , packageDependencies :: [Text]\n  , packageInclude :: Maybe [Text]\n  }\n  deriving (Show, Ord, Eq)\n\n---- Typechecking context\n\n{- | Entries in the typechecking context (an ordered list of assumptions).\nThe context is manipulated as a stack during bidirectional typechecking.\n-}\ndata GammaIndex\n  = VarG TVar\n  | AnnG EVar TypeU\n  | ExistG\n      TVar\n      ([TypeU], OpenOrClosed)\n      ([(Key, TypeU)], OpenOrClosed)\n  | SolvedG TVar TypeU\n  | MarkG TVar\n  | SrcG Source\n  deriving (Ord, Eq, Show)\n\n{- | Typechecking context using IntMap for O(log N) operations.\nEntries are keyed by monotonically increasing slot numbers (higher = newer).\nSide-indexes provide O(log N) lookup of ExistG entries by TVar.\n-}\ndata Gamma = Gamma\n  { gammaCounter :: !Int\n  -- | Next available slot number (always increasing)\n  , gammaSlot :: !Int\n  -- | Ordered context: higher slot = newer entry\n  , gammaContext :: IntMap.IntMap GammaIndex\n  -- | Index: ExistG TVar -> slot number (for O(log N) access1)\n  , gammaExist :: Map TVar Int\n  -- | Cache of solved existential types\n  , gammaSolved :: Map TVar TypeU\n  -- | Nat constraints that could not be solved (deferred)\n  , gammaDeferred :: [(TypeU, TypeU)]\n  -- | Solutions for NatVarU variables from nat constraint solving\n  , gammaNatSubs :: Map TVar TypeU\n  -- | Known constant values for let-bound variables (for nat label resolution).\n  -- Tracks integers, tuples, and records so accessors like .0 can be evaluated.\n  , gammaIntVals :: Map EVar ConstVal\n  }\n\n-- | Compile-time constant values tracked during typechecking for nat label\n-- resolution. Only pure literal expressions are tracked.\ndata ConstVal\n  = ConstInt Integer\n  | ConstTup [ConstVal]\n  deriving (Show, Eq, Ord)\n\n---- Data files and system\n\ndata NexusSource = NexusSource\n  { nexusSourceUtility :: MDoc\n  , nexusSourceMain :: MDoc\n  }\n\ndata Socket = Socket\n  { socketLang :: Lang\n  , socketServerInit :: [MDoc]\n  , socketPath :: MDoc\n  }\n  deriving (Show)\n\ndata SysCommand\n  = SysExe Path\n  | SysMove Path Path\n  | SysRun Code\n  | SysInstall Path\n  | SysUnlink Path\n  deriving (Show, Ord, Eq)\n\ndata Script\n  = Script\n  { scriptBase :: !String\n  , scriptLang :: !Lang\n  , scriptCode :: !(AnchoredDirTree Code)\n  , scriptMake :: ![SysCommand]\n  }\n  deriving (Show, Ord, Eq)\n\n---- Instances\n\ninstance Defaultable MorlocState where\n  defaultValue =\n    MorlocState\n      { statePackageMeta = []\n      , stateVerbosity = 0\n      , stateCounter = -1\n      , stateDepth = 0\n      , stateSignatures = GMap Map.empty Map.empty\n      , stateTypeclasses = Map.empty\n      , stateConcreteTypedefs = GMap Map.empty Map.empty\n      , stateGeneralTypedefs = GMap Map.empty Map.empty\n      , stateUniversalConcreteTypedefs = Map.empty\n      , stateUniversalGeneralTypedefs = Map.empty\n      , stateSources = GMap Map.empty Map.empty\n      , stateAnnotations = Map.empty\n      , stateOutfile = Nothing\n      , stateExports = []\n      , stateName = Map.empty\n      , stateTermDocs = Map.empty\n      , stateManifoldConfig = Map.empty\n      , stateSourceMap = Map.empty\n      , stateSourceText = Map.empty\n      , stateBuildConfig = defaultValue\n      , stateModuleName = Nothing\n      , stateInstall = False\n      , stateInstallForce = False\n      , stateInstallDir = Nothing\n      , stateClassDefs = Map.empty\n      , stateLangRegistry = LR.emptyRegistry\n      , stateExportGroups = Map.empty\n      , stateManifoldLang = Map.empty\n      , stateManifoldEffects = Map.empty\n      , stateProjectRoot = Nothing\n      , stateEvalMode = False\n      , stateModuleDoc = []\n      , stateModuleEpilogues = []\n      }\n\ninstance Defaultable PackageMeta where\n  defaultValue =\n    PackageMeta\n      { packageName = \"\"\n      , packageVersion = \"\"\n      , packageHomepage = \"\"\n      , packageSynopsis = \"\"\n      , packageDescription = \"\"\n      , packageCategory = \"\"\n      , packageLicense = \"\"\n      , packageAuthor = \"\"\n      , packageMaintainer = \"\"\n      , packageGithub = \"\"\n      , packageBugReports = \"\"\n      , packageCppVersion = 17\n      , packageDependencies = []\n      , packageInclude = Nothing\n      }\n\ninstance FromJSON Config where\n  parseJSON =\n    Aeson.withObject \"object\" $ \\o -> do\n      home' <- o .:? \"home\" .!= \"~/.local/share/morloc\"\n      source' <- o .:? \"source\" .!= \"~/.local/share/morloc/src/morloc\"\n      plane' <- o .:? \"plane\" .!= \"default\"\n      planeCore' <- o .:? \"plane-core\" .!= \"morloclib\"\n      tmpdir' <- o .:? \"tmpdir\" .!= \"~/.local/share/morloc/tmp\"\n      buildConfig' <- o .:? \"build-config\" .!= \"~/.local/share/morloc/build-config.yaml\"\n      -- Parse legacy lang_python3/lang_R fields into langOverrides\n      pyCmd <- o .:? \"lang_python3\" .!= (\"\" :: Text)\n      rCmd <- o .:? \"lang_R\" .!= (\"\" :: Text)\n      overrides <- o .:? \"lang_overrides\" .!= Map.empty\n      registry' <- o .:? \"registry\"\n      let legacyOverrides =\n            Map.fromList $\n              filter\n                (not . null . snd)\n                [ (\"py\", if pyCmd == \"\" then [] else [pyCmd])\n                , (\"r\", if rCmd == \"\" then [] else [rCmd])\n                ]\n          allOverrides = Map.union overrides legacyOverrides\n      return $ Config home' source' plane' planeCore' tmpdir' buildConfig' allOverrides registry'\n\ninstance FromJSON PackageMeta where\n  parseJSON = Aeson.withObject \"object\" $ \\o ->\n    PackageMeta\n      <$> o .:? \"name\" .!= \"\"\n      <*> o .:? \"version\" .!= \"\"\n      <*> o .:? \"homepage\" .!= \"\"\n      <*> o .:? \"synopsis\" .!= \"\"\n      <*> o .:? \"description\" .!= \"\"\n      <*> o .:? \"category\" .!= \"\"\n      <*> o .:? \"license\" .!= \"\"\n      <*> o .:? \"author\" .!= \"\"\n      <*> o .:? \"maintainer\" .!= \"\"\n      <*> o .:? \"github\" .!= \"\"\n      <*> o .:? \"bug-reports\" .!= \"\"\n      <*> o .:? \"cpp-version\" .!= 0\n      <*> o .:? \"dependencies\" .!= []\n      <*> o .:? \"include\"\n\n----- Pretty instances -------------------------------------------------------\n\ninstance Pretty Instance where\n  pretty (Instance cls vs et ts) =\n    \"Instance\"\n      <+> pretty cls\n      <+> pretty vs\n      <+> parens (pretty (etype et))\n      <+> list (map pretty ts)\n\ninstance Pretty TermTypes where\n  pretty (TermTypes (Just t) cs es) = \"TermTypes\" <+> (align . vsep $ (parens (pretty t) : map pretty cs <> map pretty es))\n  pretty (TermTypes Nothing cs es) = \"TermTypes\" <+> \"?\" <> (align . vsep $ (map pretty cs <> map pretty es))\n\ninstance Pretty SignatureSet where\n  pretty (Monomorphic t) = pretty t\n  pretty (Polymorphic cls v t ts) =\n    \"class\"\n      <+> pretty cls\n      <+> (align . vsep $ (pretty v <+> \"::\" <+> parens (pretty t)) : map pretty ts)\n\ninstance Pretty GammaIndex where\n  pretty (VarG tv) = \"VarG:\" <+> pretty tv\n  pretty (ExistG tv ([], _) ([], _)) = angles (pretty tv)\n  pretty (ExistG tv (ts, _) (rs, _)) =\n    \"ExistG:\"\n      <+> pretty tv\n      <+> list (map (parens . pretty) ts)\n      <+> list (map ((\\(x, y) -> tupled [x, y]) . bimap pretty pretty) rs)\n  pretty (SolvedG tv t) = \"SolvedG:\" <+> pretty tv <+> \"=\" <+> pretty t\n  pretty (MarkG tv) = \"MarkG:\" <+> pretty tv\n  pretty (SrcG (Source ev1 lang _ _ _ _ _ _ _)) = \"SrcG:\" <+> pretty ev1 <+> viaShow lang\n  pretty (AnnG v t) = pretty v <+> \"::\" <+> pretty t\n"
  },
  {
    "path": "library/Morloc/Namespace/Type.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Namespace.Type\nDescription : Type system types and partial order logic\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nCore type representations. 'Type' is the ground type (no quantifiers) used\nafter type erasure. 'TypeU' is the full type with existentials ('ExistU') and\nuniversals ('ForallU'), used during typechecking and in type signatures.\n'EType' extends 'TypeU' with typeclass constraints and documentation.\n\nA partial order is defined on 'TypeU' via \"Data.PartialOrd\" where @t1 <= t2@\nmeans t1 is at least as specific as t2 (t1 is a subtype of t2).\n-}\nmodule Morloc.Namespace.Type\n  ( -- * Types\n    NamType (..)\n  , Type (..)\n  , TypeU (..)\n  , OpenOrClosed (..)\n  , extractKey\n  , type2typeu\n  , EType (..)\n  , unresolvedType2type\n\n    -- * Effect types\n  , EffectLabel\n  , EffectSet (..)\n  , resolveEffectSet\n  , emptyEffectSet\n  , ioEffectSet\n  , effectSubsetOf\n\n    -- * Docstring related types\n  , CliOpt (..)\n  , ArgDoc (..)\n  , ArgDocVars (..)\n  , ExprTypeE (..)\n\n    -- * Scope\n  , Scope\n\n    -- * Type extensions\n  , Constraint (..)\n\n    -- * Predicates\n  , containsUnk\n\n    -- * Typeclasses\n  , Typelike (..)\n\n    -- * kludge\n  , newVariable\n\n    -- * Partial order logic\n  , isSubtypeOf\n  , equivalent\n  , mostGeneral\n  , mostSpecific\n  , mostSpecificSubtypes\n  , substituteFirst\n  , findFirst\n  ) where\n\nimport qualified Data.List as DL\nimport Data.Map.Strict (Map)\nimport qualified Data.PartialOrd as P\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Data.Text as DT\nimport Morloc.Data.Doc\nimport Morloc.Namespace.Prim\n\n---- Effect types\n\n-- | A named effect label (e.g., \"IO\", \"Random\", \"Error\")\ntype EffectLabel = Text\n\n-- | An effect set for use during typechecking. May contain concrete labels,\n-- unsolved effect variables, or unions of effect sets.\ndata EffectSet\n  = EffectSet (Set.Set EffectLabel)\n  | EffectVar TVar\n  | EffectUnion EffectSet EffectSet\n  deriving (Show, Ord, Eq)\n\n-- | Resolve an effect set to concrete labels. Unsolved variables resolve to empty.\nresolveEffectSet :: EffectSet -> Set.Set EffectLabel\nresolveEffectSet (EffectSet labels) = labels\nresolveEffectSet (EffectVar _) = Set.empty\nresolveEffectSet (EffectUnion a b) = Set.union (resolveEffectSet a) (resolveEffectSet b)\n\n-- | An empty effect set (no effects)\nemptyEffectSet :: EffectSet\nemptyEffectSet = EffectSet Set.empty\n\n-- | An IO effect set\nioEffectSet :: EffectSet\nioEffectSet = EffectSet (Set.singleton \"IO\")\n\n-- | Check if one effect set is a subset of another (resolved labels).\n-- Unsolved EffectVar resolves to empty, so EffectVar is a subset of everything.\neffectSubsetOf :: EffectSet -> EffectSet -> Bool\neffectSubsetOf e1 e2 = Set.isSubsetOf (resolveEffectSet e1) (resolveEffectSet e2)\n\n---- Type definitions\n\n{- | Scope maps each type name to its definitions: the type parameters, the\nbody type, documentation, and whether the definition is terminal (won't be\nexpanded further during type resolution).\n-}\ntype Scope =\n  Map\n    TVar\n    [ ( [Either (TVar, Kind) TypeU] -- type parameters (generic for left, specific for right)\n      , TypeU\n      , ArgDoc\n      , Bool -- True if this is a \"terminal\" type (won't be reduced further)\n      )\n    ]\n\n-- | Flavors of named (keyed) types\ndata NamType\n  = -- | Structural record with named fields\n    NamRecord\n  | -- | Nominal object type\n    NamObject\n  | -- | Tabular type (columns as fields)\n    NamTable\n  deriving (Show, Ord, Eq)\n\n{- | Ground type with no quantifiers. Produced after type erasure and used in\ncode generation where all type variables have been resolved.\n-}\ndata Type\n  = UnkT TVar\n  | VarT TVar\n  | FunT [Type] Type\n  | AppT Type [Type]\n  | NamT NamType TVar [Type] [(Key, Type)]\n  | EffectT (Set.Set EffectLabel) Type\n  | OptionalT Type\n  | NatLitT Integer\n  | NatAddT Type Type\n  | NatMulT Type Type\n  | NatSubT Type Type\n  | NatDivT Type Type\n  deriving (Show, Ord, Eq)\n\ndata OpenOrClosed = Open | Closed\n  deriving (Show, Ord, Eq)\n\n{- | Full type with quantifiers. 'ExistU' represents existential variables\n(solved during unification), 'ForallU' represents universally quantified\nvariables. This is the primary type representation during typechecking.\n-}\ndata TypeU\n  = VarU TVar\n  | NatVarU TVar -- ^ Nat-kinded variable, never quantified by ForallU\n  | ExistU\n      TVar\n      ([TypeU], OpenOrClosed)\n      ([(Key, TypeU)], OpenOrClosed)\n  | ForallU TVar TypeU\n  | FunU [TypeU] TypeU\n  | AppU TypeU [TypeU]\n  | NamU NamType TVar [TypeU] [(Key, TypeU)]\n  | EffectU EffectSet TypeU\n  | OptionalU TypeU\n  | NatLitU Integer\n  | NatAddU TypeU TypeU\n  | NatMulU TypeU TypeU\n  | NatSubU TypeU TypeU\n  | NatDivU TypeU TypeU\n  | LabeledU TVar TypeU -- ^ Transient: m:Int -> LabeledU (TV \"m\") Int, stripped in desugar\n  deriving (Show, Ord, Eq)\n\n{- | Extended Type that may represent a language specific type as well as sets\nof properties and constrains.\n-}\ndata EType\n  = EType\n  { etype :: TypeU\n  , econs :: Set.Set Constraint\n  , edocs :: ArgDoc\n  , enatLabels :: Map TVar Int -- ^ Nat var name -> argument position index (from m:Int syntax)\n  }\n  deriving (Show, Eq, Ord)\n\ndata Constraint = Constraint ClassName [TypeU]\n  deriving (Show, Eq, Ord)\n\n-- a CLI option that takes an argument\ndata CliOpt\n  = CliOptShort Char\n  | CliOptLong Text\n  | CliOptBoth Char Text\n  deriving (Show, Ord, Eq)\n\ndata ArgDocVars = ArgDocVars\n  { docLines :: [Text]\n  , docName :: Maybe Text\n  , docLiteral :: Maybe Bool\n  , docUnroll :: Maybe Bool\n  , docDefault :: Maybe Text\n  , docMetavar :: Maybe Text\n  , docArg :: Maybe CliOpt\n  , docTrue :: Maybe CliOpt\n  , docFalse :: Maybe CliOpt\n  , docReturn :: Maybe Text\n  }\n  deriving (Show, Ord, Eq)\n\ndata ArgDoc\n  = ArgDocRec ArgDocVars [(Key, ArgDocVars)]\n  | ArgDocSig\n      ArgDocVars\n      [ArgDocVars]\n      ArgDocVars\n  | ArgDocAlias ArgDocVars\n  deriving (Show, Ord, Eq)\n\n-- Wraps all information stored in a type definition\ndata ExprTypeE = ExprTypeE\n  { exprTypeConcreteForm :: Maybe (Lang, Bool)\n  , exprTypeName :: TVar\n  , exprTypeParams :: [Either (TVar, Kind) TypeU]\n  , exprTypeType :: TypeU\n  , exprTypeDoc :: ArgDoc\n  }\n  deriving (Show, Ord, Eq)\n\n---- Typeclasses\n\nclass Typelike a where\n  typeOf :: a -> Type\n\n  free :: a -> Set.Set a\n\n  substituteTVar :: TVar -> a -> a -> a\n\n  nargs :: a -> Int\n  nargs (typeOf -> FunT ts _) = length ts\n  nargs _ = 0\n\n  normalizeType :: a -> a\n\n---- Typeclass instances\n\ninstance Defaultable ArgDocVars where\n  defaultValue =\n    ArgDocVars\n      { docLines = []\n      , docName = Nothing\n      , docLiteral = Nothing\n      , docUnroll = Nothing\n      , docDefault = Nothing\n      , docMetavar = Nothing\n      , docArg = Nothing\n      , docTrue = Nothing\n      , docFalse = Nothing\n      , docReturn = Nothing\n      }\n\ninstance Typelike Type where\n  typeOf = id\n\n  substituteTVar v0 r0 t0 = sub t0\n    where\n      sub t@(UnkT _) = t\n      sub t@(VarT v)\n        | v0 == v = r0\n        | otherwise = t\n      sub (FunT ts t) = FunT (map sub ts) (sub t)\n      sub (AppT v ts) = AppT (sub v) (map sub ts)\n      sub (NamT r n ps es) = NamT r n ps [(k, sub t) | (k, t) <- es]\n      sub (EffectT effs t) = EffectT effs (sub t)\n      sub (OptionalT t) = OptionalT (sub t)\n      sub t@(NatLitT _) = t\n      sub (NatAddT a b) = NatAddT (sub a) (sub b)\n      sub (NatMulT a b) = NatMulT (sub a) (sub b)\n      sub (NatSubT a b) = NatSubT (sub a) (sub b)\n      sub (NatDivT a b) = NatDivT (sub a) (sub b)\n\n  free (UnkT _) = Set.empty\n  free v@(VarT _) = Set.singleton v\n  free (FunT ts t) = Set.unions (map free (t : ts))\n  free (AppT t ts) = Set.unions (map free (t : ts))\n  free (NamT _ _ _ es) = Set.unions (map (free . snd) es)\n  free (EffectT _ t) = free t\n  free (OptionalT t) = free t\n  free (NatLitT _) = Set.empty\n  free (NatAddT a b) = Set.union (free a) (free b)\n  free (NatMulT a b) = Set.union (free a) (free b)\n  free (NatSubT a b) = Set.union (free a) (free b)\n  free (NatDivT a b) = Set.union (free a) (free b)\n\n  normalizeType (FunT ts1 (FunT ts2 ft)) = normalizeType $ FunT (ts1 <> ts2) ft\n  normalizeType (AppT t ts) = AppT (normalizeType t) (map normalizeType ts)\n  normalizeType (NamT n v ds ks) = NamT n v (map normalizeType ds) (zip (map fst ks) (map (normalizeType . snd) ks))\n  normalizeType (EffectT effs t) = EffectT effs (normalizeType t)\n  normalizeType (OptionalT t) = OptionalT (normalizeType t)\n  normalizeType (NatAddT a b) = NatAddT (normalizeType a) (normalizeType b)\n  normalizeType (NatMulT a b) = NatMulT (normalizeType a) (normalizeType b)\n  normalizeType (NatSubT a b) = NatSubT (normalizeType a) (normalizeType b)\n  normalizeType (NatDivT a b) = NatDivT (normalizeType a) (normalizeType b)\n  normalizeType t = t\n\ninstance Typelike TypeU where\n  typeOf (VarU v) = VarT v\n  typeOf (NatVarU _) = NatLitT 0\n  typeOf (ExistU _ (ps, _) (rs@(_ : _), _)) = NamT NamRecord (TV \"Record\") (map typeOf ps) (map (second typeOf) rs)\n  typeOf (ExistU v _ _) = typeOf (ForallU v (VarU v))\n  typeOf (ForallU v t) = substituteTVar v (UnkT v) (typeOf t)\n  typeOf (FunU ts t) = FunT (map typeOf ts) (typeOf t)\n  typeOf (AppU t ts) = AppT (typeOf t) (map typeOf ts)\n  typeOf (NamU n o ps rs) = NamT n o (map typeOf ps) (zip (map fst rs) (map (typeOf . snd) rs))\n  typeOf (EffectU effs t) = EffectT (resolveEffectSet effs) (typeOf t)\n  typeOf (OptionalU t) = OptionalT (typeOf t)\n  typeOf (NatLitU n) = NatLitT n\n  typeOf (NatAddU a b) = NatAddT (typeOf a) (typeOf b)\n  typeOf (NatMulU a b) = NatMulT (typeOf a) (typeOf b)\n  typeOf (NatSubU a b) = NatSubT (typeOf a) (typeOf b)\n  typeOf (NatDivU a b) = NatDivT (typeOf a) (typeOf b)\n  typeOf (LabeledU _ t) = typeOf t\n\n  free v@(VarU _) = Set.singleton v\n  free (NatVarU _) = Set.empty\n  free v@(ExistU _ ([], _) (rs, _)) = Set.unions $ Set.singleton v : map (free . snd) rs\n  free (ExistU v (ts, _) _) = Set.unions $ Set.singleton (AppU (VarU v) ts) : map free ts\n  free (ForallU v t) = Set.delete (VarU v) (free t)\n  free (FunU ts t) = Set.unions $ map free (t : ts)\n  free (AppU t ts) = Set.unions $ map free (t : ts)\n  free (NamU _ _ ps rs) = Set.unions $ map free (map snd rs <> ps)\n  free (EffectU _ t) = free t\n  free (OptionalU t) = free t\n  free (NatLitU _) = Set.empty\n  free (NatAddU a b) = Set.union (free a) (free b)\n  free (NatMulU a b) = Set.union (free a) (free b)\n  free (NatSubU a b) = Set.union (free a) (free b)\n  free (NatDivU a b) = Set.union (free a) (free b)\n  free (LabeledU _ t) = free t\n\n  substituteTVar v (ForallU q r) t =\n    if Set.member (VarU q) (free t)\n      then\n        let q' = newVariable r t\n            r' = substituteTVar q (VarU q') r\n         in ForallU q' (substituteTVar v r' t)\n      else\n        ForallU q (substituteTVar v r t)\n  substituteTVar _ _ t@(NatVarU _) = t\n  substituteTVar v0 r0 t0 = sub t0\n    where\n      sub t@(VarU v)\n        | v0 == v = r0\n        | otherwise = t\n      sub t@(NatVarU _) = t\n      sub (ExistU v (map sub -> ps, pc) (map (second sub) -> rs, rc)) = ExistU v (ps, pc) (rs, rc)\n      sub (ForallU v t)\n        | v0 == v = ForallU v t\n        | otherwise = ForallU v (sub t)\n      sub (FunU ts t) = FunU (map sub ts) (sub t)\n      sub (AppU t ts) = AppU (sub t) (map sub ts)\n      sub (NamU r n ps rs) = NamU r n (map sub ps) [(k, sub t) | (k, t) <- rs]\n      sub (EffectU effs t) = EffectU effs (sub t)\n      sub (OptionalU t) = OptionalU (sub t)\n      sub t@(NatLitU _) = t\n      sub (NatAddU a b) = NatAddU (sub a) (sub b)\n      sub (NatMulU a b) = NatMulU (sub a) (sub b)\n      sub (NatSubU a b) = NatSubU (sub a) (sub b)\n      sub (NatDivU a b) = NatDivU (sub a) (sub b)\n      sub (LabeledU n t) = LabeledU n (sub t)\n\n  normalizeType (FunU ts1 (FunU ts2 ft)) = normalizeType $ FunU (ts1 <> ts2) ft\n  normalizeType (AppU t ts) = AppU (normalizeType t) (map normalizeType ts)\n  normalizeType (NamU n v ds ks) = NamU n v (map normalizeType ds) (zip (map fst ks) (map (normalizeType . snd) ks))\n  normalizeType (ForallU v t) = ForallU v (normalizeType t)\n  normalizeType (ExistU v (map normalizeType -> ps, pc) (map (second normalizeType) -> rs, rc)) = ExistU v (ps, pc) (rs, rc)\n  normalizeType (EffectU effs t) = EffectU effs (normalizeType t)\n  normalizeType (OptionalU t) = OptionalU (normalizeType t)\n  normalizeType (NatAddU a b) = NatAddU (normalizeType a) (normalizeType b)\n  normalizeType (NatMulU a b) = NatMulU (normalizeType a) (normalizeType b)\n  normalizeType (NatSubU a b) = NatSubU (normalizeType a) (normalizeType b)\n  normalizeType (NatDivU a b) = NatDivU (normalizeType a) (normalizeType b)\n  normalizeType t@(NatVarU _) = t\n  normalizeType (LabeledU n t) = LabeledU n (normalizeType t)\n  normalizeType t = t\n\n----- Partial order logic\n\ninstance P.PartialOrd TypeU where\n  (<=) (VarU v1) (VarU v2) = v1 == v2\n  (<=) (NatVarU v1) (NatVarU v2) = v1 == v2\n  (<=) (ExistU v1 (ts1, _) (rs1, _)) (ExistU v2 (ts2, _) (rs2, _)) =\n    v1 == v2\n      && length ts1 == length ts2\n      && and (zipWith (P.<=) ts1 ts2)\n      && and [maybe False (t1 P.<=) (lookup k rs2) | (k, t1) <- rs1]\n  (<=) (ForallU v t1) t2\n    | (P.==) (ForallU v t1) t2 = True\n    | otherwise = (P.<=) (substituteFirst v t1 t2) t2\n  (<=) (FunU (t11 : rs1) t12) (FunU (t21 : rs2) t22) = t11 P.<= t21 && FunU rs1 t12 P.<= FunU rs2 t22\n  (<=) (FunU [] t12) (FunU [] t22) = t12 P.<= t22\n  (<=) (AppU t1 (t11 : rs1)) (AppU t2 (t21 : rs2)) = t11 P.<= t21 && AppU t1 rs1 P.<= AppU t2 rs2\n  (<=) (AppU t1 []) (AppU t2 []) = t1 P.<= t2\n  (<=) (NamU o1 n1 ps1 ((k1, e1) : rs1)) (NamU o2 n2 ps2 es2) =\n    case DL.partition ((== k1) . fst) es2 of\n      ([(_, e2)], rs2) -> e1 P.<= e2 && NamU o1 n1 ps1 rs1 P.<= NamU o2 n2 ps2 rs2\n      _ -> False\n  (<=) (NamU o1 n1 ps1 []) (NamU o2 n2 ps2 []) =\n    o1 == o2 && n1 == n2 && length ps1 == length ps2\n  (<=) (EffectU e1 t1) (EffectU e2 t2) = e1 == e2 && t1 P.<= t2\n  (<=) (OptionalU t1) (OptionalU t2) = t1 P.<= t2\n  (<=) (NatLitU n1) (NatLitU n2) = n1 == n2\n  (<=) (NatAddU a1 b1) (NatAddU a2 b2) = a1 P.<= a2 && b1 P.<= b2\n  (<=) (NatMulU a1 b1) (NatMulU a2 b2) = a1 P.<= a2 && b1 P.<= b2\n  (<=) (NatSubU a1 b1) (NatSubU a2 b2) = a1 P.<= a2 && b1 P.<= b2\n  (<=) (NatDivU a1 b1) (NatDivU a2 b2) = a1 P.<= a2 && b1 P.<= b2\n  (<=) (LabeledU _ t1) t2 = t1 P.<= t2\n  (<=) t1 (LabeledU _ t2) = t1 P.<= t2\n  (<=) _ _ = False\n\n  (==) (ForallU v1 t1) (ForallU v2 t2) =\n    if Set.member (VarU v1) (free t2)\n      then\n        let v = newVariable t1 t2\n         in (P.==) (substituteTVar v1 (VarU v) t1) (substituteTVar v2 (VarU v) t2)\n      else (P.==) t1 (substituteTVar v2 (VarU v1) t2)\n  (==) a b = a == b\n\nsubstituteFirst :: TVar -> TypeU -> TypeU -> TypeU\nsubstituteFirst v t1 t2 = case findFirst v t1 t2 of\n  (Just t) -> substituteTVar v t t1\n  Nothing -> t1\n\nfindFirst :: TVar -> TypeU -> TypeU -> Maybe TypeU\nfindFirst v = f\n  where\n    f (VarU v') t2\n      | v == v' = Just t2\n      | otherwise = Nothing\n    f (NatVarU _) _ = Nothing\n    f (ForallU v1 t1) (ForallU v2 t2)\n      | v == v1 = Nothing\n      | otherwise = f t1 (substituteTVar v2 (VarU v1) t2)\n    f (ForallU v1 t1) t2\n      | v == v1 = Nothing\n      | otherwise = f (substituteTVar v1 (VarU v1) t1) t2\n    f (FunU ts1 t1) (FunU ts2 t2) =\n      foldl firstOf Nothing (zipWith f (ts1 <> [t1]) (ts2 <> [t2]))\n    f (AppU t1 ts1) (AppU t2 ts2) =\n      foldl firstOf Nothing (zipWith f (t1 : ts1) (t2 : ts2))\n    f (NamU o1 n1 ps1 ((k1, e1) : rs1)) (NamU o2 n2 ps2 es2) =\n      case DL.partition ((== k1) . fst) es2 of\n        ([(_, e2)], rs2) -> firstOf (f e1 e2) (f (NamU o1 n1 ps1 rs1) (NamU o2 n2 ps2 rs2))\n        _ -> Nothing\n    f (EffectU _ t1) (EffectU _ t2) = f t1 t2\n    f (OptionalU t1) (OptionalU t2) = f t1 t2\n    f (NatAddU a1 b1) (NatAddU a2 b2) = firstOf (f a1 a2) (f b1 b2)\n    f (NatMulU a1 b1) (NatMulU a2 b2) = firstOf (f a1 a2) (f b1 b2)\n    f (NatSubU a1 b1) (NatSubU a2 b2) = firstOf (f a1 a2) (f b1 b2)\n    f (NatDivU a1 b1) (NatDivU a2 b2) = firstOf (f a1 a2) (f b1 b2)\n    f (LabeledU _ t1) t2 = f t1 t2\n    f t1 (LabeledU _ t2) = f t1 t2\n    f _ _ = Nothing\n\n    firstOf :: Maybe a -> Maybe a -> Maybe a\n    firstOf (Just x) _ = Just x\n    firstOf _ (Just x) = Just x\n    firstOf _ _ = Nothing\n\n-- | is t1 a generalization of t2?\nisSubtypeOf :: TypeU -> TypeU -> Bool\nisSubtypeOf t1 t2 = case P.compare t1 t2 of\n  (Just x) -> x <= EQ\n  _ -> False\n\nequivalent :: TypeU -> TypeU -> Bool\nequivalent t1 t2 = isSubtypeOf t1 t2 && isSubtypeOf t2 t1\n\n-- | find the most specific subtypes\nmostSpecificSubtypes :: TypeU -> [TypeU] -> [TypeU]\nmostSpecificSubtypes t ts = mostSpecific $ filter (`isSubtypeOf` t) ts\n\n-- | find all types that are not greater than any other type\nmostGeneral :: [TypeU] -> [TypeU]\nmostGeneral = P.minima\n\n-- | find all types that are not less than any other type\nmostSpecific :: [TypeU] -> [TypeU]\nmostSpecific = P.maxima\n\n---- Utility functions\n\nextractKey :: TypeU -> TVar\nextractKey (VarU v) = v\nextractKey (NatVarU v) = v\nextractKey (ForallU _ t) = extractKey t\nextractKey (AppU t _) = extractKey t\nextractKey (NamU _ v _ _) = v\nextractKey (ExistU v _ _) = v\nextractKey (EffectU _ t) = extractKey t\nextractKey (OptionalU t) = extractKey t\nextractKey (NatLitU _) = TV \"Nat\"\nextractKey (NatAddU _ _) = TV \"Nat\"\nextractKey (NatMulU _ _) = TV \"Nat\"\nextractKey (NatSubU _ _) = TV \"Nat\"\nextractKey (NatDivU _ _) = TV \"Nat\"\nextractKey (LabeledU _ t) = extractKey t\nextractKey t = error $ \"Cannot currently handle functional type imports: \" <> show t\n\ntype2typeu :: Type -> TypeU\ntype2typeu (VarT v) = VarU v\ntype2typeu (UnkT v) = ForallU v (VarU v)\ntype2typeu (FunT ts t) = FunU (map type2typeu ts) (type2typeu t)\ntype2typeu (AppT v ts) = AppU (type2typeu v) (map type2typeu ts)\ntype2typeu (NamT o n ps rs) = NamU o n (map type2typeu ps) [(k, type2typeu x) | (k, x) <- rs]\ntype2typeu (EffectT effs t) = EffectU (EffectSet effs) (type2typeu t)\ntype2typeu (OptionalT t) = OptionalU (type2typeu t)\ntype2typeu (NatLitT n) = NatLitU n\ntype2typeu (NatAddT a b) = NatAddU (type2typeu a) (type2typeu b)\ntype2typeu (NatMulT a b) = NatMulU (type2typeu a) (type2typeu b)\ntype2typeu (NatSubT a b) = NatSubU (type2typeu a) (type2typeu b)\ntype2typeu (NatDivT a b) = NatDivU (type2typeu a) (type2typeu b)\n\nunresolvedType2type :: TypeU -> Type\nunresolvedType2type (VarU v) = VarT v\nunresolvedType2type (NatVarU _) = NatLitT 0\nunresolvedType2type ExistU {} = error \"Cannot cast existential type to Type\"\nunresolvedType2type (ForallU _ _) = error \"Cannot cast universal type as Type\"\nunresolvedType2type (FunU ts t) = FunT (map unresolvedType2type ts) (unresolvedType2type t)\nunresolvedType2type (AppU v ts) = AppT (unresolvedType2type v) (map unresolvedType2type ts)\nunresolvedType2type (NamU t n ps rs) = NamT t n (map unresolvedType2type ps) [(k, unresolvedType2type e) | (k, e) <- rs]\nunresolvedType2type (EffectU effs t) = EffectT (resolveEffectSet effs) (unresolvedType2type t)\nunresolvedType2type (OptionalU t) = OptionalT (unresolvedType2type t)\nunresolvedType2type (NatLitU n) = NatLitT n\nunresolvedType2type (NatAddU a b) = NatAddT (unresolvedType2type a) (unresolvedType2type b)\nunresolvedType2type (NatMulU a b) = NatMulT (unresolvedType2type a) (unresolvedType2type b)\nunresolvedType2type (NatSubU a b) = NatSubT (unresolvedType2type a) (unresolvedType2type b)\nunresolvedType2type (NatDivU a b) = NatDivT (unresolvedType2type a) (unresolvedType2type b)\nunresolvedType2type (LabeledU _ t) = unresolvedType2type t\n\n-- | get a fresh variable name that is not used in t1 or t2\nnewVariable :: TypeU -> TypeU -> TVar\nnewVariable t1 t2 = findNew variables (Set.union (allVars t1) (allVars t2))\n  where\n    variables = [1 ..] >>= flip replicateM ['a' .. 'z']\n\n    findNew :: [String] -> Set.Set TypeU -> TVar\n    findNew [] _ = error \"No variable in the infinite list was OK with you? Sheesh, picky.\"\n    findNew (x : xs) ts\n      | Set.member (VarU v) ts = findNew xs ts\n      | otherwise = v\n      where\n        v = TV $ DT.pack x\n\n    allVars :: TypeU -> Set.Set TypeU\n    allVars (ForallU v t) = Set.union (Set.singleton (VarU v)) (allVars t)\n    allVars (NatVarU v) = Set.singleton (NatVarU v)\n    allVars (EffectU _ t) = allVars t\n    allVars (OptionalU t) = allVars t\n    allVars (NatAddU a b) = Set.union (allVars a) (allVars b)\n    allVars (NatMulU a b) = Set.union (allVars a) (allVars b)\n    allVars (NatSubU a b) = Set.union (allVars a) (allVars b)\n    allVars (NatDivU a b) = Set.union (allVars a) (allVars b)\n    allVars (LabeledU _ t) = allVars t\n    allVars t = free t\n\n{- | Check whether a ground type contains any unknown (unresolved) type variables.\nThese arise from erasing 'ForallU' during 'typeOf', indicating a polymorphic type.\n-}\ncontainsUnk :: Type -> Bool\ncontainsUnk (UnkT _) = True\ncontainsUnk (VarT _) = False\ncontainsUnk (FunT ts t) = any containsUnk ts || containsUnk t\ncontainsUnk (AppT t ts) = containsUnk t || any containsUnk ts\ncontainsUnk (NamT _ _ ps rs) = any containsUnk ps || any (containsUnk . snd) rs\ncontainsUnk (EffectT _ t) = containsUnk t\ncontainsUnk (OptionalT t) = containsUnk t\ncontainsUnk (NatLitT _) = False\ncontainsUnk (NatAddT a b) = containsUnk a || containsUnk b\ncontainsUnk (NatMulT a b) = containsUnk a || containsUnk b\ncontainsUnk (NatSubT a b) = containsUnk a || containsUnk b\ncontainsUnk (NatDivT a b) = containsUnk a || containsUnk b\n\n----- Pretty instances -------------------------------------------------------\n\n-- | Records, objects, and tables render identically in type signatures —\n-- just the type name optionally followed by parameters. The record/object/\n-- table distinction is surfaced separately in the CLI help's type\n-- definition block, where each named type is listed with its tag and\n-- fields. Using a trivial instance here keeps signatures uncluttered.\ninstance Pretty NamType where\n  pretty _ = mempty\n\ninstance Pretty Type where\n  pretty t0 = f True t0\n    where\n      f _ (UnkT v) = pretty v\n      f _ (VarT v) = pretty v\n      f _ (AppT (VarT (TV \"List\")) [t]) = \"[\" <> f True t <> \"]\"\n      f _ (AppT (VarT (TV \"Tuple2\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppT (VarT (TV \"Tuple3\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppT (VarT (TV \"Tuple4\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppT (VarT (TV \"Tuple5\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppT (VarT (TV \"Tuple6\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppT (VarT (TV \"Tuple7\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppT (VarT (TV \"Tuple8\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (EffectT effs t)\n        | Set.null effs = \"{\" <> f True t <> \"}\"\n        | otherwise = \"<\" <> hcat (punctuate \",\" (map pretty (Set.toList effs))) <> \">\" <+> f False t\n      f _ (OptionalT t) = \"?\" <> f False t\n      f _ (NatLitT n) = pretty n\n      f _ (NatAddT a b) = \"(\" <> f True a <+> \"+\" <+> f True b <> \")\"\n      f _ (NatMulT a b) = \"(\" <> f True a <+> \"*\" <+> f True b <> \")\"\n      f _ (NatSubT a b) = \"(\" <> f True a <+> \"-\" <+> f True b <> \")\"\n      f _ (NatDivT a b) = \"(\" <> f True a <+> \"/\" <+> f True b <> \")\"\n      f False t = parens (f True t)\n      f _ (FunT [] t) = \"() -> \" <> f False t\n      f _ (FunT ts t) = hsep $ punctuate \" -> \" (map (f False) (ts <> [t]))\n      f _ (AppT t ts) = hsep (map (f False) (t : ts))\n      -- Named types (records / objects / tables) render as \"name [p1 ...]\",\n      -- Haskell-style. No tag and no inline field block; the record/table\n      -- distinction and the field list are surfaced in the CLI help's\n      -- type definition section and in typeclass-aware contexts.\n      f _ (NamT _ n ps _) =\n        let params = if null ps\n                     then mempty\n                     else space <> hsep (map (f False) ps)\n        in pretty n <> params\n\ninstance Pretty TypeU where\n  pretty t0 = f True t0\n    where\n      f _ (VarU v) = pretty v\n      f _ (NatVarU v) = pretty v\n      f _ (ExistU v ([], _) ([], _)) = angles $ pretty v\n      f _ (AppU (VarU (TV \"List\")) [t]) = \"[\" <> f True t <> \"]\"\n      f _ (AppU (VarU (TV \"Tuple2\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppU (VarU (TV \"Tuple3\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppU (VarU (TV \"Tuple4\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppU (VarU (TV \"Tuple5\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppU (VarU (TV \"Tuple6\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppU (VarU (TV \"Tuple7\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (AppU (VarU (TV \"Tuple8\")) ts) = encloseSep \"(\" \")\" \", \" (map (f True) ts)\n      f _ (EffectU effs t) =\n        let labels = resolveEffectSet effs\n         in if Set.null labels\n              then \"{\" <> f True t <> \"}\"\n              else \"<\" <> hcat (punctuate \",\" (map pretty (Set.toList labels))) <> \">\" <+> f False t\n      f _ (OptionalU t) = \"?\" <> f False t\n      f _ (NatLitU n) = pretty n\n      f _ (NatAddU a b) = \"(\" <> f True a <+> \"+\" <+> f True b <> \")\"\n      f _ (NatMulU a b) = \"(\" <> f True a <+> \"*\" <+> f True b <> \")\"\n      f _ (NatSubU a b) = \"(\" <> f True a <+> \"-\" <+> f True b <> \")\"\n      f _ (NatDivU a b) = \"(\" <> f True a <+> \"/\" <+> f True b <> \")\"\n      f _ (LabeledU (TV n) t) = pretty n <> \":\" <> f False t\n      f False t = parens (f True t)\n      f _ (ExistU v (ts, _) (rs, _)) =\n        angles $\n          pretty v\n            <+> list (map (f False) ts)\n            <+> list (map ((\\(x, y) -> tupled [x, y]) . bimap pretty (f True)) rs)\n      f _ (FunU [] t) = \"() -> \" <> f False t\n      f _ (FunU ts t) = hsep $ punctuate \" ->\" (map (f False) (ts <> [t]))\n      f _ (ForallU v t) = \"forall\" <+> pretty v <+> \".\" <+> f True t\n      f _ (AppU t ts) = hsep $ map (f False) (t : ts)\n      -- See the NamT case in 'Pretty Type' above for the rendering rules.\n      f _ (NamU _ n ps _) =\n        let params = if null ps\n                     then mempty\n                     else space <> hsep (map (f False) ps)\n        in pretty n <> params\n\ninstance Pretty EType where\n  pretty (EType t (Set.toList -> cs) _ _) = case cs of\n    [] -> pretty t\n    [c] -> pretty c <+> \"=>\" <+> pretty t\n    _ -> tupled (map pretty cs) <+> \"=>\" <+> pretty t\n\ninstance Pretty Constraint where\n  pretty (Constraint cls ts) = pretty cls <+> hsep (map pretty ts)\n"
  },
  {
    "path": "library/Morloc/ProgramBuilder/Build.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.ProgramBuilder.Build\nDescription : Compile pool source files and assemble the final executable\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nOrchestrates the @morloc make@ build step: writes generated pool source\nfiles, compiles them with the appropriate language toolchain, copies the\npre-compiled nexus binary, and writes the manifest file.\n-}\nmodule Morloc.ProgramBuilder.Build\n  ( buildProgram\n  ) where\n\nimport Control.Monad.Except (catchError, throwError)\nimport Morloc.Data.Doc ((<+>), vsep, pretty)\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport qualified Morloc.System as MS\nimport qualified System.Directory as SD\nimport System.Process (callProcess)\n\nbuildProgram :: (Script, [Script]) -> MorlocMonad ()\nbuildProgram (nexus, pools) = do\n  installDir <- MM.gets stateInstallDir\n  case installDir of\n    Just dir -> do\n      -- When installing, copy includes and the morloc script into the install\n      -- directory, cd there, and build as normal. This avoids leaving artifacts\n      -- in CWD and ensures the installed pools are a fresh build.\n      force <- MM.gets stateInstallForce\n      dirExists <- liftIO $ SD.doesDirectoryExist dir\n      when (dirExists && not force) $ do\n        contents <- liftIO $ SD.listDirectory dir\n        unless (null contents) $\n          MM.throwSystemError $ \"Install directory already exists: \" <> pretty dir\n            <> \". Use --force to overwrite.\"\n      when (dirExists && force) $\n        liftIO $ SD.removeDirectoryRecursive dir\n      liftIO $ SD.createDirectoryIfMissing True dir\n      origDir <- liftIO SD.getCurrentDirectory\n      liftIO $ SD.setCurrentDirectory dir\n      mapM_ build (nexus : pools) `finally` liftIO (SD.setCurrentDirectory origDir)\n    Nothing ->\n      mapM_ build (nexus : pools)\n\n-- | catch/finally for MorlocMonad\nfinally :: MorlocMonad a -> MorlocMonad () -> MorlocMonad a\nfinally action cleanup = do\n  result <- catchError (fmap Right action) (return . Left)\n  cleanup\n  case result of\n    Right a -> return a\n    Left e -> throwError e\n\nbuild :: Script -> MorlocMonad ()\nbuild s = do\n  (_ :/ tree) <- liftIO $ MS.writeDirectoryWith (\\f c -> MT.writeFile f (unCode c)) (scriptCode s)\n  case failures tree of\n    [] -> return ()\n    errs -> MM.throwSystemError $ \"Failed to write generated files:\" <+> vsep\n      [pretty (show e) | Failed _ e <- errs]\n  mapM_ runSysCommand (scriptMake s)\n\nrunSysCommand :: SysCommand -> MorlocMonad ()\nrunSysCommand (SysExe path) = liftIO $ callProcess \"chmod\" [\"755\", path]\nrunSysCommand (SysRun (Code cmd)) = MM.runCommand \"runSysCommand\" cmd\nrunSysCommand other =\n  MM.throwSystemError $ \"Unsupported SysCommand: \" <> pretty (show other)\n"
  },
  {
    "path": "library/Morloc/ProgramBuilder/Install.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.ProgramBuilder.Install\nDescription : Install compiled morloc programs system-wide\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.ProgramBuilder.Install\n  ( installProgram\n  , validateIncludeScope\n  , validateIncludeCoverage\n  , copyAllFiltered\n  , cleanIgnoredFiles\n  ) where\n\nimport Control.Exception (throwIO)\nimport Control.Monad (forM_, when, unless)\nimport Data.List (isInfixOf, isPrefixOf, isSuffixOf)\nimport Data.Maybe (fromMaybe)\nimport Data.Text (Text)\nimport qualified Data.Text as T\nimport qualified Morloc.Completion as Completion\nimport System.Directory\n  ( copyFile\n  , createDirectoryIfMissing\n  , doesDirectoryExist\n  , doesFileExist\n  , listDirectory\n  , removeDirectoryRecursive\n  , removeFile\n  )\nimport System.Environment (lookupEnv)\nimport System.Process (callProcess)\nimport System.FilePath\n  ( isAbsolute\n  , makeRelative\n  , normalise\n  , splitDirectories\n  , takeDirectory\n  , takeFileName\n  , (</>)\n  )\nimport System.IO (hPutStrLn, stderr)\n\n-- | Finalize an installed program. The build step has already written pools\n-- and the wrapper script directly into installDir. This function copies the\n-- wrapper to bin/, extracts the manifest to fdb/, and copies include files.\ninstallProgram ::\n  -- | configHome (e.g. ~/.local/share/morloc)\n  String ->\n  -- | installDir (e.g. <configHome>/exe/<name>)\n  String ->\n  -- | installName\n  String ->\n  -- | include patterns (Nothing = copy everything, Just [] = copy nothing)\n  Maybe [Text] ->\n  -- | force overwrite\n  Bool ->\n  IO ()\ninstallProgram configHome installDir installName includes force = do\n  let binDir = configHome </> \"bin\"\n      binPath = binDir </> installName\n      installedWrapper = installDir </> installName\n\n  -- Check for existing bin entry (installDir is already populated by build)\n  binExists <- doesFileExist binPath\n  when (binExists && not force) $\n    throwIO . userError $ \"'\" <> installName <> \"' is already installed. Use --force to overwrite.\"\n  when (binExists && force) $\n    removeFile binPath\n\n  -- Copy files from CWD to installDir\n  case includes of\n    Nothing -> copyAllFiltered \".\" installDir\n    Just pats -> mapM_ (\\pat -> copyIncludePattern (T.unpack pat) \".\" installDir) pats\n\n  -- Copy wrapper from installDir to bin/\n  createDirectoryIfMissing True binDir\n  copyFile installedWrapper binPath\n  makeExecutable binPath\n\n  -- Copy manifest to fdb/ for daemon discovery\n  let fdbDir = configHome </> \"fdb\"\n      fdbPath = fdbDir </> (installName ++ \".manifest\")\n  createDirectoryIfMissing True fdbDir\n  extractAndWriteManifest binPath fdbPath\n\n  -- Check if bin dir is on PATH and print hint if not\n  pathEnv <- lookupEnv \"PATH\"\n  let pathStr = fromMaybe \"\" pathEnv\n  when (not (binDir `isInfixOf` pathStr)) $\n    hPutStrLn stderr $ \"Note: add \" <> binDir <> \" to your PATH\"\n\n  hPutStrLn stderr $ \"Installed '\" <> installName <> \"' to \" <> binPath\n\n  -- Regenerate shell completions\n  Completion.regenerateCompletions False configHome\n\n-- ======================================================================\n-- Copy-everything mode (default)\n-- ======================================================================\n\n-- | Always-excluded patterns, applied even without a .morlocignore file.\ndefaultIgnorePatterns :: [String]\ndefaultIgnorePatterns =\n  [ \".git/\"\n  , \".morlocignore\"\n  , \"*.manifest\"\n  ]\n\n-- | Copy all files from srcRoot to dstRoot, excluding files that match\n-- .morlocignore patterns and always-excluded patterns. Preserves\n-- relative directory structure.\ncopyAllFiltered :: FilePath -> FilePath -> IO ()\ncopyAllFiltered srcRoot dstRoot = do\n  userPatterns <- readMorlocIgnore (srcRoot </> \".morlocignore\")\n  let allPatterns = defaultIgnorePatterns ++ userPatterns\n  files <- listDirectoryRecursive srcRoot\n  let relFiles = map (makeRelative srcRoot) files\n      kept = filter (not . isIgnored allPatterns) relFiles\n  forM_ kept $ \\rel -> do\n    let dst = dstRoot </> rel\n    createDirectoryIfMissing True (takeDirectory dst)\n    -- Skip if dst already exists (build artifacts placed by the compiler)\n    dstExists <- doesFileExist dst\n    unless dstExists $\n      copyFile (srcRoot </> rel) dst\n\n-- | Remove files matching .morlocignore and always-excluded patterns\n-- from an already-populated directory. Used after git clone for module\n-- install to clean up ignored files in-place.\ncleanIgnoredFiles :: FilePath -> IO ()\ncleanIgnoredFiles dir = do\n  userPatterns <- readMorlocIgnore (dir </> \".morlocignore\")\n  let allPatterns = defaultIgnorePatterns ++ userPatterns\n  files <- listDirectoryRecursive dir\n  let relFiles = map (makeRelative dir) files\n      ignored = filter (isIgnored allPatterns) relFiles\n  forM_ ignored $ \\rel ->\n    removeFile (dir </> rel)\n  -- Clean up empty directories left behind\n  cleanEmptyDirs dir\n\n-- | Remove empty directories recursively (bottom-up).\ncleanEmptyDirs :: FilePath -> IO ()\ncleanEmptyDirs dir = do\n  entries <- listDirectory dir\n  forM_ entries $ \\entry -> do\n    let path = dir </> entry\n    isDir <- doesDirectoryExist path\n    when isDir $ do\n      cleanEmptyDirs path\n      subEntries <- listDirectory path\n      when (null subEntries) $\n        removeDirectoryRecursive path\n\n-- ======================================================================\n-- .morlocignore parsing\n-- ======================================================================\n\n-- | Read and parse a .morlocignore file. Returns empty list if the file\n-- does not exist.\nreadMorlocIgnore :: FilePath -> IO [String]\nreadMorlocIgnore path = do\n  exists <- doesFileExist path\n  if exists\n    then do\n      content <- readFile path\n      return $ parseIgnorePatterns content\n    else return []\n\n-- | Parse .morlocignore content into a list of patterns.\n-- Supports: blank lines, # comments, negation with !, trailing / for dirs.\nparseIgnorePatterns :: String -> [String]\nparseIgnorePatterns = filter (not . null) . map clean . lines\n  where\n    clean line =\n      let trimmed = dropWhile (== ' ') line\n      in if null trimmed || head trimmed == '#'\n           then \"\"\n           else trimmed\n\n-- | Check if a relative path should be ignored based on ignore patterns.\n-- Supports: glob patterns, directory patterns (trailing /), negation (!).\nisIgnored :: [String] -> FilePath -> Bool\nisIgnored patterns relPath = foldl apply False patterns\n  where\n    apply acc pat\n      | \"!\" `isPrefixOf` pat =\n          if matchIgnorePattern (drop 1 pat) relPath then False else acc\n      | otherwise =\n          if matchIgnorePattern pat relPath then True else acc\n\n-- | Match a single ignore pattern against a relative path.\nmatchIgnorePattern :: String -> FilePath -> Bool\nmatchIgnorePattern pat relPath\n  -- Directory pattern: \"build/\" matches any path under build/\n  | \"/\" `isSuffixOf` pat =\n      let dir = init pat\n      in dir == relPath\n           || (dir ++ \"/\") `isPrefixOf` relPath\n           || (\"/\" ++ dir ++ \"/\") `isInfixOf` (\"/\" ++ relPath)\n           || takeFileName (takeDirectory relPath) == dir\n  -- Pattern contains / → match against full relative path\n  | '/' `elem` pat = matchGlob pat relPath\n  -- Pattern without / → match against filename only\n  | otherwise = matchGlob pat (takeFileName relPath)\n\n-- ======================================================================\n-- Allowlist mode (explicit include patterns)\n-- ======================================================================\n\n-- | Recursively copy a directory\ncopyDirectoryRecursive :: FilePath -> FilePath -> IO ()\ncopyDirectoryRecursive src dst = do\n  createDirectoryIfMissing True dst\n  entries <- listDirectory src\n  mapM_\n    ( \\entry -> do\n        let srcPath = src </> entry\n            dstPath = dst </> entry\n        isDir <- doesDirectoryExist srcPath\n        if isDir\n          then copyDirectoryRecursive srcPath dstPath\n          else copyFile srcPath dstPath\n    )\n    entries\n\n{- | Copy files matching an include pattern, preserving relative paths.\nTrailing \"/\" means copy a directory recursively.\n\"*\" in a pattern means glob match.\nOtherwise treated as an exact file/directory path.\n-}\ncopyIncludePattern :: String -> FilePath -> FilePath -> IO ()\ncopyIncludePattern pattern srcRoot dstRoot\n  | \"/\" `isSuffixOf` pattern = do\n      let dirName = init pattern\n          srcDir = srcRoot </> dirName\n      exists <- doesDirectoryExist srcDir\n      if exists\n        then copyDirectoryRecursive srcDir (dstRoot </> dirName)\n        else return ()\n  | '*' `elem` pattern = do\n      files <- listDirectoryRecursive srcRoot\n      let matching = filter (matchGlob pattern . makeRelative srcRoot) files\n      mapM_\n        ( \\f -> do\n            let rel = makeRelative srcRoot f\n                dst = dstRoot </> rel\n            createDirectoryIfMissing True (takeDirectory dst)\n            copyFile f dst\n        )\n        matching\n  | otherwise = do\n      let srcPath = srcRoot </> pattern\n      isFile <- doesFileExist srcPath\n      isDir <- doesDirectoryExist srcPath\n      if isFile\n        then do\n          let dst = dstRoot </> pattern\n          createDirectoryIfMissing True (takeDirectory dst)\n          copyFile srcPath dst\n        else\n          if isDir\n            then\n              copyDirectoryRecursive srcPath (dstRoot </> pattern)\n            else\n              return ()\n\n-- | Recursively list all files in a directory\nlistDirectoryRecursive :: FilePath -> IO [FilePath]\nlistDirectoryRecursive dir = do\n  exists <- doesDirectoryExist dir\n  if not exists\n    then return []\n    else do\n      entries <- listDirectory dir\n      paths <-\n        mapM\n          ( \\entry -> do\n              let path = dir </> entry\n              isDir <- doesDirectoryExist path\n              if isDir\n                then listDirectoryRecursive path\n                else return [path]\n          )\n          entries\n      return (concat paths)\n\n{- | Simple glob pattern matching supporting * (any sequence within a segment)\nand ** (any path segments). Matches against relative paths.\n-}\nmatchGlob :: String -> FilePath -> Bool\nmatchGlob [] [] = True\nmatchGlob ('*' : '*' : '/' : rest) path =\n  matchGlob rest path || case break (== '/') path of\n    (_, '/' : remaining) -> matchGlob ('*' : '*' : '/' : rest) remaining\n    _ -> False\nmatchGlob ('*' : rest) path = any (\\i -> matchGlob rest (drop i path)) [0 .. length segment]\n  where\n    segment = takeWhile (/= '/') path\nmatchGlob (p : rest) (c : cs) | p == c = matchGlob rest cs\nmatchGlob _ _ = False\n\n-- ======================================================================\n-- Validation\n-- ======================================================================\n\n-- | Make a file executable (0755 so group/other can execute too)\nmakeExecutable :: FilePath -> IO ()\nmakeExecutable path = callProcess \"chmod\" [\"755\", path]\n\n{- | Reject include patterns that escape the package root.\n\nAn include path is only valid if it resolves to a location inside the\ndirectory that contains package.yaml (and the main .loc script). Absolute\npaths are rejected outright; relative paths are rejected if normalising\nthem produces a leading @..@ segment.\n\nThis closes the obvious footgun of a package referencing files outside the\nproject directory, which would break reproducibility and make installs\ndepend on ambient filesystem layout.\n-}\nvalidateIncludeScope :: [Text] -> IO ()\nvalidateIncludeScope patterns =\n  case filter (not . inScope . T.unpack) patterns of\n    [] -> return ()\n    bad ->\n      throwIO . userError $\n        \"Invalid `include` in package.yaml: the following entries escape the \"\n          <> \"package directory (absolute paths and `..` are not allowed):\\n\"\n          <> unlines (map ((\"  \" <>) . T.unpack) bad)\n  where\n    inScope :: String -> Bool\n    inScope pat\n      | isAbsolute pat = False\n      | otherwise =\n          case splitDirectories (normalise pat) of\n            (\"..\" : _) -> False\n            segs -> not (any (== \"..\") segs)\n\n{- | Verify that every directly-sourced file in the compiled program is\ncovered by at least one include pattern.\n\nOnly runs in strict mode (when include patterns are explicitly specified).\nIn default mode (include everything), this check is skipped since all\nfiles are copied.\n\nNote: we only check /directly/ sourced files, not files that those sources\nin turn import (e.g., one R file calling @source()@ on another). Transitive\ndependencies cannot be discovered without executing the source language.\n-}\nvalidateIncludeCoverage ::\n  -- | package root (directory containing the main .loc script / package.yaml)\n  FilePath ->\n  -- | include patterns as written in package.yaml\n  [Text] ->\n  -- | absolute filesystem paths of every directly-sourced file\n  [FilePath] ->\n  IO ()\nvalidateIncludeCoverage packageRoot patterns sourcePaths = do\n  let patStrs = map T.unpack patterns\n      relPaths = [ rel\n                 | p <- sourcePaths\n                 , let rel = makeRelative packageRoot (normalise p)\n                 , not (isAbsolute rel)\n                 , not (\"../\" `isPrefixOf` rel)\n                 , rel /= \"..\"\n                 ]\n      uncovered = filter (not . isCovered patStrs) relPaths\n  case uncovered of\n    [] -> return ()\n    missing ->\n      throwIO . userError $\n        \"The following source files are referenced from your morloc program \"\n          <> \"but not listed in `include` in package.yaml:\\n\"\n          <> unlines (map (\"  \" <>) missing)\n          <> \"\\nAdd them to `include` so they are copied into the install:\\n\"\n          <> \"  include:\\n\"\n          <> unlines (map ((\"    - \" <>)) missing)\n\n-- | True if any include pattern matches the given relative path.\nisCovered :: [String] -> FilePath -> Bool\nisCovered patterns relPath = any (coversOne relPath) patterns\n  where\n    coversOne :: FilePath -> String -> Bool\n    coversOne rel pat\n      -- `src/` matches anything inside the src/ directory\n      | \"/\" `isSuffixOf` pat =\n          let dir = init pat\n          in (dir ++ \"/\") `isPrefixOf` rel\n      -- glob pattern\n      | '*' `elem` pat = matchGlob pat rel\n      -- exact path\n      | otherwise = pat == rel\n\n-- ======================================================================\n-- Manifest extraction\n-- ======================================================================\n\n{- | Extract the manifest JSON from a wrapper script and write it to a file.\nThe wrapper script has the format:\n  #!/bin/sh\n  exec morloc-nexus \"$0\" \"$@\"\n  ### MANIFEST ###\n  <json>\n-}\nextractAndWriteManifest :: FilePath -> FilePath -> IO ()\nextractAndWriteManifest wrapperPath manifestPath = do\n  contents <- readFile wrapperPath\n  let marker = \"### MANIFEST ###\"\n      afterMarker = drop 1 $ dropWhile (/= marker) (lines contents)\n  case afterMarker of\n    [] -> return () -- no manifest found, skip silently\n    _ -> writeFile manifestPath (unlines afterMarker)\n"
  },
  {
    "path": "library/Morloc/Quasi.hs",
    "content": "{-# LANGUAGE QuasiQuotes #-}\n{-# LANGUAGE TemplateHaskell #-}\n\n{- |\nModule      : Morloc.Quasi\nDescription : String-interpolating quasiquoter for Doc values\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nProvides the @[idoc|...|]@ quasiquoter for building 'Doc' values with\nembedded Haskell expressions via @#\\{expr\\}@ syntax. Used in translators\nto generate code with interpolated variable names and types.\n-}\nmodule Morloc.Quasi\n  ( idoc\n  ) where\n\nimport Language.Haskell.TH\nimport Language.Haskell.TH.Quote\nimport qualified Morloc.Data.Doc as G\n\nimport qualified Language.Haskell.Meta.Parse as MP\n\nimport Text.Parsec\n\ntype Parser = Parsec String ()\n\ndata I\n  = S String\n  | V String\n\npIs :: Parser [I]\npIs = many1 (try pV <|> try pS <|> try pE) <* eof\n\npV :: Parser I\npV = V <$> between (string \"#{\") (char '}') (many1 (noneOf \"}\"))\n\npS :: Parser I\npS = S <$> many1 (noneOf \"#\")\n\n-- | match a literal '#' sign\npE :: Parser I\npE = fmap (S . return) $ char '#' <* notFollowedBy (char '}')\n\n-- | __i__nterpolated __doc__ument\nidoc :: QuasiQuoter\nidoc =\n  QuasiQuoter\n    { quoteExp = compile\n    , quotePat = error \"Can't handle patterns\"\n    , quoteType = error \"Can't handle types\"\n    , quoteDec = error \"Can't handle declarations\"\n    }\n  where\n    compile :: String -> Q Exp\n    compile txt =\n      case parse pIs \"\" txt of\n        Left err -> error $ show err\n        Right xs -> return $ AppE (VarE 'G.hcat) (ListE (map qI xs))\n          where\n            qI :: I -> Exp\n            qI (S x) = LitE (StringL x)\n            qI (V x) =\n              case MP.parseExp x of\n                (Right hask) -> hask -- a Haskell expression\n                (Left err) -> error err\n"
  },
  {
    "path": "library/Morloc/System.hs",
    "content": "{- |\nModule      : Morloc.System\nDescription : Filesystem re-exports and YAML config loading\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nRe-exports \"System.Directory\", \"System.Directory.Tree\", and\n\"System.FilePath.Posix\" so that other modules can import a single module\nfor all filesystem operations. Also provides 'loadYamlConfig' for loading\nYAML configuration with defaults.\n-}\nmodule Morloc.System\n  ( module System.Directory.Tree\n  , module System.Directory\n  , module System.FilePath.Posix\n  , loadYamlConfig\n  ) where\n\nimport Morloc.Namespace.Prim\n\nimport Data.Aeson (FromJSON (..))\nimport qualified Data.Yaml.Config as YC\nimport System.Directory\nimport System.Directory.Tree\nimport System.FilePath.Posix\n\nloadYamlConfig ::\n  (FromJSON a) =>\n  -- | possible locations of the config file\n  Maybe [String] ->\n  -- | default values taken from the environment (or a hashmap)\n  YC.EnvUsage ->\n  -- | default configuration\n  IO a ->\n  IO a\nloadYamlConfig (Just fs) e _ = YC.loadYamlSettings fs [] e\nloadYamlConfig Nothing _ d = d\n"
  },
  {
    "path": "library/Morloc/TypeEval.hs",
    "content": "{-# LANGUAGE CPP #-}\n{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.TypeEval\nDescription : Expand type aliases and reduce type applications\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nEvaluates type expressions by expanding type aliases from the scope\n(general and concrete), applying type arguments, and reducing applications.\nUsed by the typechecker and code generator to resolve user-defined types\nto their canonical forms.\n-}\nmodule Morloc.TypeEval\n  ( evaluateType\n  , transformType\n  , evaluateStep\n  , pairEval\n  , reduceType\n  ) where\n\nimport qualified Data.Set as Set\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Map as Map\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State (MorlocError (..))\nimport Morloc.Namespace.Type\n\n-- Evaluate a type expression with both the concrete and general scopes\n--\n-- This function does not know the concrete language, the parent sets that.\n--\n-- First try to resolve an expression with the concrete scope\n-- If this fails, resolve one step with the general scope.\npairEval ::\n  Scope -> -- concrete scope\n  Scope -> -- general scope\n  TypeU ->\n  Either MorlocError TypeU\npairEval cscope gscope =\n  -- transform the concrete type until an unresolvable node is reached\n  generalTransformType Set.empty id resolveGen cscope\n  where\n    -- resolve by attempting to evaluate one step as in the general scope\n    resolveGen f bnd t =\n      case generalTransformType bnd (\\_ _ -> return) resolveFail gscope t of\n        (Right t') ->\n          if t' /= t\n            -- if general resolution succeeds, continue evaluation with the concrete scope\n            then f bnd t'\n            -- if it fails, return to the concrete scope to handle failure without\n            -- general resolution option\n            else generalTransformType Set.empty id resolveFail cscope t\n        -- if no resolution is possible, propagate the error\n        e -> e\n\nevaluateStep :: Scope -> TypeU -> Maybe TypeU\nevaluateStep scope t0 =\n  case generalTransformType Set.empty (\\_ _ -> return) resolveFail scope t0 of\n    (Left _) -> Nothing\n    (Right t) -> Just t\n\n-- | evaluate a type exactly one step, return nothing if no evaluation is possible\nreduceType :: Scope -> TypeU -> Maybe TypeU\nreduceType scope t0 =\n  case evaluateStep scope t0 of\n    (Just t1) -> if t1 == t0 then Nothing else Just t1\n    Nothing -> Nothing\n\n-- evaluate a type until terminal functions called, fail if termini are not reached\ntransformType :: Scope -> TypeU -> Either MorlocError TypeU\ntransformType = generalTransformType Set.empty id resolveFail\n\n-- evaluate a type as far as possible given the type functions in scope\nevaluateType :: Scope -> TypeU -> Either MorlocError TypeU\nevaluateType = generalTransformType Set.empty id resolveIgnore\n\nresolveIgnore ::\n  (Set.Set TVar -> TypeU -> Either MorlocError TypeU) ->\n  Set.Set TVar ->\n  TypeU ->\n  Either MorlocError TypeU\nresolveIgnore f bnd (AppU (VarU v) ts) = AppU (VarU v) <$> mapM (f bnd) ts\nresolveIgnore _ _ t@(VarU _) = return t\nresolveIgnore _ _ _ = MM.throwSystemError \"Compiler bug (__FILE__:__LINE__): Reached unexpected branch\"\n\nresolveFail ::\n  (Set.Set TVar -> TypeU -> Either MorlocError TypeU) ->\n  Set.Set TVar ->\n  TypeU ->\n  Either MorlocError TypeU\nresolveFail _ _ (AppU (VarU v) _) =\n  MM.throwSystemError $\n    \"Could not resolve type applied variable\" <+> squotes (pretty v)\n      <> \". You may be missing a language-specific type definition.\"\nresolveFail _ _ (VarU v) =\n  MM.throwSystemError $\n    \"Could not resolve type for variable\" <+> squotes (pretty v)\n      <> \". You may be missing a language-specific type definition.\"\nresolveFail _ _ _ = MM.throwSystemError \"Compiler bug (__FILE__:__LINE__): Reached unexpected branch\"\n\ngeneralTransformType ::\n  Set.Set TVar ->\n  ( (Set.Set TVar -> TypeU -> Either MorlocError TypeU) ->\n    Set.Set TVar ->\n    TypeU ->\n    Either MorlocError TypeU\n  ) ->\n  ( (Set.Set TVar -> TypeU -> Either MorlocError TypeU) ->\n    Set.Set TVar ->\n    TypeU ->\n    Either MorlocError TypeU\n  ) ->\n  Scope -> -- may be general or concrete scope\n  TypeU ->\n  Either MorlocError TypeU\ngeneralTransformType bnd0 recurse' resolve' scope = f bnd0\n  where\n    recurse = recurse' f\n    resolve = resolve' recurse\n\n    f :: Set.Set TVar -> TypeU -> Either MorlocError TypeU\n    f bnd (ExistU v (ps, pc) (rs, rc)) = do\n      ps' <- mapM (recurse bnd) ps\n      rs' <- mapM (\\(k, v') -> (,) k <$> recurse bnd v') rs\n      return $ ExistU v (ps', pc) (rs', rc)\n    f bnd (FunU ts t) = FunU <$> mapM (recurse bnd) ts <*> recurse bnd t\n    f bnd (NamU o n ps rs) = do\n      (n', o') <- case Map.lookup n scope of\n        -- If the record type itself is aliased, substitute the name and record form\n        (Just [(_, NamU o'' n'' _ _, _, _)]) -> return (n'', o'')\n        -- Otherwise, keep the record name and form and recurse only into children\n        _ -> return (n, o)\n      ts' <- mapM (recurse bnd . snd) rs\n      ps' <- mapM (recurse bnd) ps\n      return $ NamU o' n' ps' (zip (map fst rs) ts')\n    f bnd t0@(AppU (VarU v) ts)\n      -- Handle generic case:\n      --   type Cpp => A a b = \"map<$1,$2>\" a b\n      --   foo Cpp :: A D [B] -> X\n      --   -----------------------------------\n      --   foo :: \"map<$1,$2>\" D [B] -> X\n      --\n      --   type Foo a = (a, A)\n      --   f :: Foo Int -> B\n      --   -----------------\n      --   f :: (Int, A) -> B\n      | Set.member v bnd = AppU (VarU v) <$> mapM (recurse bnd) ts\n      -- Handle specialization, e.g.\n      --   type Py => List Int64 = \"np.ndarray\" \"int64\"\n      | otherwise =\n          case Map.lookup v scope of\n            (Just ts') -> do\n              mergedAliases <- foldlM (mergeAliases ts) Nothing (map Just ts') |>> fmap (renameTypedefs bnd)\n              case mergedAliases of\n                (Just (vs, newType, _, isTerminal)) -> case isTerminal of\n                  True -> terminate bnd $ foldr parsub newType (zip vs ts)\n                  -- substitute the head term and re-evaluate\n                  False -> recurse bnd $ foldr parsub newType (zip vs ts)\n                Nothing ->\n                  MM.throwSystemError $\n                    \"No matching alias found for\" <+> pretty t0\n                      <> \"\\n  Available aliases have\"\n                      <+> pretty (length ts') <+> \"entries, none match the given arguments\"\n            _ -> resolve bnd t0\n    -- t may be existential\n    f bnd (AppU t ts) = AppU <$> recurse bnd t <*> mapM (recurse bnd) ts\n    -- type Foo = A\n    -- f :: Foo -> B\n    -- -----------------\n    -- f :: A -> B\n    f bnd t0@(VarU v)\n      | Set.member v bnd = return t0\n      | otherwise = case Map.lookup v scope of\n          (Just []) -> return t0\n          (Just ts1) -> do\n            -- new parameters may be added on the right that are not on the left\n            mergedAliases <- foldlM (mergeAliases []) Nothing (map Just ts1)\n            case mergedAliases of\n              (Just (_, t2, _, isTerminal)) ->\n                if isTerminal\n                  then terminate bnd t2\n                  else recurse bnd t2\n              Nothing ->\n                MM.throwSystemError $\n                  \"No matching alias found for\" <+> pretty t0\n                    <> \"\\n  Available aliases have\"\n                    <+> pretty (length ts1) <+> \"entries, none match\"\n          Nothing -> resolve bnd t0\n    f bnd (ForallU v t) = ForallU v <$> recurse (Set.insert v bnd) t\n    f bnd (EffectU effs t) = EffectU effs <$> recurse bnd t\n    f bnd (OptionalU t) = OptionalU <$> recurse bnd t\n    f _ t@(NatVarU _) = return t  -- nat vars are not type aliases\n    f _ t@(NatLitU _) = return t\n    f bnd (NatAddU a b) = NatAddU <$> recurse bnd a <*> recurse bnd b\n    f bnd (NatMulU a b) = NatMulU <$> recurse bnd a <*> recurse bnd b\n    f bnd (NatSubU a b) = NatSubU <$> recurse bnd a <*> recurse bnd b\n    f bnd (NatDivU a b) = NatDivU <$> recurse bnd a <*> recurse bnd b\n    f bnd (LabeledU n t) = LabeledU n <$> recurse bnd t\n\n    terminate :: Set.Set TVar -> TypeU -> Either MorlocError TypeU\n    terminate bnd (ExistU v (ts, tc) (rs, rc)) = do\n      ts' <- mapM (recurse bnd) ts\n      rs' <- mapM (secondM (recurse bnd)) rs\n      return $ ExistU v (ts', tc) (rs', rc)\n    terminate bnd (FunU ts t) = FunU <$> mapM (recurse bnd) ts <*> recurse bnd t\n    terminate bnd (ForallU v t) = ForallU v <$> recurse (Set.insert v bnd) t\n    terminate bnd (AppU t ts) = AppU t <$> mapM (recurse bnd) ts\n    terminate bnd (NamU o v ts rs) = NamU o v <$> mapM (recurse bnd) ts <*> mapM (secondM (recurse bnd)) rs\n    terminate _ (VarU v) = return (VarU v)\n    terminate _ t@(NatVarU _) = return t\n    terminate bnd (EffectU effs t) = EffectU effs <$> recurse bnd t\n    terminate bnd (OptionalU t) = OptionalU <$> recurse bnd t\n    terminate _ t@(NatLitU _) = return t\n    terminate bnd (NatAddU a b) = NatAddU <$> recurse bnd a <*> recurse bnd b\n    terminate bnd (NatMulU a b) = NatMulU <$> recurse bnd a <*> recurse bnd b\n    terminate bnd (NatSubU a b) = NatSubU <$> recurse bnd a <*> recurse bnd b\n    terminate bnd (NatDivU a b) = NatDivU <$> recurse bnd a <*> recurse bnd b\n    terminate bnd (LabeledU n t) = LabeledU n <$> recurse bnd t\n\n    renameTypedefs ::\n      Set.Set TVar -> ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) -> ([TVar], TypeU, ArgDoc, Bool)\n    renameTypedefs _ ([], t, d, isTerminal) = ([], t, d, isTerminal)\n    renameTypedefs bnd (Left (v@(TV x), _) : vs, t, d, isTerminal)\n      | Set.member v bnd =\n          let (vs', t', d', isTerminal') = renameTypedefs bnd (vs, t, d, isTerminal)\n              v' =\n                head\n                  [ x' | x' <- [TV (MT.show' i <> x) | i <- [(0 :: Int) ..]], not (Set.member x' bnd), x' `notElem` vs'\n                  ]\n              t'' = substituteTVar v (VarU v') t'\n           in (v' : vs', t'', d', isTerminal')\n      | otherwise =\n          let (vs', t', d', isTerminal') = renameTypedefs bnd (vs, t, d, isTerminal)\n           in (v : vs', t', d', isTerminal')\n    renameTypedefs bnd (Right _ : vs, t, d, isTerminal) =\n      renameTypedefs bnd (vs, t, d, isTerminal)\n\n    -- When a type alias is imported from two places, this function reconciles them, if possible\n    mergeAliases ::\n      [TypeU] ->\n      Maybe ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) ->\n      Maybe ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) ->\n      Either MorlocError (Maybe ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool))\n    mergeAliases _ Nothing Nothing = Right Nothing\n    mergeAliases tsMain Nothing (Just b)\n      | checkAlias tsMain b = Right (Just b)\n      | otherwise = Right Nothing\n    mergeAliases tsMain (Just a) Nothing\n      | checkAlias tsMain a = Right (Just a)\n      | otherwise = Right Nothing\n    -- TODO: should the docstring args be considered here?\n    mergeAliases tsMain (Just a@(ts1, t1, _, isTerminal1)) (Just b@(ts2, t2, _, isTerminal2))\n      -- if both are invalid, return nothing\n      | not aIsValid && not bIsValid = Right Nothing\n      -- if one is valid and the other isn't, return the valid one\n      | aIsValid && not bIsValid = Right (Just a)\n      | not aIsValid && bIsValid = Right (Just b)\n      -- if they are both valid AND they are identical AND there is no specialization, return the first\n      | -- the return types are the same\n        isSubtypeOf t1 t2\n          && isSubtypeOf t2 t1\n          -- there is no specialization\n          && nonspecialized\n          -- the return type is concrete, not an alias for something else\n          && isTerminal1 == isTerminal2 =\n          return (Just a)\n      -- handle specialization\n      | not nonspecialized = return $ selectSpecialization a b\n      | otherwise =\n          MM.throwSystemError $\n            \"Cannot merge conflicting type aliases:\"\n              <> \"\\n  t1:\" <+> pretty t1\n              <> \"\\n  t2:\" <+> pretty t2\n      where\n        aIsValid = checkAlias tsMain a\n        bIsValid = checkAlias tsMain b\n        -- True if all parameters in both aliases are generic\n        nonspecialized =\n          all\n            (\\(x, y) -> either (\\_ -> either (const True) (const False) y) (const False) x)\n            (zip ts1 ts2)\n\n    selectSpecialization ::\n      ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) ->\n      ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) ->\n      Maybe ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)\n    selectSpecialization a@(aps0, _, _, _) b@(bps0, _, _, _) = g aps0 bps0\n      where\n        g [] _ = Just a\n        g _ [] = Just b\n        g ((Right _) : _) ((Left _) : _) = Just a\n        g ((Left _) : _) ((Right _) : _) = Just b\n        g ((Left _) : aps) ((Left _) : bps) = g aps bps\n        g ((Right ta) : aps) ((Right tb) : bps)\n          | isSubtypeOf ta tb && isSubtypeOf tb ta = g aps bps\n          | isSubtypeOf ta tb && not (isSubtypeOf tb ta) = Just b\n          | not (isSubtypeOf ta tb) && isSubtypeOf tb ta = Just a\n          | otherwise = Nothing\n\n    checkAlias ::\n      [TypeU] ->\n      ([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool) ->\n      Bool\n    checkAlias ts1 (ts2, _, _, _) =\n      length ts1 == length ts2\n        && all (\\(x, y) -> either (const True) (\\ytype -> isSubtypeOf ytype x) y) (zip ts1 ts2)\n\n-- Replace a type variable with an expression. For example:\n-- parsub (\"a\", \"Int\") -> \"Map a b\" -> \"Map Int b\"\nparsub :: (TVar, TypeU) -> TypeU -> TypeU\nparsub (v, t2) t1@(VarU v0)\n  | v0 == v = t2 -- substitute\n  | otherwise = t1 -- keep the original\nparsub _ t@(NatVarU _) = t\nparsub pair (ExistU t (ts, tc) (rs, rc)) = ExistU t (map (parsub pair) ts, tc) (zip (map fst rs) (map (parsub pair . snd) rs), rc)\nparsub pair (ForallU v t1) = ForallU v (parsub pair t1)\nparsub pair (FunU ts t) = FunU (map (parsub pair) ts) (parsub pair t)\nparsub pair (AppU t ts) = AppU (parsub pair t) (map (parsub pair) ts)\nparsub pair (NamU o n ps rs) = NamU o n (map (parsub pair) ps) [(k', parsub pair t) | (k', t) <- rs]\nparsub pair (EffectU effs t) = EffectU effs (parsub pair t)\nparsub pair (OptionalU t) = OptionalU (parsub pair t)\nparsub _ t@(NatLitU _) = t\nparsub pair (NatAddU a b) = NatAddU (parsub pair a) (parsub pair b)\nparsub pair (NatMulU a b) = NatMulU (parsub pair a) (parsub pair b)\nparsub pair (NatSubU a b) = NatSubU (parsub pair a) (parsub pair b)\nparsub pair (NatDivU a b) = NatDivU (parsub pair a) (parsub pair b)\nparsub pair (LabeledU n t) = LabeledU n (parsub pair t)\n"
  },
  {
    "path": "library/Morloc/Typecheck/Internal.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : Morloc.Typecheck.Internal\nDescription : Shared typechecking machinery (unification, substitution, context)\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nExports typechecking primitives shared between the frontend general\ntypechecker ('Morloc.Frontend.Typecheck') and any backend-specific\ntypecheckers: unification, context operations, substitution, fresh\nvariable generation, and type quantification\\/unquantification.\n-}\nmodule Morloc.Typecheck.Internal\n  ( (+>)\n  , (++>)\n\n    -- * accessing state\n  , newvar\n  , tvarname\n  , newvarRich\n  , evarname\n  , qualify\n  , unqualify\n\n    -- * Typeclasses\n  , Applicable (..)\n  , GammaIndexLike (..)\n\n    -- * manipulating context\n  , access1\n  , access2\n  , solveExist\n  , solveExistWith\n  , lookupU\n  , lookupE\n  , cacheSolved\n  , cut\n  , substitute\n  , rename\n  , cleanTypeName\n  , prettyTypeU\n  , occursCheck\n  , toExistential\n  , gammaContextList\n  , gammaTrimAfter\n\n    -- * selectors\n  , selectorType\n  , selectorGetter\n  , selectorSetter\n\n    -- * subtyping\n  , subtype\n  , isSubtypeOf2\n  , recheckDeferred\n\n    -- * nat label helpers\n  , collectNatVarNames\n\n    -- * debugging\n  , seeGamma\n  -- debugging\n  , enter\n  , insetSay\n  , leave\n  , peak\n  , peakGen\n  , seeType\n  ) where\n\nimport qualified Data.IntMap.Strict as IntMap\nimport qualified Data.Map.Strict as Map\nimport qualified Data.Set as Set\nimport Data.Text (Text)\nimport qualified Morloc.BaseTypes as BT\nimport Morloc.Data.Doc\nimport qualified Morloc.Data.Text as MT\nimport qualified Morloc.Monad as MM\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport Morloc.Namespace.Type\nimport qualified Morloc.Typecheck.NatSolver as NS\nimport qualified Morloc.TypeEval as TE\n\nqualify :: [TVar] -> TypeU -> TypeU\nqualify vs t = foldr (\\v -> ForallU v) t vs\n\nunqualify :: TypeU -> ([TVar], TypeU)\nunqualify (ForallU v (unqualify -> (vs, t))) = (v : vs, t)\nunqualify t = ([], t)\n\ntoExistential :: Gamma -> TypeU -> (Gamma, TypeU)\ntoExistential g0 (unqualify -> (vs0, t0)) = f g0 vs0 t0\n  where\n    f g [] t = (g, t)\n    f g (v : vs) t =\n      let (g', newVar) = newvar (\"cls_\" <> unTVar v) g\n       in f g' vs (substituteTVar v newVar t)\n\nclass Applicable a where\n  apply :: Gamma -> a -> a\n\n-- | Apply a context to a type (See Dunfield Figure 8).\ninstance Applicable TypeU where\n  -- [G]a = a\n  apply g (VarU v) =\n    -- FIXME: very wrong - only works because of my renaming scheme\n    case lookupU v g of\n      (Just t') -> t'\n      Nothing -> VarU v\n  apply g (NatVarU v) = case Map.lookup v (gammaNatSubs g) of\n    Just t -> t\n    Nothing -> NatVarU v\n  -- [G](A->B) = ([G]A -> [G]B)\n  apply g (FunU ts t) = FunU (map (apply g) ts) (apply g t)\n  apply g (AppU t ts) = AppU (apply g t) (map (apply g) ts)\n  -- [G]ForallU a.a = forall a. [G]a\n  apply g (ForallU v a) =\n    -- FIXME: VERY WRONG\n    case lookupU v g of\n      (Just _) -> apply g a\n      Nothing -> ForallU v (apply g a)\n  -- [G[a=t]]a = [G[a=t]]t\n  apply g (ExistU v (ts, tc) (rs, rc)) =\n    case lookupU v g of\n      -- FIXME: this seems problematic - do I keep the previous parameters or the new ones?\n      (Just t') -> apply g t' -- reduce an existential; strictly smaller term\n      Nothing -> ExistU v (map (apply g) ts, tc) (map (second (apply g)) rs, rc)\n  apply g (NamU o n ps rs) = NamU o n ps [(k, apply g t) | (k, t) <- rs]\n  apply g (EffectU effs t) = EffectU effs (apply g t)\n  apply g (OptionalU t) = OptionalU (apply g t)\n  apply _ t@(NatLitU _) = t\n  apply g (NatAddU a b) = NatAddU (apply g a) (apply g b)\n  apply g (NatMulU a b) = NatMulU (apply g a) (apply g b)\n  apply g (NatSubU a b) = NatSubU (apply g a) (apply g b)\n  apply g (NatDivU a b) = NatDivU (apply g a) (apply g b)\n  apply g (LabeledU n t) = LabeledU n (apply g t)\n\ninstance Applicable EType where\n  apply g e =\n    e\n      { etype = apply g (etype e)\n      , econs = Set.map (applyConstraint g) (econs e)\n      }\n    where\n      applyConstraint g' (Constraint cls ts) = Constraint cls (map (apply g') ts)\n\ninstance Applicable Gamma where\n  apply g1 g2 =\n    g2\n      { gammaContext = IntMap.map f (gammaContext g2)\n      , gammaSolved = Map.map (apply g1) (gammaSolved g2)\n      , gammaNatSubs = Map.map (apply g1) (gammaNatSubs g2)\n      }\n    where\n      f :: GammaIndex -> GammaIndex\n      f (AnnG v t) = AnnG v (apply g1 t)\n      f (ExistG v (ps, pc) (rs, rc)) = ExistG v (map (apply g1) ps, pc) (map (second (apply g1)) rs, rc)\n      f (SolvedG v t) = SolvedG v (apply g1 t)\n      f x = x\n\nclass GammaIndexLike a where\n  index :: a -> GammaIndex\n\ninstance GammaIndexLike GammaIndex where\n  index = id\n\ninstance GammaIndexLike TypeU where\n  index (ExistU t (ts, tc) (rs, rc)) = ExistG t (ts, tc) (rs, rc)\n  index t = error $ \"Can only index ExistT, found: \" <> show t\n\ninstance GammaIndexLike TVar where\n  index v = ExistG v ([], Open) ([], Open)\n\n-- | Slot spacing between consecutive entries added by (+>).\n-- Leaves room for solveExistWith to insert entries in between.\nslotSpacing :: Int\nslotSpacing = 256\n\n-- | Prepend an entry to the context (newest position).\n(+>) :: (GammaIndexLike a) => Gamma -> a -> Gamma\n(+>) g x =\n  let gi = index x\n      s = gammaSlot g\n  in g { gammaSlot = s + slotSpacing\n       , gammaContext = IntMap.insert s gi (gammaContext g)\n       , gammaExist = case gi of\n           ExistG v _ _ -> Map.insert v s (gammaExist g)\n           _ -> gammaExist g\n       }\n\n-- | Add multiple entries: last element of list becomes newest (highest slot).\n(++>) :: (GammaIndexLike a) => Gamma -> [a] -> Gamma\n(++>) g xs = foldl' (+>) g xs\n\nisSubtypeOf2 :: Scope -> TypeU -> TypeU -> Bool\nisSubtypeOf2 scope a b = case subtype scope a b (Gamma 0 0 IntMap.empty Map.empty Map.empty [] Map.empty Map.empty) of\n  (Left _) -> False\n  (Right _) -> True\n\nsubtypeEvaluated :: Scope -> TypeU -> TypeU -> Gamma -> Either MDoc Gamma\nsubtypeEvaluated scope t1 t2 g\n  -- Reject sibling aliases before reduction. Without this, Array Int <: Deque Int\n  -- would succeed transitively (Array Int -> List Int -> Deque Int) even though\n  -- they are on different branches of the alias tree.\n  | areSiblingAliases scope t1 t2 =\n      Left $ \"Cannot compare sibling types\" <+> pretty t1 <+> \"and\" <+> pretty t2\n  | otherwise = case (TE.reduceType scope t1, TE.reduceType scope t2) of\n    (Just t1', _) -> subtype scope t1' t2 g\n    (_, Just t2') -> subtype scope t1 t2' g\n    (_, _)\n      -- When both are bare type constructors that can't be reduced (e.g.,\n      -- List vs Deque where Deque a = List a), check if one is an ancestor\n      -- of the other by evaluating and comparing heads.\n      | aliasEquivConstructors scope t1 t2 -> Right g\n      | otherwise -> Left $ \"Cannot compare types\" <+> pretty t1 <+> \"and\" <+> pretty t2\n\n-- | Check whether two applied types are sibling aliases -- both reduce to\n-- the same ancestor but neither reduces to the other. For example,\n-- Array Int and Deque Int are siblings (both reduce to List Int, but\n-- Array does not reduce to Deque nor vice versa).\nareSiblingAliases :: Scope -> TypeU -> TypeU -> Bool\nareSiblingAliases scope (AppU (VarU v1) _) (AppU (VarU v2) _)\n  | v1 == v2 = False\n  | otherwise =\n    let h1 = evalHead v1\n        h2 = evalHead v2\n    in case (h1, h2) of\n         -- Both reduce to the same ancestor, but neither is the other's ancestor\n         (Just hv1, Just hv2) -> hv1 == hv2 && hv1 /= v1 && hv2 /= v2\n         _ -> False\n  where\n    evalHead v = case Map.lookup v scope of\n      Just ((ps, _, _, _) : _)\n        | all isGenericParam ps && not (null ps) ->\n          let n = length ps\n              freshVars = [VarU (TV (MT.show' i <> \"__sib_cmp\")) | i <- [0 .. n - 1]]\n              app = AppU (VarU v) freshVars\n          in case TE.evaluateType scope app of\n               Right (AppU (VarU headV) _) -> Just headV\n               _ -> Nothing\n        | otherwise -> Nothing\n      _ -> Nothing\n    isGenericParam (Left _) = True\n    isGenericParam _ = False\nareSiblingAliases _ _ _ = False\n\n-- | Check whether two unapplied type constructors are on the same path in\n-- the alias hierarchy -- i.e., one reduces to the other. Applied aliases\n-- (like Deque Int) are handled by reduceType above; this covers the bare\n-- constructor case (Deque vs List) which arises when an existential is solved\n-- to one name and then compared against an ancestor or descendant alias.\n--\n-- Only ancestor-descendant pairs match: List<->Deque and List<->Array succeed,\n-- but Array<->Deque fails (siblings with a common ancestor but neither\n-- reduces to the other).\naliasEquivConstructors :: Scope -> TypeU -> TypeU -> Bool\naliasEquivConstructors scope (VarU v1) (VarU v2) =\n  reducesToHead v1 v2 || reducesToHead v2 v1\n  where\n    reducesToHead src target =\n      case arityOf (Map.lookup src scope) of\n        Just n | n > 0 ->\n          let freshVars = [VarU (TV (MT.show' i <> \"__alias_cmp\")) | i <- [0 .. n - 1]]\n              app = AppU (VarU src) freshVars\n          in case TE.evaluateType scope app of\n               Right (AppU (VarU headV) _) -> headV == target\n               _ -> False\n        -- base type with no alias: matches only itself\n        _ -> False\n\n    arityOf :: Maybe [([Either (TVar, Kind) TypeU], TypeU, ArgDoc, Bool)] -> Maybe Int\n    arityOf Nothing = Nothing\n    arityOf (Just []) = Nothing\n    arityOf (Just ((ps, _, _, _) : _))\n      | all isGenericParam ps = Just (length ps)\n      | otherwise = Nothing\n\n    isGenericParam (Left _) = True\n    isGenericParam _ = False\naliasEquivConstructors _ _ _ = False\n\nsubtypeError :: TypeU -> TypeU -> MDoc -> Either MDoc a\nsubtypeError t1 t2 msg =\n  Left $\n    \"Subtype error:\" <+> msg\n      <> \"\\n  \"\n      <> prettyTypeU t1 <+> \"<:\" <+> prettyTypeU t2\n\n-- Nat expression helpers for SOP-based comparison\nisNatExpr :: TypeU -> Bool\nisNatExpr (NatVarU _) = True\nisNatExpr (NatLitU _) = True\nisNatExpr (NatAddU _ _) = True\nisNatExpr (NatMulU _ _) = True\nisNatExpr (NatSubU _ _) = True\nisNatExpr (NatDivU _ _) = True\nisNatExpr _ = False\n\ntypeUToNatExpr :: TypeU -> Maybe NS.NatExpr\ntypeUToNatExpr (NatVarU v) = Just (NS.NatVar v)\ntypeUToNatExpr (NatLitU n) = Just (NS.NatLit n)\ntypeUToNatExpr (NatAddU a b) = NS.NatAdd <$> typeUToNatExpr a <*> typeUToNatExpr b\ntypeUToNatExpr (NatMulU a b) = NS.NatMul <$> typeUToNatExpr a <*> typeUToNatExpr b\ntypeUToNatExpr (NatSubU a b) = NS.NatSub <$> typeUToNatExpr a <*> typeUToNatExpr b\ntypeUToNatExpr (NatDivU a b) = NS.NatDiv <$> typeUToNatExpr a <*> typeUToNatExpr b\ntypeUToNatExpr (VarU v) = Just (NS.NatVar v)\ntypeUToNatExpr (ExistU v _ _) = Just (NS.NatVar v)\ntypeUToNatExpr _ = Nothing\n\nnatExprToTypeU :: NS.NatExpr -> TypeU\nnatExprToTypeU (NS.NatLit n) = NatLitU n\nnatExprToTypeU (NS.NatVar v) = NatVarU v\nnatExprToTypeU (NS.NatAdd a b) = NatAddU (natExprToTypeU a) (natExprToTypeU b)\nnatExprToTypeU (NS.NatMul a b) = NatMulU (natExprToTypeU a) (natExprToTypeU b)\nnatExprToTypeU (NS.NatSub a b) = NatSubU (natExprToTypeU a) (natExprToTypeU b)\nnatExprToTypeU (NS.NatDiv a b) = NatDivU (natExprToTypeU a) (natExprToTypeU b)\n\napplyNatSolutions :: Map.Map TVar NS.NatExpr -> Gamma -> Either MDoc Gamma\napplyNatSolutions subs g0 = foldM applySub g0 (Map.toList subs)\n  where\n    applySub g (v, ne) =\n      let t = natExprToTypeU ne\n      -- Try solving as existential first (for existential nat vars),\n      -- then store in gammaNatSubs (for NatVarU variables)\n      in case solveExist v t g of\n           Right (Just g') -> Right g'\n           Right Nothing ->\n             -- Not an existential — store as a NatVarU solution\n             Right g { gammaNatSubs = Map.insert v t (gammaNatSubs g) }\n           Left err -> Left err\n\n-- | Re-check deferred Nat constraints after all existentials are solved.\n-- Applies the final gamma to each deferred pair, converts to NatExpr,\n-- and re-solves. Returns Left on contradiction, Right with remaining\n-- still-deferred constraints (now truly unsolvable).\nrecheckDeferred :: Gamma -> Either MDoc [(TypeU, TypeU)]\nrecheckDeferred g = foldM check [] (gammaDeferred g)\n  where\n    check acc (t1, t2) =\n      let t1' = apply g t1\n          t2' = apply g t2\n      in case (typeUToNatExpr t1', typeUToNatExpr t2') of\n           (Just ne1, Just ne2) ->\n             case NS.solveNat ne1 ne2 of\n               Right _ -> Right acc\n               Left NS.Contradiction ->\n                 Left $ \"Nat constraint mismatch (deferred):\"\n                   <+> prettyTypeU t1' <+> \"~\" <+> prettyTypeU t2'\n               Left (NS.Deferred _) -> Right ((t1', t2') : acc)\n           _ -> Right acc  -- not nat exprs after apply, skip\n\n-- | type 1 is more polymorphic than type 2 (Dunfield Figure 9)\nsubtype :: Scope -> TypeU -> TypeU -> Gamma -> Either MDoc Gamma\n-- NatVarU: identical nat variables are equal; different ones fall to isNatExpr path\nsubtype _ (NatVarU v1) (NatVarU v2) g\n  | v1 == v2 = return g\n-- VarU vs VarT\nsubtype scope t1@(VarU a1) t2@(VarU a2) g\n  -- If everything is the same, do nothing\n  --\n  -- ----------------------------------------- <:Var\n  --  G[a] |- a_l <: a_l -| G[a]\n  | a1 == a2 = return g\n  | otherwise = subtypeEvaluated scope t1 t2 g\nsubtype scope a@ExistU {} b@ExistU {} g\n  --\n  -- ----------------------------------------- <:Exvar\n  --  G[E.a] |- E.a <: E.a -| G[E.a]\n  | a == b = return g\n  -- ----------------------------------------- <:InstantiateL/<:InstantiateR\n  --  G[E.a] |- Ea <: Ea -| G[E.a]\n  | otherwise = instantiate scope a b g\n-- formally, an `Ea notin FV(G)` check should be done here, but since the\n-- types involved are all existentials, it will always pass, so I omit\n-- it.\n\n-- EffectU: covariant subtyping with effect row subsumption.\n-- <E1> T1 <: <E2> T2 when E1 is a subset of E2 and T1 <: T2.\n-- Fewer effects can be used where more effects are expected.\nsubtype scope (EffectU e1 t1) (EffectU e2 t2) g\n  | effectSubsetOf e1 e2 = subtype scope t1 t2 g\n  | otherwise = subtype scope t1 t2 g -- permissive for now: EffectVar not yet solved\n-- OptionalU: covariant subtyping\nsubtype scope (OptionalU t1) (OptionalU t2) g = subtype scope t1 t2 g\n--  g1 |- B1 <: A1 -| g2\n--  g2 |- [g2]A2 <: [g2]B2 -| g3\n-- ----------------------------------------- <:-->\n--  g1 |- A1 -> A2 <: B1 -> B2 -| g3\n--\n-- function subtypes are *contravariant* with respect to the input, that is,\n-- the subtypes are reversed so we have b1<:a1 instead of a1<:b1.\n--\n-- Apply context between each argument subtype check so that solved\n-- existentials propagate to later arguments. This is necessary when a\n-- forall-bound variable appears in multiple argument positions (e.g.,\n-- (==) :: c -> c -> Bool passed to fold).\nsubtype scope t1@(FunU as1 ret1) t2@(FunU as2 ret2) g0\n  | length as1 /= length as2 = subtypeError t1 t2 \"function arity mismatch\"\n  | null as1 = subtype scope ret1 ret2 g0\n  | otherwise = do\n      -- Process all arguments (contravariant: b <: a), applying context between each\n      g1 <- foldlM (\\g (b, a) -> subtype scope (apply g b) (apply g a) g) g0 (zip as2 as1)\n      -- Apply context to return types, then subtype\n      subtype scope (apply g1 ret1) (apply g1 ret2) g1\n\n--  g1 |- A1 <: B1\n-- ----------------------------------------- <:App\n--  g1 |- A1 A2 <: B1 B2 -| g2\n--  unparameterized types are the same as VarT, so subtype on that instead\nsubtype scope t1@(AppU v1@(ExistU _ _ _) vs1) t2@(AppU v2 vs2) g\n  | length vs1 == length vs2 = zipSubtype t1 t2 scope (v1 : vs1) (v2 : vs2) g\n  | otherwise = subtypeEvaluated scope t1 t2 g\nsubtype scope t1@(AppU v1 vs1) t2@(AppU v2@(ExistU _ _ _) vs2) g\n  | length vs1 == length vs2 = zipSubtype t1 t2 scope (v1 : vs1) (v2 : vs2) g\n  | otherwise = subtypeEvaluated scope t1 t2 g\nsubtype scope t1@(AppU v1 vs1) t2@(AppU v2 vs2) g\n  | v1 == v2 && length vs1 == length vs2 = zipSubtype t1 t2 scope vs1 vs2 g\n  | otherwise = subtypeEvaluated scope t1 t2 g\n-- subtype unordered records\nsubtype scope (NamU _ v1 _ []) (NamU _ v2 _ []) g\n  -- If one of the records is generic, allow promotion\n  | v1 == BT.record || v2 == BT.record = return g\n  -- Otherwise subtype the variable names\n  | otherwise = subtype scope (VarU v1) (VarU v2) g\nsubtype _ t1@(NamU _ _ _ []) t2@(NamU _ _ _ _) _ =\n  subtypeError t1 t2 \"NamU - Unequal number of fields\"\nsubtype _ t1@(NamU _ _ _ _) t2@(NamU _ _ _ []) _ =\n  subtypeError t1 t2 \"NamU - Unequal number of fields\"\nsubtype scope t1@(NamU o1 v1 p1 ((k1, x1) : rs1)) t2@(NamU o2 v2 p2 es2) g0 =\n  case filterApart (\\(k2, _) -> k2 == k1) es2 of\n    (Nothing, _) -> subtypeError t1 t2 \"NamU - Unequal fields\"\n    (Just (_, x2), rs2) ->\n      subtype scope x1 x2 g0\n        >>= subtype scope (NamU o1 v1 p1 rs1) (NamU o2 v2 p2 rs2)\n--  Ea not in FV(a)\n--  g1[Ea] |- A <=: Ea -| g2\n-- ----------------------------------------- <:InstantiateR\n--  g1[Ea] |- A <: Ea -| g2\nsubtype scope a b@(ExistU _ ([], _) _) g = occursCheck b a \"InstantiateR\" >> instantiate scope a b g\n--  Ea not in FV(a)\n--  g1[Ea] |- Ea <=: A -| g2\n-- ----------------------------------------- <:InstantiateL\n--  g1[Ea] |- Ea <: A -| g2\nsubtype scope a@(ExistU _ ([], _) _) b g = occursCheck a b \"InstantiateL\" >> instantiate scope a b g\nsubtype scope a@(AppU _ _) b@(ExistU _ _ _) g = subtype scope b a g\nsubtype scope t1@(ExistU v1 (ps1, pc1) rs@([], _)) t2@(AppU _ ps2) g1\n  -- if the existential is closed and the parameter length is not equal, die\n  | pc1 == Closed && length ps1 /= length ps2 =\n      subtypeError t1 t2 \"InstantiateL - Expected equal number of type parameters\"\n  -- if the exsistential is open and it has fewer parameters, extend the\n  -- parameter list and retry\n  | pc1 == Open && length ps1 < length ps2 = do\n      let (ps1', _) = extendList ps1 ps2\n      subtype scope (ExistU v1 (ps1', pc1) rs) t2 g1\n  | length ps1 > length ps2 =\n      subtypeError t1 t2 \"InstantiateL - too many parameters in left existential\"\n  -- otherwise, do the thing\n  | otherwise = do\n      g2 <- foldM (\\g (p1, p2) -> subtype scope p1 p2 g) g1 (zip ps1 ps2)\n      solveExist v1 t2 g2 >>= maybe (return g2) return\n\n--  g1,>Ea,Ea |- [Ea/x]A <: B -| g2,>Ea,g3\n-- ----------------------------------------- <:ForallL\n--  g1 |- Forall x . A <: B -| g2\n--\nsubtype scope (ForallU v a) b g0 = subtype scope (substitute v a) b (g0 +> v)\n-- NOTE: I am deviating from the rules here by not cutting. It is not\n-- necessary to do so since I rewrote all qualifiers to be globally unique.\n-- Also, when I cut here I lose my only link to v, and that caused `map fst`\n-- to not compile.\n\n--  g1,a |- A <: B -| g2,a,g3\n-- ----------------------------------------- <:ForallR\n--  g1 |- A <: Forall a. B -| g2\nsubtype scope a (ForallU v b) g = subtype scope a b (g +> VarG v) >>= cut (VarG v)\n-- Nat expressions: compare via SOP normalization (handles commutativity,\n-- associativity, and cross-form equality like 2+3 ~ 5)\nsubtype _ t1 t2 g\n  | isNatExpr t1 && isNatExpr t2 =\n      let t1' = apply g t1\n          t2' = apply g t2\n      in case (typeUToNatExpr t1', typeUToNatExpr t2') of\n           (Just ne1, Just ne2) ->\n             case NS.solveNat ne1 ne2 of\n               Right subs\n                 | Map.null subs -> return g\n                 | otherwise -> applyNatSolutions subs g\n               Left NS.Contradiction -> subtypeError t1 t2 \"Nat constraint mismatch\"\n               Left (NS.Deferred _) -> return g { gammaDeferred = (t1', t2') : gammaDeferred g }\n           _ -> subtypeError t1 t2 \"Cannot compare Nat expressions\"\n-- note that these need to be evaluated AFTER all the existentials\nsubtype scope t1@(VarU _) t2 g = subtypeEvaluated scope t1 t2 g\nsubtype scope t1 t2@(VarU _) g = subtypeEvaluated scope t1 t2 g\n-- fall through\nsubtype _ a b _ = subtypeError a b \"Type mismatch fall through\"\n\nzipSubtype :: TypeU -> TypeU -> Scope -> [TypeU] -> [TypeU] -> Gamma -> Either MDoc Gamma\nzipSubtype _ _ _ [] [] g' = return g'\nzipSubtype a b scope (t1' : ts1') (t2' : ts2') g' = do\n  g'' <- subtype scope t1' t2' g'\n  zipSubtype a b scope (map (apply g'') ts1') (map (apply g'') ts2') g''\nzipSubtype a b _ _ _ _ = subtypeError a b \"Parameter type mismatch\"\n\n-- | Dunfield Figure 10 -- type-level structural recursion\ninstantiate :: Scope -> TypeU -> TypeU -> Gamma -> Either MDoc Gamma\ninstantiate scope ta@(ExistU _ _ (_ : _, _)) tb@(NamU _ _ _ _) g1 = instantiate scope tb ta g1\ninstantiate scope ta@(ExistU _ _ (_ : _, _)) tb@(VarU _) g1 = instantiate scope tb ta g1\ninstantiate scope ta@(VarU _) tb@(ExistU _ _ (_ : _, _)) g1 = do\n  case TE.reduceType scope ta of\n    (Just ta') -> instantiate scope ta' tb g1\n    Nothing -> subtypeError ta tb \"Error in VarU versus NamU with existential keys\"\ninstantiate scope ta@(NamU _ _ _ rs1) tb@(ExistU v _ (rs2@(_ : _), rc)) g1 = do\n  let keyset1 = Set.fromList $ map fst rs1\n      keyset2 = Set.fromList $ map fst rs2\n  _ <- case rc of\n    -- if the existential keys are closed, the the ta and tb keys must be identical\n    Closed ->\n      if keyset1 == keyset2\n        then return ()\n        else subtypeError ta tb \"Error in NamU with conflicting closed keysets\"\n    -- if the existential keys are open, then all existential keys muts be in\n    -- ta, but not vice versa\n    Open ->\n      if Set.isSubsetOf keyset2 keyset1\n        then return ()\n        else subtypeError ta tb \"Error in NamU with conflicting open keysets\"\n\n  g2 <-\n    foldM\n      (\\g' (t1, t2) -> subtype scope t1 t2 g')\n      g1\n      [(t1, t2) | (k1, t1) <- rs1, (k2, t2) <- rs2, k1 == k2]\n  solveExist v ta g2 >>= maybe (subtypeError ta tb \"Error in NamU with existential keys\") return\n-- ExistU vs EffectU: solve ?a = <effs> ?b, then ?b <: inner\ninstantiate scope (ExistU v ([], _) _) (EffectU effs inner) g1 = do\n  let (g2, veb) = tvarname g1 \"eff\"\n      eb = ExistU veb ([], Open) ([], Open)\n  g3 <- solveExistWith v (EffectU effs eb) [index eb] g2 >>= maybe (return g2) return\n  instantiate scope eb (apply g3 inner) g3\ninstantiate scope (EffectU effs inner) (ExistU v ([], _) _) g1 = do\n  let (g2, veb) = tvarname g1 \"eff\"\n      eb = ExistU veb ([], Open) ([], Open)\n  g3 <- solveExistWith v (EffectU effs eb) [index eb] g2 >>= maybe (return g2) return\n  instantiate scope (apply g3 inner) eb g3\n-- ExistU vs OptionalU: solve ?a = ??b, then ?b <: inner\ninstantiate scope (ExistU v ([], _) _) (OptionalU inner) g1 = do\n  let (g2, veb) = tvarname g1 \"opt\"\n      eb = ExistU veb ([], Open) ([], Open)\n  g3 <- solveExistWith v (OptionalU eb) [index eb] g2 >>= maybe (return g2) return\n  instantiate scope eb (apply g3 inner) g3\ninstantiate scope (OptionalU inner) (ExistU v ([], _) _) g1 = do\n  let (g2, veb) = tvarname g1 \"opt\"\n      eb = ExistU veb ([], Open) ([], Open)\n  g3 <- solveExistWith v (OptionalU eb) [index eb] g2 >>= maybe (return g2) return\n  instantiate scope (apply g3 inner) eb g3\ninstantiate scope (ExistU v ([], _) _) (FunU as b) g1 = do\n  let (g2, veas) = statefulMap (\\g _ -> tvarname g \"ta\") g1 as\n      (g3, veb) = tvarname g2 \"to\"\n      eas = [ExistU v' ([], Open) ([], Open) | v' <- veas]\n      eb = ExistU veb ([], Open) ([], Open)\n  g4 <- solveExistWith v (FunU eas eb) (index eb : map index eas) g3 >>= maybe (return g3) return\n  g5 <- foldlM (\\g (e, t) -> instantiate scope e t g) g4 (zip eas as)\n  instantiate scope eb (apply g5 b) g5\n\n--  g1[Ea2,Ea1,Ea=Ea1->Ea2] |- Ea1 <=: A1 -| g2\n--  g2 |- [g2]A2 <=: Ea2 -| g3\n-- ----------------------------------------- InstRApp\n--  g1[Ea] |- A1 -> A2 <=: Ea -| g3\ninstantiate scope (FunU as b) (ExistU v ([], _) _) g1 = do\n  let (g2, veas) = statefulMap (\\g _ -> tvarname g \"ta\") g1 as\n      (g3, veb) = tvarname g2 \"to\"\n      eas = [ExistU v' ([], Open) ([], Open) | v' <- veas]\n      eb = ExistU veb ([], Open) ([], Open)\n  g4 <- solveExistWith v (FunU eas eb) (index eb : map index eas) g3 >>= maybe (return g3) return\n  g5 <- foldlM (\\g (e, t) -> instantiate scope t e g) g4 (zip eas as)\n  instantiate scope eb (apply g5 b) g5\n\n-- This is terrible kludge, I am not close to having considered all the edge\n-- cases. I need to completely rewrite my type system. Argh. I also need to get\n-- rid of all default types. Defaults should be set explicitly in morloc code.\ninstantiate _ ta@(ExistU _ _ (_ : _, _)) (ExistU v ([], _) ([], _)) g1 =\n  solveExist v ta g1 >>= maybe (return g1) return\ninstantiate _ (ExistU v ([], _) ([], _)) tb@(ExistU _ _ (_ : _, _)) g1 =\n  solveExist v tb g1 >>= maybe (return g1) return\n\n--\n-- ----------------------------------------- InstLAllR\n--\ninstantiate scope ta@(ExistU _ _ _) (ForallU v2 t2) g1 =\n  instantiate scope ta t2 (g1 +> VarG v2)\n    >>= cut (VarG v2)\n-- InstLReach or instRReach -- each rule eliminates an existential\n-- Replace the rightmost with leftmost (G[a][b] --> L,a,M,b=a,R)\n-- WARNING: be careful here, since the implementation adds to the front and the\n-- formal syntax adds to the back. Don't change anything in the function unless\n-- you really know what you are doing and have tests to confirm it.\ninstantiate scope ta@(ExistU v1 (ps1, pc1) (rs1, rc1)) tb@(ExistU v2 (ps2, pc2) (rs2, rc2)) g1 = do\n  -- check and expand open parameters\n  (ps1', ps2') <- case (pc1, pc2, compare (length ps1) (length ps2)) of\n    (_, _, EQ) -> Right (ps1, ps2)\n    (Closed, Closed, _) -> subtypeError ta tb \"Unequal parameter length for closed existentials\"\n    (Closed, Open, GT) -> Right $ extendList ps1 ps2\n    (Closed, Open, LT) -> subtypeError ta tb \"Left closed existential parameter list is less than right\"\n    (Open, Closed, LT) -> Right $ extendList ps1 ps2\n    (Open, Closed, GT) -> subtypeError ta tb \"Right closed existential parameter list is less than left\"\n    (Open, Open, _) -> Right $ extendList ps1 ps2\n\n  let keyset1 = Set.fromList (map fst rs1)\n  let keyset2 = Set.fromList (map fst rs2)\n\n  -- check and expand open records\n  (g2, rs1', rs2') <- case (rc1, rc2, Set.isSubsetOf keyset1 keyset2, Set.isSubsetOf keyset2 keyset1) of\n    (Closed, Closed, False, _) -> subtypeError ta tb \"Right closed existential contains keys missing in left closed existential\"\n    (Closed, Closed, _, False) -> subtypeError ta tb \"Right closed existential contains keys missing in left closed existential\"\n    (Closed, Open, a, False) ->\n      subtypeError ta tb $\n        \"Right existential contains keys missing in left closed existential \" <> pretty a\n    (Open, Closed, False, b) ->\n      subtypeError ta tb $\n        \"Left existential contains keys missing in right closed existential \" <> pretty b\n    _ -> extendRec scope g1 rs1 rs2\n\n  g3 <- foldM (\\g (t1, t2) -> subtype scope t1 t2 g) g2 (zip ps1 ps2)\n  g4 <-\n    foldM\n      (\\g' (t1, t2) -> subtype scope t1 t2 g')\n      g3\n      [(t1, t2) | (k1, t1) <- rs1, (k2, t2) <- rs2, k1 == k2]\n\n  -- define new types to insert\n  let taExpanded = ExistU v1 (ps1', pc1) (rs1', rc1)\n  let tbExpanded = ExistU v2 (ps2', pc1) (rs2', rc1)\n\n  -- Check gammaSolved first: if either is already solved, skip access2\n  case (Map.lookup v1 (gammaSolved g4), Map.lookup v2 (gammaSolved g4)) of\n    (Just t1, Just t2) -> subtype scope t1 t2 g4\n    (Just t1, _)       -> subtype scope t1 tb g4\n    (_, Just t2)       -> subtype scope ta t2 g4\n    _ -> case access2 v1 v2 g4 of\n      -- InstLReach: v1 is newer than v2, solve v1 = tbExpanded\n      Just _ -> solveExist v1 tbExpanded g4 >>= maybe (return g4) return\n      Nothing -> case access2 v2 v1 g4 of\n        -- InstRReach: v2 is newer than v1, solve v2 = taExpanded\n        Just _ -> solveExist v2 taExpanded g4 >>= maybe (return g4) return\n        Nothing -> return g4\n\n--  g1[Ea],>Eb,Eb |- [Eb/x]B <=: Ea -| g2,>Eb,g3\n-- ----------------------------------------- InstRAllL\n--  g1[Ea] |- Forall x. B <=: Ea -| g2\ninstantiate scope (ForallU x b) tb@(ExistU _ ([], _) _) g1 =\n  instantiate\n    scope\n    (substitute x b) -- [Eb/x]B\n    tb -- Ea\n    (g1 +> MarkG x +> ExistG x ([], Open) ([], Open)) -- g1[Ea],>Eb,Eb\n    >>= cut (MarkG x)\n--  g1 |- t\n-- ----------------------------------------- InstRSolve\n--  g1,Ea,g2 |- t <=: Ea -| g1,Ea=t,g2\ninstantiate scope ta (ExistU v ([], _) ([], _)) g1 =\n  case lookupU v g1 of\n    Just t  -> subtype scope ta t g1 >>= specializeExist scope v t ta\n    Nothing -> solveExist v ta g1 >>= maybe (return g1) return\n\n--  g1 |- t\n-- ----------------------------------------- instLSolve\n--  g1,Ea,g2 |- Ea <=: t -| g1,Ea=t,g2\ninstantiate scope (ExistU v ([], _) ([], _)) tb g1 =\n  case lookupU v g1 of\n    Just t  -> subtype scope t tb g1 >>= specializeExist scope v t tb\n    Nothing -> solveExist v tb g1 >>= maybe (return g1) return\n\ninstantiate _ ta tb _ = subtypeError ta tb \"Unexpected types\"\n\n-- | After a subtype check succeeds between a solved existential's current\n-- value and a new type, check if the new type is more specialized (a\n-- descendant in the alias hierarchy). If so, update the solution.\n-- E.g., if ?a = List Int and we check against Deque Int, update to Deque Int\n-- since Deque is a specialization of List.\nspecializeExist :: Scope -> TVar -> TypeU -> TypeU -> Gamma -> Either MDoc Gamma\nspecializeExist scope v currentType newType g\n  | isMoreSpecialized scope newType currentType = Right $ cacheSolved v newType g\n  | otherwise = Right g\n\n-- | Check if t1 is a more specialized (descendant) alias of t2.\n-- t1 is more specialized if it has an alias definition that evaluates to\n-- t2's head constructor, while t2 does not evaluate to t1's head.\nisMoreSpecialized :: Scope -> TypeU -> TypeU -> Bool\nisMoreSpecialized scope (AppU (VarU v1) _) (AppU (VarU v2) _) =\n  v1 /= v2 && reducesToHead scope v1 v2\n  where\n    reducesToHead scope' src target =\n      case Map.lookup src scope' of\n        Just ((ps, _, _, _) : _)\n          | all isGenericParam ps && not (null ps) ->\n            let n = length ps\n                freshVars = [VarU (TV (MT.show' i <> \"__spec_cmp\")) | i <- [0 .. n - 1]]\n                app = AppU (VarU src) freshVars\n            in case TE.evaluateType scope' app of\n                 Right (AppU (VarU headV) _) -> headV == target\n                 _ -> False\n        _ -> False\n    isGenericParam (Left _) = True\n    isGenericParam _ = False\nisMoreSpecialized _ _ _ = False\n\nsolve :: TVar -> TypeU -> Either MDoc GammaIndex\nsolve v t\n  | occursIn v t =\n      Left $ \"Infinite recursion, cannot substitute\" <+> pretty v <+> \"into type\" <+> pretty t\n  | otherwise = Right (SolvedG v t)\n  where\n    occursIn :: TVar -> TypeU -> Bool\n    occursIn v' (VarU v'') = v' == v''\n    occursIn _ (NatVarU _) = False\n    occursIn v' (ExistU v'' (ps, _) (rs, _)) = v' == v'' || any (occursIn v') ps || any (occursIn v' . snd) rs\n    occursIn v' (ForallU _ t') = occursIn v' t'\n    occursIn v' (FunU ts t') = any (occursIn v') ts || occursIn v' t'\n    occursIn v' (AppU t' ts) = occursIn v' t' || any (occursIn v') ts\n    occursIn v' (NamU _ _ ps rs) = any (occursIn v') ps || any (occursIn v' . snd) rs\n    occursIn v' (EffectU _ t') = occursIn v' t'\n    occursIn v' (OptionalU t') = occursIn v' t'\n    occursIn _ (NatLitU _) = False\n    occursIn v' (NatAddU a b) = occursIn v' a || occursIn v' b\n    occursIn v' (NatMulU a b) = occursIn v' a || occursIn v' b\n    occursIn v' (NatSubU a b) = occursIn v' a || occursIn v' b\n    occursIn v' (NatDivU a b) = occursIn v' a || occursIn v' b\n    occursIn v' (LabeledU _ t') = occursIn v' t'\n\n-- | Record a solved variable in the gamma map cache\ncacheSolved :: TVar -> TypeU -> Gamma -> Gamma\ncacheSolved v t g = g {gammaSolved = Map.insert v t (gammaSolved g)}\n\noccursCheck :: TypeU -> TypeU -> Text -> Either MDoc ()\noccursCheck t1 t2 place =\n  if Set.member t1 (free t2)\n    then subtypeError t1 t2 $ \"Occurs check at\" <+> pretty place\n    else Right ()\n\n{- | substitute all appearances of a given variable with an existential\n[t/v]A\n-}\nsubstitute :: TVar -> TypeU -> TypeU\nsubstitute v = substituteTVar v (ExistU v ([], Open) ([], Open))\n\n-- | Find an unsolved ExistG by TVar. O(log N) via gammaExist index.\n-- Returns the slot and entry if found.\naccess1 :: TVar -> Gamma -> Maybe (Int, GammaIndex)\naccess1 v g = do\n  slot <- Map.lookup v (gammaExist g)\n  entry <- IntMap.lookup slot (gammaContext g)\n  return (slot, entry)\n\n-- | Check if ExistG v1 has a HIGHER slot (= newer) than ExistG v2.\n-- Used for InstLReach/InstRReach ordering.\naccess2 :: TVar -> TVar -> Gamma -> Maybe (Int, Int)\naccess2 v1 v2 g = do\n  s1 <- Map.lookup v1 (gammaExist g)\n  s2 <- Map.lookup v2 (gammaExist g)\n  if s1 > s2 then Just (s1, s2) else Nothing\n\n-- | Solve an ExistG: replace it with SolvedG in place. O(log N).\n-- Returns Right Nothing if the ExistG is not found (already solved).\n-- Returns Left on solve error (e.g., occurs check). Returns Right (Just g) on success.\nsolveExist :: TVar -> TypeU -> Gamma -> Either MDoc (Maybe Gamma)\nsolveExist v t g = case Map.lookup v (gammaExist g) of\n  Nothing -> Right Nothing\n  Just slot -> do\n    solved <- solve v t\n    return . Just $ cacheSolved v t $ g\n      { gammaContext = IntMap.insert slot solved (gammaContext g)\n      , gammaExist = Map.delete v (gammaExist g)\n      }\n\n-- | Solve an ExistG and insert additional entries between the solved\n-- position and older entries. O(log N + K) where K = length extras.\n-- The first element of extras gets the highest sub-slot (= newest).\n-- Returns Right Nothing if ExistG not found. Left on solve error.\nsolveExistWith :: TVar -> TypeU -> [GammaIndex] -> Gamma -> Either MDoc (Maybe Gamma)\nsolveExistWith v t extras g = case Map.lookup v (gammaExist g) of\n  Nothing -> Right Nothing\n  Just slot -> do\n    solved <- solve v t\n    let g1 = cacheSolved v t $ g\n          { gammaContext = IntMap.insert slot solved (gammaContext g)\n          , gammaExist = Map.delete v (gammaExist g)\n          }\n        -- Insert extras at slots below the solved entry\n        insertExtra (g', subSlot) x =\n          ( g' { gammaContext = IntMap.insert subSlot x (gammaContext g')\n               , gammaExist = case x of\n                   ExistG ev _ _ -> Map.insert ev subSlot (gammaExist g')\n                   _ -> gammaExist g'\n               }\n          , subSlot - 1\n          )\n    return . Just $ fst $ foldl' insertExtra (g1, slot - 1) extras\n\n-- | Look up a solved existential type variable. O(log N) via gammaSolved.\nlookupU :: TVar -> Gamma -> Maybe TypeU\nlookupU v g = Map.lookup v (gammaSolved g)\n\n-- | Look up an annotation type variable. O(N) scan (AnnG entries are rare).\nlookupE :: EVar -> Gamma -> Maybe TypeU\nlookupE v g = foldr step Nothing (IntMap.toDescList (gammaContext g))\n  where\n    step (_, AnnG v' t) _ | v == v' = Just t\n    step _ acc = acc\n\n-- | Remove context entries newer than (and including) a marker. O(N) in\n-- removed entries for cleanup; O(log N) for the IntMap split itself.\ncut :: GammaIndex -> Gamma -> Either MDoc Gamma\ncut marker g = do\n  -- Find the marker's slot by scanning (markers are infrequent)\n  markerSlot <- case [s | (s, gi) <- IntMap.toDescList (gammaContext g), gi == marker] of\n    (s : _) -> Right s\n    [] -> Left $ \"Empty cut\" <+> pretty marker\n  -- Everything with slot < markerSlot is kept (older entries)\n  let kept = fst (IntMap.split markerSlot (gammaContext g))\n      -- Collect removed entries for cleanup\n      removedSlots = IntMap.filterWithKey (\\s _ -> s >= markerSlot) (gammaContext g)\n      removedSolvedKeys = [v | (_, SolvedG v _) <- IntMap.toList removedSlots]\n      removedExistKeys = [v | (_, ExistG v _ _) <- IntMap.toList removedSlots]\n      solvedMap' = foldl' (flip Map.delete) (gammaSolved g) removedSolvedKeys\n      existMap' = foldl' (flip Map.delete) (gammaExist g) removedExistKeys\n  return $ g\n    { gammaContext = kept\n    , gammaExist = existMap'\n    , gammaSolved = solvedMap'\n    }\n\n-- | Convert context to a list (newest first) for iteration/debugging.\ngammaContextList :: Gamma -> [GammaIndex]\ngammaContextList g = map snd (IntMap.toDescList (gammaContext g))\n\n-- | Trim context back to the state it had at a given slot counter value.\n-- Removes all entries with slot >= the given threshold.\ngammaTrimAfter :: Int -> Gamma -> Gamma\ngammaTrimAfter slotThreshold g =\n  let kept = fst (IntMap.split slotThreshold (gammaContext g))\n      removedSlots = IntMap.filterWithKey (\\s _ -> s >= slotThreshold) (gammaContext g)\n      removedSolvedKeys = [v | (_, SolvedG v _) <- IntMap.toList removedSlots]\n      removedExistKeys = [v | (_, ExistG v _ _) <- IntMap.toList removedSlots]\n      solvedMap' = foldl' (flip Map.delete) (gammaSolved g) removedSolvedKeys\n      existMap' = foldl' (flip Map.delete) (gammaExist g) removedExistKeys\n  in g\n    { gammaContext = kept\n    , gammaSlot = slotThreshold\n    , gammaExist = existMap'\n    , gammaSolved = solvedMap'\n    }\n\nselectorType :: Gamma -> Selector -> MorlocMonad (Gamma, TypeU)\nselectorType g0 SelectorEnd = do\n  let (g1, s) = newvar \"_pattern_\" g0\n  return (g1, s)\nselectorType g0 (SelectorIdx x xs) = do\n  -- highest index in this pattern, matching tuple must be at least this long\n  let maxIndex = maximum (map fst (x : xs))\n\n  -- combine groups, e.g.: .(.1.(0,1), .1.2, .2) --> .(.1.(0,1,2), .2)\n  xs' <- mapM (secondM weaveSelectors) (groupSort (x : xs))\n\n  (g1, ts) <- statefulMapM (makeIndexType xs') g0 (take (maxIndex + 1) [0 ..])\n\n  return $ newvarRich (ts, Open) ([], Closed) \"_pattern_\" g1\n  where\n    makeIndexType :: [(Int, Selector)] -> Gamma -> Int -> MorlocMonad (Gamma, TypeU)\n    makeIndexType xs' g i = case lookup i xs' of\n      (Just s) -> selectorType g s\n      Nothing -> selectorType g SelectorEnd\nselectorType g0 (SelectorKey x xs) = do\n  xs' <- mapM (secondM weaveSelectors) (groupSort (x : xs))\n  (g1, ss) <- statefulMapM selectorType g0 (map snd xs')\n  return $ newvarRich ([], Closed) (zip (map (Key . fst) xs') ss, Open) \"_pattern_\" g1\n\nweaveSelectors :: [Selector] -> MorlocMonad Selector\nweaveSelectors [] = return SelectorEnd\nweaveSelectors (s0 : ss0) = foldrM weavePair s0 ss0\n  where\n    weavePair :: Selector -> Selector -> MorlocMonad Selector\n    weavePair SelectorEnd s = return s\n    weavePair s SelectorEnd = return s\n    weavePair (SelectorIdx s1 ss1) (SelectorIdx s2 ss2) = do\n      xs <- mapM (secondM weaveSelectors) (groupSort ((s1 : ss1) <> (s2 : ss2)))\n      return $ SelectorIdx (head xs) (tail xs)\n    weavePair (SelectorKey s1 ss1) (SelectorKey s2 ss2) = do\n      xs <- mapM (secondM weaveSelectors) (groupSort ((s1 : ss1) <> (s2 : ss2)))\n      return $ SelectorKey (head xs) (tail xs)\n    weavePair x@(SelectorKey _ _) y@(SelectorIdx _ _) = weavePair y x\n    weavePair (SelectorIdx _ _) (SelectorKey _ _) = MM.throwSystemError $ \"Bad pattern, cannot merge index and keyword patterns\"\n\nselectorGetter :: TypeU -> Selector -> [TypeU]\nselectorGetter t SelectorEnd = [t]\nselectorGetter (ExistU _ _ (ks, _)) (SelectorKey x xs) =\n  concat [maybe [] (\\t -> selectorGetter t s) (lookup (Key k) ks) | (k, s) <- (x : xs)]\nselectorGetter (ExistU _ (ts, _) _) (SelectorIdx x xs) =\n  concat [selectorGetter (ts !! i) s | (i, s) <- (x : xs)]\nselectorGetter _ _ = error \"Unreachable\"\n\n-- | map over a type using a selector and update the type using set values\nselectorSetter ::\n  [TypeU] -> -- types to which the selected fields are set\n  Selector -> -- current selector pattern\n  TypeU -> -- current type that is being updated\n  TypeU -- modified return type\nselectorSetter setTypes0 s0 t0 = fst (f t0 setTypes0 s0)\n  where\n    f ::\n      TypeU ->\n      [TypeU] ->\n      Selector ->\n      (TypeU, [TypeU]) -- the modified type and the list of remaining setters\n    f _ (t : ts) SelectorEnd = (t, ts)\n    f (ExistU v (ts, tc) (ks, kc)) setTypes1 (SelectorKey s ss) =\n      let (ks', setTypes2) = foldr subKey (ks, setTypes1) (s : ss)\n       in (ExistU v (ts, tc) (ks', kc), setTypes2)\n    f (NamU o v ps ks) setTypes1 (SelectorKey s ss) =\n      let (ks', setTypes2) = foldr subKey (ks, setTypes1) (s : ss)\n       in (NamU o v ps ks', setTypes2)\n    -- handle non-existential records\n    --  * note that this may well change the field type of the record, this should\n    --    raise an error later if such changes are not allowed\n    f (ExistU v (ts, tc) (ks, kc)) setTypes1 (SelectorIdx s ss) =\n      let (ts', setTypes2) = foldl subIdx (ts, setTypes1) (s : ss)\n       in (ExistU v (ts', tc) (ks, kc), setTypes2)\n    -- handle non-existential tuples\n    f (AppU t ts) setTypes1 (SelectorIdx s ss)\n      -- if this is a tuple, fine, proceed\n      | (VarU (BT.tuple (length ts))) == t =\n          let (ts', setTypes2) = foldl subIdx (ts, setTypes1) (s : ss)\n           in (AppU t ts', setTypes2)\n      -- otherwise die\n      | otherwise = error \"Unreachable case\"\n    -- and die some more\n    f _ _ _ = error \"Unreachable pattern case\"\n\n    subKey :: (Text, Selector) -> ([(Key, TypeU)], [TypeU]) -> ([(Key, TypeU)], [TypeU])\n    subKey (k, s) (ks, setTypesN) = case lookup (Key k) ks of\n      Nothing -> error \"Malformed pattern\"\n      (Just priorType) -> (ks', setTypesN')\n        where\n          (newType, setTypesN') = f priorType setTypesN s\n          ks' = [if k' == k then (Key k, newType) else x | x@(Key k', _) <- ks]\n\n    subIdx :: ([TypeU], [TypeU]) -> (Int, Selector) -> ([TypeU], [TypeU])\n    subIdx (ts, setTypesN) (i, s)\n      | i < length ts =\n          let (newType, setTypesN') = f (ts !! i) setTypesN s\n           in (take i ts <> [newType] <> drop (i + 1) ts, setTypesN')\n      | otherwise = error $ \"Bad pattern, index \" <> show i <> \" is greather than tuple length\"\n\nextendList :: [a] -> [a] -> ([a], [a])\nextendList [] ys = (ys, ys)\nextendList xs [] = (xs, xs)\nextendList (x : xs) (y : ys) =\n  let (xs', ys') = extendList xs ys\n   in (x : xs', y : ys')\n\nextendRec ::\n  (Ord k) =>\n  Scope ->\n  Gamma ->\n  [(k, TypeU)] ->\n  [(k, TypeU)] ->\n  Either MDoc (Gamma, [(k, TypeU)], [(k, TypeU)])\nextendRec scope g0 xs ys = do\n  g1 <-\n    foldlM\n      ( \\g (k, x) ->\n          maybe\n            (return g)\n            (\\y -> subtype scope x y g)\n            (lookup k ys)\n      )\n      g0\n      xs\n  return $\n    ( g1\n    , xs <> [y | y@(k, _) <- ys, Set.notMember k setX]\n    , ys <> [x | x@(k, _) <- xs, Set.notMember k setY]\n    )\n  where\n    setX = Set.fromList (map fst xs)\n    setY = Set.fromList (map fst ys)\n\nnewvar :: Text -> Gamma -> (Gamma, TypeU)\nnewvar = newvarRich ([], Open) ([], Open)\n\nnewvarRich ::\n  -- | type parameters\n  ([TypeU], OpenOrClosed) ->\n  -- | key-value pairs\n  ([(Key, TypeU)], OpenOrClosed) ->\n  -- | prefix, just for readability\n  Text ->\n  Gamma ->\n  (Gamma, TypeU)\nnewvarRich ps rs prefix g =\n  let (g', v) = tvarname g prefix\n   in (g' +> ExistG v ps rs, ExistU v ps rs)\n\n-- | standardize quantifier names, for example, replace `a -> b` with `v0 -> v1`.\nrename :: Gamma -> TypeU -> (Gamma, TypeU)\nrename g0 (ForallU v@(TV s) t0) =\n  let (g1, v') = tvarname g0 (s <> \"___q\")\n      (g2, t1) = rename g1 t0\n      t2 = substituteTVar v (VarU v') t1\n   in (g2, ForallU v' t2)\n-- After stripping ForallU, rename NatVarU variables to fresh names\nrename g0 t0 =\n  let nvs = nub (collectNatVarNames t0)\n   in if null nvs then (g0, t0)\n      else\n        let (g1, nvs') = statefulMap (\\g (TV s) -> tvarname g (s <> \"___n\")) g0 nvs\n            renameMap = Map.fromList (zip nvs nvs')\n         in (g1, renameNatVars renameMap t0)\n\n-- | Rename NatVarU variables according to a mapping\nrenameNatVars :: Map.Map TVar TVar -> TypeU -> TypeU\nrenameNatVars m = go\n  where\n    ren v = Map.findWithDefault v v m\n    go (NatVarU v) = NatVarU (ren v)\n    go (VarU v) = VarU v\n    go (ExistU v (ts, tc) (rs, rc)) = ExistU v (map go ts, tc) ([(k, go t) | (k, t) <- rs], rc)\n    go (ForallU v t) = ForallU v (go t)\n    go (FunU ts t) = FunU (map go ts) (go t)\n    go (AppU t ts) = AppU (go t) (map go ts)\n    go (NamU n o ps rs) = NamU n o (map go ps) [(k, go t) | (k, t) <- rs]\n    go (EffectU effs t) = EffectU effs (go t)\n    go (OptionalU t) = OptionalU (go t)\n    go t@(NatLitU _) = t\n    go (NatAddU a b) = NatAddU (go a) (go b)\n    go (NatMulU a b) = NatMulU (go a) (go b)\n    go (NatSubU a b) = NatSubU (go a) (go b)\n    go (NatDivU a b) = NatDivU (go a) (go b)\n    go (LabeledU n t) = LabeledU n (go t)\n\n{- | Rename all generic type variables (ForallU-bound and ExistU) to clean\nletters from a lazy pool: a, b, c, ..., z, a1, b1, ..., z1, a2, ...\nAvoids names already used by concrete types in the expression.\n-}\ncleanTypeName :: TypeU -> TypeU\ncleanTypeName t0 =\n  let (vs, body) = unqualify t0\n      evs = collectExistVars body\n      nvs = collectNatVarNames body\n      allGeneric = nub (vs ++ evs ++ nvs)\n      fixed = collectFixedNames (Set.fromList allGeneric) body\n      pool = filter (\\(TV n) -> Set.notMember n fixed) letterPool\n      renameMap = Map.fromList (zip allGeneric pool)\n      renamedBody = applyVarRenaming renameMap body\n      renamedVs = map (\\v -> Map.findWithDefault v v renameMap) vs\n   in simplifyNats $ qualify renamedVs renamedBody\n\n-- | Simplify nat arithmetic in types (e.g., 34 + (4 + 5) -> 43)\nsimplifyNats :: TypeU -> TypeU\nsimplifyNats = go\n  where\n    go (NatAddU a b) = trySimplify (NatAddU (go a) (go b))\n    go (NatMulU a b) = trySimplify (NatMulU (go a) (go b))\n    go (NatSubU a b) = trySimplify (NatSubU (go a) (go b))\n    go (NatDivU a b) = trySimplify (NatDivU (go a) (go b))\n    go (AppU f ts) = AppU (go f) (map go ts)\n    go (FunU ts r) = FunU (map go ts) (go r)\n    go (ForallU v t) = ForallU v (go t)\n    go (NamU o v ps es) = NamU o v (map go ps) [(k, go t) | (k, t) <- es]\n    go (ExistU v (ps, pc) (rs, rc)) = ExistU v (map go ps, pc) ([(k, go t) | (k, t) <- rs], rc)\n    go (EffectU e t) = EffectU e (go t)\n    go (OptionalU t) = OptionalU (go t)\n    go t@(NatVarU _) = t\n    go (LabeledU n t) = LabeledU n (go t)\n    go t = t\n\n    trySimplify nat = case typeUToNatExpr nat of\n      Just ne -> natExprToTypeU (NS.sopToNatExpr (NS.normalize ne))\n      Nothing -> nat\n\nletterPool :: [TVar]\nletterPool =\n  [ TV (MT.singleton c <> suffix)\n  | suffix <- \"\" : map MT.show' [1 :: Int ..]\n  , c <- ['a' .. 'z']\n  ]\n\ncollectExistVars :: TypeU -> [TVar]\ncollectExistVars = go\n  where\n    go (VarU _) = []\n    go (NatVarU _) = []\n    go (ExistU v (ts, _) (rs, _)) = v : concatMap go ts ++ concatMap (go . snd) rs\n    go (ForallU _ t) = go t\n    go (FunU ts t) = concatMap go (t : ts)\n    go (AppU t ts) = concatMap go (t : ts)\n    go (NamU _ _ ps rs) = concatMap go ps ++ concatMap (go . snd) rs\n    go (EffectU _ t) = go t\n    go (OptionalU t) = go t\n    go (NatLitU _) = []\n    go (NatAddU a b) = go a ++ go b\n    go (NatMulU a b) = go a ++ go b\n    go (NatSubU a b) = go a ++ go b\n    go (NatDivU a b) = go a ++ go b\n    go (LabeledU _ t) = go t\n\n-- | Collect NatVarU variable names from a type (for renaming)\ncollectNatVarNames :: TypeU -> [TVar]\ncollectNatVarNames = go\n  where\n    go (NatVarU v) = [v]\n    go (VarU _) = []\n    go (ExistU _ (ts, _) (rs, _)) = concatMap go ts ++ concatMap (go . snd) rs\n    go (ForallU _ t) = go t\n    go (FunU ts t) = concatMap go (t : ts)\n    go (AppU t ts) = concatMap go (t : ts)\n    go (NamU _ _ ps rs) = concatMap go ps ++ concatMap (go . snd) rs\n    go (EffectU _ t) = go t\n    go (OptionalU t) = go t\n    go (NatLitU _) = []\n    go (NatAddU a b) = go a ++ go b\n    go (NatMulU a b) = go a ++ go b\n    go (NatSubU a b) = go a ++ go b\n    go (NatDivU a b) = go a ++ go b\n    go (LabeledU _ t) = go t\n\ncollectFixedNames :: Set.Set TVar -> TypeU -> Set.Set Text\ncollectFixedNames generics = go\n  where\n    go (VarU v)\n      | Set.member v generics = Set.empty\n      | otherwise = Set.singleton (unTVar v)\n    go (NatVarU _) = Set.empty\n    go (ExistU _ (ts, _) (rs, _)) = Set.unions (map go ts ++ map (go . snd) rs)\n    go (ForallU _ t) = go t\n    go (FunU ts t) = Set.unions $ map go (t : ts)\n    go (AppU t ts) = Set.unions $ map go (t : ts)\n    go (NamU _ (TV n) ps rs) =\n      Set.insert n $ Set.unions (map go ps ++ map (go . snd) rs)\n    go (EffectU _ t) = go t\n    go (OptionalU t) = go t\n    go (NatLitU _) = Set.empty\n    go (NatAddU a b) = Set.union (go a) (go b)\n    go (NatMulU a b) = Set.union (go a) (go b)\n    go (NatSubU a b) = Set.union (go a) (go b)\n    go (NatDivU a b) = Set.union (go a) (go b)\n    go (LabeledU _ t) = go t\n\napplyVarRenaming :: Map.Map TVar TVar -> TypeU -> TypeU\napplyVarRenaming m = go\n  where\n    ren v = Map.findWithDefault v v m\n    go (VarU v) = VarU (ren v)\n    go (NatVarU v) = NatVarU (ren v)\n    go (ExistU v (ts, tc) (rs, rc)) =\n      ExistU (ren v) (map go ts, tc) ([(k, go t) | (k, t) <- rs], rc)\n    go (ForallU v t) = ForallU (ren v) (go t)\n    go (FunU ts t) = FunU (map go ts) (go t)\n    go (AppU t ts) = AppU (go t) (map go ts)\n    go (NamU n o ps rs) = NamU n o (map go ps) [(k, go t) | (k, t) <- rs]\n    go (EffectU effs t) = EffectU effs (go t)\n    go (OptionalU t) = OptionalU (go t)\n    go t@(NatLitU _) = t\n    go (NatAddU a b) = NatAddU (go a) (go b)\n    go (NatMulU a b) = NatMulU (go a) (go b)\n    go (NatSubU a b) = NatSubU (go a) (go b)\n    go (NatDivU a b) = NatDivU (go a) (go b)\n    go (LabeledU n t) = LabeledU n (go t)\n\nprettyTypeU :: TypeU -> MDoc\nprettyTypeU = pretty . cleanTypeName\n\ntvarname :: Gamma -> Text -> (Gamma, TVar)\ntvarname g prefix =\n  let i = gammaCounter g\n   in (g {gammaCounter = i + 1}, TV (prefix <> MT.pack (show i)))\n\nevarname :: Gamma -> Text -> (Gamma, EVar)\nevarname g prefix =\n  let i = gammaCounter g\n   in (g {gammaCounter = i + 1}, EV (prefix <> \"@@\" <> MT.pack (show i)))\n\n-- debugging -------------------\n\nenter :: MDoc -> MorlocMonad ()\nenter d = do\n  depth <- MM.incDepth\n  insetSay $ \"--\" <> pretty depth <> \"-->\" <+> d\n\ninsetSay :: MDoc -> MorlocMonad ()\ninsetSay d = do\n  MM.sayVVV $ \" :\" <+> d\n\nseeType :: TypeU -> MorlocMonad ()\nseeType t = insetSay $ pretty t\n\nleave :: MDoc -> MorlocMonad ()\nleave d = do\n  depth <- MM.decDepth\n  insetSay $ \"<--\" <> pretty (depth + 1) <> \"--\" <+> d\n\nseeGamma :: Gamma -> MorlocMonad ()\nseeGamma g = MM.sayVVV $ nest 4 $ \"Gamma:\" <> line <> vsep (map pretty (gammaContextList g))\n\npeak :: (Foldable f) => ExprS g f c -> MorlocMonad ()\npeak = insetSay . pretty\n\npeakGen :: (Foldable f) => AnnoS g f c -> MorlocMonad ()\npeakGen = insetSay . pretty\n"
  },
  {
    "path": "library/Morloc/Typecheck/NatSolver.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc.Typecheck.NatSolver\nDescription : Type-level natural number arithmetic solver\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nNormalizes type-level Nat expressions to Sum-of-Products (SOP) canonical form\nand solves equality constraints between Nat expressions. Based on the\napproach in ghc-typelits-natnormalise by Christiaan Baaij.\n-}\nmodule Morloc.Typecheck.NatSolver\n  ( NatExpr(..)\n  , NatSOP(..)\n  , NatProduct(..)\n  , NatError(..)\n  , normalize\n  , natEqual\n  , solveNat\n  , substituteNat\n  , isGround\n  , freeNatVars\n  , sopToNatExpr\n  ) where\n\nimport Data.Map.Strict (Map)\nimport qualified Data.Map.Strict as Map\nimport qualified Data.Set as Set\nimport Data.List (sortBy, groupBy)\nimport Data.Ord (comparing)\nimport Data.Function (on)\nimport Morloc.Namespace.Prim (TVar(..))\n\n-- | A type-level natural number expression\ndata NatExpr\n  = NatLit Integer        -- ^ literal: 0, 1, 2, ...\n  | NatVar TVar           -- ^ type variable of kind Nat\n  | NatAdd NatExpr NatExpr -- ^ addition\n  | NatMul NatExpr NatExpr -- ^ multiplication\n  | NatSub NatExpr NatExpr -- ^ subtraction (a - b = a + negate b in SOP)\n  | NatDiv NatExpr NatExpr -- ^ division (ground-only or constant-divisor)\n  deriving (Eq, Ord, Show)\n\n-- | Sum-of-Products canonical form for Nat expressions.\n-- Represents: sum of (coefficient * product-of-variable-powers)\nnewtype NatSOP = NatSOP { unNatSOP :: [NatProduct] }\n  deriving (Eq, Ord, Show)\n\n-- | A single product term: coefficient * (v1^e1 * v2^e2 * ...)\n-- Invariants: exponents > 0, zero-coefficient terms removed\ndata NatProduct = NatProduct\n  { npCoeff :: !Integer\n  , npVars  :: !(Map TVar Integer)\n  } deriving (Show)\n\n-- Custom Eq/Ord: full comparison including coefficient\ninstance Eq NatProduct where\n  (NatProduct c1 v1) == (NatProduct c2 v2) = c1 == c2 && v1 == v2\n\ninstance Ord NatProduct where\n  compare (NatProduct c1 v1) (NatProduct c2 v2) =\n    compare (Map.size v1, v1, c1) (Map.size v2, v2, c2)\n\n-- | Result of attempting to solve a Nat constraint\ndata NatError\n  = Contradiction\n  | Deferred NatSOP  -- ^ cannot solve yet, keep as deferred constraint\n  deriving (Eq, Show)\n\n-- | Normalize a NatExpr to canonical SOP form\nnormalize :: NatExpr -> NatSOP\nnormalize (NatLit n)   = NatSOP [NatProduct n Map.empty]\nnormalize (NatVar v)   = NatSOP [NatProduct 1 (Map.singleton v 1)]\nnormalize (NatAdd a b) = addSOP (normalize a) (normalize b)\nnormalize (NatMul a b) = mulSOP (normalize a) (normalize b)\nnormalize (NatSub a b) = addSOP (normalize a) (negateSOP (normalize b))\nnormalize (NatDiv a b) = divSOP (normalize a) (normalize b)\n\n-- | Add two SOPs by merging and combining like terms\naddSOP :: NatSOP -> NatSOP -> NatSOP\naddSOP (NatSOP ps1) (NatSOP ps2) = NatSOP (mergeLikeTerms (ps1 ++ ps2))\n\n-- | Multiply two SOPs by distributing (cross-product of terms)\nmulSOP :: NatSOP -> NatSOP -> NatSOP\nmulSOP (NatSOP ps1) (NatSOP ps2) =\n  NatSOP (mergeLikeTerms [mulProduct p1 p2 | p1 <- ps1, p2 <- ps2])\n\n-- | Multiply two product terms\nmulProduct :: NatProduct -> NatProduct -> NatProduct\nmulProduct (NatProduct c1 vs1) (NatProduct c2 vs2) =\n  NatProduct (c1 * c2) (Map.unionWith (+) vs1 vs2)\n\n-- | Merge like terms: group by variable-power maps, sum coefficients,\n-- remove zero-coefficient products, sort canonically\nmergeLikeTerms :: [NatProduct] -> [NatProduct]\nmergeLikeTerms =\n    filter (\\p -> npCoeff p /= 0)\n  . map mergeGroup\n  . groupBy ((==) `on` npVars)\n  . sortBy (comparing npVars)\n  where\n    mergeGroup :: [NatProduct] -> NatProduct\n    mergeGroup [] = error \"impossible: groupBy produces non-empty groups\"\n    mergeGroup ps@(p:_) = NatProduct (sum (map npCoeff ps)) (npVars p)\n\n-- | Check if two Nat expressions are equal (via SOP normalization)\nnatEqual :: NatExpr -> NatExpr -> Bool\nnatEqual e1 e2 = normalize e1 == normalize e2\n\n-- | Solve the constraint e1 ~ e2, returning variable substitutions\nsolveNat :: NatExpr -> NatExpr -> Either NatError (Map TVar NatExpr)\nsolveNat e1 e2 =\n  let sop1 = normalize e1\n      sop2 = normalize e2\n      diff = subSOP sop1 sop2\n  in solveSOP diff\n\n-- | Subtract two SOPs: sop1 - sop2\nsubSOP :: NatSOP -> NatSOP -> NatSOP\nsubSOP (NatSOP ps1) (NatSOP ps2) =\n  addSOP (NatSOP ps1) (NatSOP (map negateProduct ps2))\n\n-- | Negate a product term\nnegateProduct :: NatProduct -> NatProduct\nnegateProduct (NatProduct c vs) = NatProduct (negate c) vs\n\n-- | Negate an entire SOP\nnegateSOP :: NatSOP -> NatSOP\nnegateSOP (NatSOP ps) = NatSOP (map negateProduct ps)\n\n-- | Divide two SOPs. Only handles ground division or constant divisor.\n-- For ground: compute directly. For constant divisor: divide each coefficient.\n-- Otherwise: return the original forms unchanged (will be Deferred by solver).\ndivSOP :: NatSOP -> NatSOP -> NatSOP\ndivSOP (NatSOP ps1) (NatSOP [NatProduct d vs2])\n  | Map.null vs2, d /= 0\n  , all (\\p -> npCoeff p `mod` d == 0) ps1\n  = NatSOP (mergeLikeTerms [NatProduct (npCoeff p `div` d) (npVars p) | p <- ps1])\ndivSOP (NatSOP ps1) (NatSOP ps2)\n  -- Both ground: compute directly\n  | all (\\p -> Map.null (npVars p)) ps1\n  , all (\\p -> Map.null (npVars p)) ps2\n  , let n = sum (map npCoeff ps1)\n  , let d = sum (map npCoeff ps2)\n  , d /= 0\n  , n `mod` d == 0\n  = NatSOP [NatProduct (n `div` d) Map.empty]\n  -- Cannot simplify: return a sentinel that won't match anything useful.\n  -- The solver will see non-matching SOPs and return Deferred.\n  | otherwise = NatSOP [NatProduct 0 (Map.singleton (TV \"__div__\") 1)]\n\n-- | Solve sop = 0\nsolveSOP :: NatSOP -> Either NatError (Map TVar NatExpr)\nsolveSOP (NatSOP []) = Right Map.empty  -- 0 = 0\nsolveSOP (NatSOP [NatProduct c vs])\n  | Map.null vs && c /= 0 = Left Contradiction  -- c = 0 where c /= 0\n  | Map.null vs           = Right Map.empty      -- 0 = 0\n  | Map.size vs == 1, [(v, 1)] <- Map.toList vs =\n      -- c*v = 0, only solution is v = 0 (but only if c divides 0, which it does)\n      if c == 0\n        then Right Map.empty\n        else Right (Map.singleton v (NatLit 0))\n  | otherwise = Left (Deferred (NatSOP [NatProduct c vs]))\nsolveSOP (NatSOP prods)\n  | Just (v, a, b) <- extractLinearVar prods =\n      if b `mod` a == 0\n        then Right (Map.singleton v (NatLit (negate b `div` a)))\n        else Left Contradiction\n  | otherwise = Left (Deferred (NatSOP prods))\n\n-- | Find a variable that appears linearly (exponent 1, alone in its product)\n-- Returns (variable, coefficient, sum of constant terms)\nextractLinearVar :: [NatProduct] -> Maybe (TVar, Integer, Integer)\nextractLinearVar prods =\n  let -- Find products with exactly one variable at exponent 1\n      linearSingles = [ (v, npCoeff p)\n                       | p <- prods\n                       , Map.size (npVars p) == 1\n                       , [(v, 1)] <- [Map.toList (npVars p)]\n                       ]\n      -- Check which linear variables appear only once AND all other\n      -- products are constant (no other variables). Without this guard,\n      -- expressions like i*j - n would incorrectly solve n = 0.\n      candidates = [ (v, c, constSum)\n                    | (v, c) <- linearSingles\n                    , length [() | p <- prods, Map.member v (npVars p)] == 1\n                    , let others = [p | p <- prods, not (Map.member v (npVars p))]\n                    , all (\\p -> Map.null (npVars p)) others\n                    , let constSum = sum (map npCoeff others)\n                    ]\n  in case candidates of\n       ((v, c, s) : _) -> Just (v, c, s)\n       [] -> Nothing\n\n-- | Apply substitutions to a NatExpr\nsubstituteNat :: Map TVar NatExpr -> NatExpr -> NatExpr\nsubstituteNat m = go\n  where\n    go (NatLit n) = NatLit n\n    go (NatVar v) = case Map.lookup v m of\n      Just e  -> e\n      Nothing -> NatVar v\n    go (NatAdd a b) = NatAdd (go a) (go b)\n    go (NatMul a b) = NatMul (go a) (go b)\n    go (NatSub a b) = NatSub (go a) (go b)\n    go (NatDiv a b) = NatDiv (go a) (go b)\n\n-- | Check if a NatExpr has no free variables\nisGround :: NatExpr -> Bool\nisGround (NatLit _) = True\nisGround (NatVar _) = False\nisGround (NatAdd a b) = isGround a && isGround b\nisGround (NatMul a b) = isGround a && isGround b\nisGround (NatSub a b) = isGround a && isGround b\nisGround (NatDiv a b) = isGround a && isGround b\n\n-- | Get all free variables in a NatExpr\nfreeNatVars :: NatExpr -> Set.Set TVar\nfreeNatVars (NatLit _) = Set.empty\nfreeNatVars (NatVar v) = Set.singleton v\nfreeNatVars (NatAdd a b) = Set.union (freeNatVars a) (freeNatVars b)\nfreeNatVars (NatMul a b) = Set.union (freeNatVars a) (freeNatVars b)\nfreeNatVars (NatSub a b) = Set.union (freeNatVars a) (freeNatVars b)\nfreeNatVars (NatDiv a b) = Set.union (freeNatVars a) (freeNatVars b)\n\n-- | Convert a SOP back to a NatExpr (for error messages and further processing)\nsopToNatExpr :: NatSOP -> NatExpr\nsopToNatExpr (NatSOP []) = NatLit 0\nsopToNatExpr (NatSOP prods) = foldl1 NatAdd (map productToExpr prods)\n  where\n    productToExpr :: NatProduct -> NatExpr\n    productToExpr (NatProduct c vs)\n      | Map.null vs = NatLit c\n      | c == 1      = varsToExpr (Map.toList vs)\n      | otherwise   = NatMul (NatLit c) (varsToExpr (Map.toList vs))\n\n    varsToExpr :: [(TVar, Integer)] -> NatExpr\n    varsToExpr [] = NatLit 1\n    varsToExpr pairs = foldl1 NatMul (concatMap expandVar pairs)\n\n    expandVar :: (TVar, Integer) -> [NatExpr]\n    expandVar (v, n)\n      | n <= 0    = []\n      | otherwise = replicate (fromIntegral n) (NatVar v)\n"
  },
  {
    "path": "library/Morloc/Version.hs",
    "content": "{- |\nModule      : Morloc.Version\nDescription : Store the morloc version\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n-}\nmodule Morloc.Version (versionStr) where\n\nimport Data.Version (showVersion)\nimport qualified Paths_morloc (version)\n\nversionStr :: String\nversionStr = showVersion Paths_morloc.version\n"
  },
  {
    "path": "library/Morloc.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n\n{- |\nModule      : Morloc\nDescription : Top-level compiler pipeline: parse, typecheck, generate, build\nCopyright   : (c) Zebulun Arendsee, 2016-2026\nLicense     : Apache-2.0\nMaintainer  : z@morloc.io\n\nEntry point for the morloc compiler library. Orchestrates the full pipeline:\nparsing source into a module DAG, typechecking, code generation (pools +\nmanifest), and building executables. The 'writeProgram' function is the\nmain API consumed by the CLI.\n-}\nmodule Morloc\n  ( writeProgram\n  , typecheck\n  , typecheckFrontend\n  , generatePools\n  ) where\n\nimport Morloc.Namespace.Expr\nimport Morloc.Namespace.Prim\nimport Morloc.Namespace.State\nimport Morloc.Namespace.Type\n\nimport Morloc.Data.Doc (pretty)\nimport qualified Data.Map as Map\nimport qualified Data.Set as Set\n\nimport Morloc.CodeGenerator.Docstrings (processDocstrings)\nimport Morloc.CodeGenerator.Emit (TranslateFn, emit, pool)\nimport Morloc.CodeGenerator.Express (express)\nimport Morloc.CodeGenerator.LambdaEval (applyLambdas)\nimport Morloc.CodeGenerator.Namespace (SerialManifold)\nimport qualified Morloc.CodeGenerator.Nexus as Nexus\nimport Morloc.CodeGenerator.Parameterize (parameterize)\nimport Morloc.CodeGenerator.Realize (realityCheck)\nimport Morloc.CodeGenerator.Segment (segment)\nimport Morloc.CodeGenerator.Reduce (reduce)\nimport Morloc.CodeGenerator.Serialize (serialize)\nimport qualified Morloc.Data.DAG as DAG\nimport qualified Morloc.Frontend.API as F\nimport Morloc.Frontend.Restructure (restructure)\nimport Morloc.Frontend.Treeify (treeify)\nimport qualified Morloc.Monad as MM\nimport Morloc.ProgramBuilder.Build (buildProgram)\n\n-- | Check the general types only\ntypecheckFrontend ::\n  Maybe Path ->\n  Code ->\n  MorlocMonad [AnnoS (Indexed TypeU) Many Int]\ntypecheckFrontend path code = do\n  dag <- F.parse path code\n  evalMode <- MM.gets stateEvalMode\n  if evalMode then checkEvalRestrictions dag else return ()\n  case DAG.roots dag of\n    (r : _) -> MM.modify (\\s -> s {stateModuleName = Just r})\n    _ -> return ()\n  restructure dag\n    >>= treeify\n    >>= F.typecheck\n\n-- | Check general types and also resolve implementations\ntypecheck ::\n  Maybe Path ->\n  Code ->\n  MorlocMonad\n    ( [AnnoS (Indexed Type) One ()]\n    , [AnnoS (Indexed Type) One (Indexed Lang)]\n    )\ntypecheck path code =\n  typecheckFrontend path code\n    -- resolve all TypeU types to Type\n    |>> map F.resolveTypes\n    -- resolve all TypeU types to Type\n    >>= mapM F.valuecheck\n    -- check for value contradictions between implementations\n    >>= realityCheck\n\n-- | Do everything except language specific code generation.\ngeneratePools :: [AnnoS (Indexed Type) One (Indexed Lang)] -> MorlocMonad [(Lang, [SerialManifold])]\ngeneratePools rASTs = do\n  paramRASTs <- mapM parameterize rASTs\n  let langMap = Map.fromList\n        [(midx, lang) | AnnoS (Idx midx _) (Idx _ lang, _) _ <- paramRASTs]\n  MM.modify (\\s -> s { stateManifoldLang = langMap })\n  mapM express paramRASTs\n    >>= mapM segment |>> concat\n    >>= mapM serialize\n    >>= mapM reduce\n      |>> pool\n\n-- | Build a program as a local executable\nwriteProgram ::\n  -- | language-specific translator callback\n  TranslateFn ->\n  -- | source code filename (for debugging messages)\n  Maybe Path ->\n  -- | source code text\n  Code ->\n  MorlocMonad ()\nwriteProgram translateFn path code = do\n  typecheck path code\n    -- evaluate all applied lambdas in rasts and gasts\n    >>= bimapM (mapM applyLambdas) (mapM applyLambdas)\n    -- process docstrings to determine how to build CLI\n    >>= bimapM (mapM processDocstrings) (mapM processDocstrings)\n    -- generate nexus and pools\n    >>= \\(gASTs, rASTs) ->\n      do\n        -- Filter out generic (polymorphic) exports -- they can't become CLI subcommands\n        let isConcreteExport (AnnoS (Idx _ t) _ _, _) = not (containsUnk t)\n            (concreteGASTs, genericGASTs) = partition isConcreteExport gASTs\n            (concreteRASTs, genericRASTs) = partition isConcreteExport rASTs\n            warnSkip (AnnoS (Idx i _) _ _) = do\n              name <- MM.metaName i\n              case name of\n                Just (EV n) -> MM.say $ \"Warning: skipping generic export '\" <> pretty n <> \"'\"\n                Nothing -> return ()\n        mapM_ (warnSkip . fst) genericGASTs\n        mapM_ (warnSkip . fst) genericRASTs\n        -- Only pass exported rASTs to the nexus (not recursive helpers)\n        exports <- MM.gets stateExports\n        let exportSet = Set.fromList exports\n            isExported (AnnoS (Idx midx _) _ _, _) = Set.member midx exportSet\n            exportedRASTs = filter isExported concreteRASTs\n        nexus <- Nexus.generate concreteGASTs exportedRASTs\n        MM.startCounter\n        paramRASTs <- mapM parameterize (map fst concreteRASTs)\n        let langMap = Map.fromList\n              [(midx, lang) | AnnoS (Idx midx _) (Idx _ lang, _) _ <- paramRASTs]\n        MM.modify (\\s -> s { stateManifoldLang = langMap })\n        pools <-\n          mapM express paramRASTs\n            >>= mapM segment |>> concat\n            >>= mapM serialize\n            >>= mapM reduce\n              |>> pool\n            >>= mapM (uncurry (emit translateFn))\n        return (nexus, pools)\n        -- write the code and compile as needed\n        >>= buildProgram\n\n-- | In eval mode, reject source, class, and instance declarations in the root module.\n-- Imported modules are not checked since they are pre-existing installed code.\ncheckEvalRestrictions :: DAG MVar Import ExprI -> MorlocMonad ()\ncheckEvalRestrictions dag =\n  case DAG.roots dag of\n    [] -> return ()\n    (root : _) -> case Map.lookup root dag of\n      Nothing -> return ()\n      Just (ExprI _ (ModE _ body), _) -> mapM_ checkExpr body\n      Just _ -> return ()\n  where\n    checkExpr :: ExprI -> MorlocMonad ()\n    checkExpr (ExprI i (SrcE _)) =\n      MM.throwSourcedError i \"source statements are not allowed in eval mode\"\n    checkExpr (ExprI i (ClsE _)) =\n      MM.throwSourcedError i \"class declarations are not allowed in eval mode\"\n    checkExpr (ExprI i (IstE _ _ _)) =\n      MM.throwSourcedError i \"instance declarations are not allowed in eval mode\"\n    checkExpr _ = return ()\n"
  },
  {
    "path": "metrics/README.md",
    "content": "# Morloc Compiler Metrics\n\nThis directory contains tools for collecting and tracking code quality metrics for the morloc compiler.\n\n## Purpose\n\nTrack quantitative metrics over time to:\n- Monitor code growth and complexity\n- Detect performance regressions\n- Guide refactoring decisions\n- Compare versions objectively\n\n## Collected Metrics\n\n### Code Metrics (via `scc`)\n- Lines of code (total, by file, by language)\n- Comment ratio\n- Blank lines\n- Cyclomatic complexity estimates\n\n### Build Metrics\n- Full build time (clean build)\n- Binary size\n- Module counts (library, executable, test, bench)\n\n### Performance Metrics (via `tasty-bench`)\n- Parser performance\n- Type checker performance\n- Code generator performance\n\n## Directory Structure\n\n```\nmetrics/\n├── scripts/\n│   ├── collect-metrics.sh   # Collect all metrics and save snapshot\n│   └── compare-metrics.sh    # Compare two metric snapshots\n├── baselines/\n│   └── *.json                # Metric snapshots (version baselines)\n└── reports/\n    └── *.txt                 # Human-readable metric reports\n```\n\n## Usage\n\n### Collect Metrics\n\n```bash\n# Collect metrics for current version\n./metrics/scripts/collect-metrics.sh v0.59.0\n\n# Or use auto-generated timestamp\n./metrics/scripts/collect-metrics.sh\n```\n\nThis creates:\n- `baselines/v0.59.0.json` - Machine-readable metrics\n- `reports/v0.59.0.txt` - Human-readable summary\n\n### Compare Versions\n\n```bash\n./metrics/scripts/compare-metrics.sh \\\n    metrics/baselines/v0.59.0.json \\\n    metrics/baselines/v0.60.0.json\n```\n\nShows changes in:\n- Code lines\n- Build time\n- Binary size\n- Benchmark performance\n\nColor-coded output highlights significant changes (>10%).\n\n## Requirements\n\n### Required\n- `stack` - Haskell build tool (already required for morloc)\n- `bc` - Command-line calculator (usually pre-installed)\n\n### Recommended\n- `scc` - Fast line counter with complexity metrics (written in Go)\n  - Install: `go install github.com/boyter/scc/v3@latest`\n  - Or: `brew install scc` (macOS)\n  - Or download from: https://github.com/boyter/scc/releases\n  - Without it, only basic metrics are collected\n\n- `jq` - JSON processor for detailed comparison\n  - Install: `brew install jq` or `apt-get install jq`\n  - Without it, comparison tool is less detailed\n\n## Workflow\n\n### On Release\n\n```bash\n# Before release, collect baseline\n./metrics/scripts/collect-metrics.sh v0.59.0-baseline\n\n# After changes, collect new metrics\n./metrics/scripts/collect-metrics.sh v0.59.0-final\n\n# Compare\n./metrics/scripts/compare-metrics.sh \\\n    metrics/baselines/v0.59.0-baseline.json \\\n    metrics/baselines/v0.59.0-final.json\n```\n\n### During Development\n\nCollect metrics periodically (weekly, monthly) to track trends:\n\n```bash\n./metrics/scripts/collect-metrics.sh $(date +%Y-%m-%d)\n```\n\nCompare against previous week/month to catch drift.\n\n### After Major Refactoring\n\n```bash\n# Before refactoring\n./metrics/scripts/collect-metrics.sh before-refactor-auth\n\n# After refactoring\n./metrics/scripts/collect-metrics.sh after-refactor-auth\n\n# Verify no regressions\n./metrics/scripts/compare-metrics.sh \\\n    metrics/baselines/before-refactor-auth.json \\\n    metrics/baselines/after-refactor-auth.json\n```\n\n## Interpreting Results\n\n### Good Trends\n- ✅ Build time stable or decreasing\n- ✅ Comment ratio >15%\n- ✅ Benchmark performance stable or improving\n- ✅ Binary size stable (unless adding features)\n\n### Warning Signs\n- ⚠️  Code lines growing >20% per release\n- ⚠️  Build time increasing >15%\n- ⚠️  Benchmark performance degrading >10%\n- ⚠️  Comment ratio decreasing\n\n### When to Act\n- 🚨 Any metric changes >25% unexpectedly\n- 🚨 Build time >5 minutes\n- 🚨 Binary size >100MB\n- 🚨 Average module size >500 lines\n\n## Extending Metrics\n\nTo add new metrics, edit `collect-metrics.sh`:\n\n```bash\n# Add new metric collection\necho \"Collecting custom metric...\"\nCUSTOM_VALUE=$(your-command)\necho \"  \\\"custom_metric\\\": $CUSTOM_VALUE,\" >> \"$OUTPUT_FILE\"\n```\n\nEnsure JSON formatting is valid (commas, no trailing comma on last entry).\n\n## Notes\n\n- Metrics are stored as JSON for easy parsing and analysis\n- Scripts are designed to work without external dependencies (except scc/jq)\n- Git commit hash is automatically captured in each snapshot\n- Build metrics require clean build (slower but reproducible)\n\n## See Also\n\n- `../CLAUDE.md` - Full tooling guidelines including when to run metrics\n- `../bench/` - Benchmark suite source code\n"
  },
  {
    "path": "metrics/scripts/collect-metrics.sh",
    "content": "#!/usr/bin/env bash\n# Collect code metrics for morloc compiler\n# Usage: ./collect-metrics.sh [version-label]\n#\n# Requirements:\n#   - scc (lines of code): https://github.com/boyter/scc\n#     Install: go install github.com/boyter/scc/v3@latest\n#   - stack (Haskell build tool)\n#\n# This script collects:\n#   - Source lines of code (scc)\n#   - Module count and structure\n#   - Build time\n#   - Binary size\n#   - Benchmark results (if available)\n\nset -euo pipefail\n\nVERSION=\"${1:-$(date +%Y%m%d-%H%M%S)}\"\nMETRICS_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")/..\" && pwd)\"\nOUTPUT_FILE=\"$METRICS_DIR/baselines/${VERSION}.json\"\nREPORT_FILE=\"$METRICS_DIR/reports/${VERSION}.txt\"\n\necho \"=== Collecting metrics for version: $VERSION ===\"\necho \"Output: $OUTPUT_FILE\"\necho \"\"\n\n# Create output directories\nmkdir -p \"$METRICS_DIR/baselines\"\nmkdir -p \"$METRICS_DIR/reports\"\n\n# Start JSON output\necho \"{\" > \"$OUTPUT_FILE\"\necho \"  \\\"version\\\": \\\"$VERSION\\\",\" >> \"$OUTPUT_FILE\"\necho \"  \\\"timestamp\\\": \\\"$(date -Iseconds)\\\",\" >> \"$OUTPUT_FILE\"\necho \"  \\\"git_commit\\\": \\\"$(git rev-parse HEAD 2>/dev/null || echo 'unknown')\\\",\" >> \"$OUTPUT_FILE\"\n\n# ==== Source Lines of Code ====\necho \"Collecting source line counts with scc...\"\nif command -v scc &> /dev/null; then\n    scc library/ executable/ test-suite/ bench/ --format json --by-file > \"$METRICS_DIR/.scc-temp.json\"\n    echo \"  \\\"scc\\\": $(cat \"$METRICS_DIR/.scc-temp.json\"),\" >> \"$OUTPUT_FILE\"\n    rm \"$METRICS_DIR/.scc-temp.json\"\n\n    # Summary for report\n    scc library/ executable/ test-suite/ bench/ > \"$REPORT_FILE\"\nelse\n    echo \"  \\\"scc\\\": null,\" >> \"$OUTPUT_FILE\"\n    echo \"WARNING: scc not found. Install with: cargo install scc\" >&2\n    echo \"scc not installed\" > \"$REPORT_FILE\"\nfi\n\n# ==== Module Statistics ====\necho \"Collecting module statistics...\"\nHASKELL_FILES=$(find library/ -name \"*.hs\" | wc -l)\nEXECUTABLE_FILES=$(find executable/ -name \"*.hs\" | wc -l)\nTEST_FILES=$(find test-suite/ -name \"*.hs\" | wc -l)\nBENCH_FILES=$(find bench/ -name \"*.hs\" 2>/dev/null | wc -l || echo 0)\n\necho \"  \\\"modules\\\": {\" >> \"$OUTPUT_FILE\"\necho \"    \\\"library\\\": $HASKELL_FILES,\" >> \"$OUTPUT_FILE\"\necho \"    \\\"executable\\\": $EXECUTABLE_FILES,\" >> \"$OUTPUT_FILE\"\necho \"    \\\"test\\\": $TEST_FILES,\" >> \"$OUTPUT_FILE\"\necho \"    \\\"bench\\\": $BENCH_FILES\" >> \"$OUTPUT_FILE\"\necho \"  },\" >> \"$OUTPUT_FILE\"\n\necho \"\" >> \"$REPORT_FILE\"\necho \"=== Module Counts ===\" >> \"$REPORT_FILE\"\necho \"Library: $HASKELL_FILES\" >> \"$REPORT_FILE\"\necho \"Executable: $EXECUTABLE_FILES\" >> \"$REPORT_FILE\"\necho \"Test: $TEST_FILES\" >> \"$REPORT_FILE\"\necho \"Benchmark: $BENCH_FILES\" >> \"$REPORT_FILE\"\n\n# ==== Build Time ====\necho \"Measuring build time (clean build)...\"\nstack clean 2>/dev/null || true\nBUILD_START=$(date +%s)\nif stack build --force-dirty 2>&1 | tee \"$METRICS_DIR/.build-log.txt\"; then\n    BUILD_END=$(date +%s)\n    BUILD_TIME=$((BUILD_END - BUILD_START))\n    echo \"  \\\"build_time_seconds\\\": $BUILD_TIME,\" >> \"$OUTPUT_FILE\"\n\n    echo \"\" >> \"$REPORT_FILE\"\n    echo \"=== Build Time ===\" >> \"$REPORT_FILE\"\n    echo \"$BUILD_TIME seconds\" >> \"$REPORT_FILE\"\nelse\n    echo \"  \\\"build_time_seconds\\\": null,\" >> \"$OUTPUT_FILE\"\n    echo \"Build failed\" >> \"$REPORT_FILE\"\nfi\nrm -f \"$METRICS_DIR/.build-log.txt\"\n\n# ==== Binary Size ====\necho \"Measuring binary size...\"\nINSTALL_ROOT=$(stack path --local-install-root 2>/dev/null || echo \"\")\nif [ -n \"$INSTALL_ROOT\" ] && [ -f \"$INSTALL_ROOT/bin/morloc\" ]; then\n    BINARY_SIZE=$(stat -f%z \"$INSTALL_ROOT/bin/morloc\" 2>/dev/null || stat -c%s \"$INSTALL_ROOT/bin/morloc\" 2>/dev/null || echo 0)\n    BINARY_SIZE_MB=$(echo \"scale=2; $BINARY_SIZE / 1024 / 1024\" | bc)\n    echo \"  \\\"binary_size_bytes\\\": $BINARY_SIZE,\" >> \"$OUTPUT_FILE\"\n    echo \"  \\\"binary_size_mb\\\": $BINARY_SIZE_MB,\" >> \"$OUTPUT_FILE\"\n\n    echo \"\" >> \"$REPORT_FILE\"\n    echo \"=== Binary Size ===\" >> \"$REPORT_FILE\"\n    echo \"${BINARY_SIZE_MB} MB\" >> \"$REPORT_FILE\"\nelse\n    echo \"  \\\"binary_size_bytes\\\": null,\" >> \"$OUTPUT_FILE\"\n    echo \"  \\\"binary_size_mb\\\": null,\" >> \"$OUTPUT_FILE\"\nfi\n\n# ==== Benchmarks ====\necho \"Running benchmarks...\"\nif stack bench --benchmark-arguments '--csv '\"$METRICS_DIR/.bench-temp.csv\" 2>/dev/null; then\n    # Convert CSV to JSON array\n    echo \"  \\\"benchmarks\\\": [\" >> \"$OUTPUT_FILE\"\n    tail -n +2 \"$METRICS_DIR/.bench-temp.csv\" | while IFS=, read -r name mean meanLB meanUB stddev stddevLB stddevUB; do\n        echo \"    {\\\"name\\\": \\\"$name\\\", \\\"mean\\\": $mean, \\\"stddev\\\": $stddev},\" >> \"$OUTPUT_FILE\"\n    done\n    # Remove trailing comma from last entry\n    sed -i '$s/,$//' \"$OUTPUT_FILE\"\n    echo \"  ]\" >> \"$OUTPUT_FILE\"\n    rm -f \"$METRICS_DIR/.bench-temp.csv\"\n\n    echo \"\" >> \"$REPORT_FILE\"\n    echo \"=== Benchmarks ===\" >> \"$REPORT_FILE\"\n    cat \"$METRICS_DIR/.bench-temp.csv\" >> \"$REPORT_FILE\" 2>/dev/null || echo \"No benchmark results\" >> \"$REPORT_FILE\"\nelse\n    echo \"  \\\"benchmarks\\\": []\" >> \"$OUTPUT_FILE\"\n    echo \"\" >> \"$REPORT_FILE\"\n    echo \"=== Benchmarks ===\" >> \"$REPORT_FILE\"\n    echo \"Benchmarks not available or failed\" >> \"$REPORT_FILE\"\nfi\n\n# Close JSON\necho \"}\" >> \"$OUTPUT_FILE\"\n\necho \"\"\necho \"=== Metrics collection complete ===\"\necho \"Results saved to: $OUTPUT_FILE\"\necho \"Report saved to: $REPORT_FILE\"\necho \"\"\necho \"To compare with another version:\"\necho \"  ./metrics/scripts/compare-metrics.sh baseline.json current.json\"\n"
  },
  {
    "path": "metrics/scripts/compare-metrics.sh",
    "content": "#!/usr/bin/env bash\n# Compare two metric snapshots\n# Usage: ./compare-metrics.sh <baseline.json> <current.json>\n\nset -euo pipefail\n\nif [ $# -ne 2 ]; then\n    echo \"Usage: $0 <baseline.json> <current.json>\"\n    echo \"\"\n    echo \"Example:\"\n    echo \"  $0 metrics/baselines/v0.59.0.json metrics/baselines/v0.60.0.json\"\n    exit 1\nfi\n\nBASELINE=\"$1\"\nCURRENT=\"$2\"\n\nif [ ! -f \"$BASELINE\" ]; then\n    echo \"Error: Baseline file not found: $BASELINE\"\n    exit 1\nfi\n\nif [ ! -f \"$CURRENT\" ]; then\n    echo \"Error: Current file not found: $CURRENT\"\n    exit 1\nfi\n\n# Helper function to extract JSON values\nget_value() {\n    local file=\"$1\"\n    local path=\"$2\"\n\n    # Try with jq if available, otherwise grep\n    if command -v jq &> /dev/null; then\n        jq -r \"$path // \\\"null\\\"\" \"$file\" 2>/dev/null || echo \"null\"\n    else\n        # Fallback to grep (less reliable)\n        grep \"\\\"${path//./}\\\"\" \"$file\" | sed 's/.*: \\(.*\\),\\?/\\1/' | tr -d '\"' || echo \"null\"\n    fi\n}\n\n# Helper function to calculate percentage change\npct_change() {\n    local old=\"$1\"\n    local new=\"$2\"\n\n    if [ \"$old\" = \"null\" ] || [ \"$new\" = \"null\" ]; then\n        echo \"N/A\"\n        return\n    fi\n\n    # Use bc for floating point arithmetic\n    local change=$(echo \"scale=2; (($new - $old) / $old) * 100\" | bc 2>/dev/null || echo \"N/A\")\n\n    if [ \"$change\" = \"N/A\" ]; then\n        echo \"N/A\"\n    else\n        echo \"${change}%\"\n    fi\n}\n\n# Helper to format change with color (if tput available)\nformat_change() {\n    local pct=\"$1\"\n    local inverse=\"${2:-false}\"  # If true, negative is good\n\n    if [ \"$pct\" = \"N/A\" ]; then\n        echo \"$pct\"\n        return\n    fi\n\n    local num=$(echo \"$pct\" | tr -d '%')\n    local color=\"\"\n\n    if command -v tput &> /dev/null; then\n        local green=$(tput setaf 2)\n        local red=$(tput setaf 1)\n        local reset=$(tput sgr0)\n\n        if [ \"$inverse\" = \"true\" ]; then\n            # For things like build time where decrease is good\n            if (( $(echo \"$num < 0\" | bc -l) )); then\n                color=\"$green\"\n            elif (( $(echo \"$num > 0\" | bc -l) )); then\n                color=\"$red\"\n            fi\n        else\n            # For things like SLOC where increase might be concerning\n            if (( $(echo \"$num > 10\" | bc -l) )); then\n                color=\"$red\"\n            elif (( $(echo \"$num < -10\" | bc -l) )); then\n                color=\"$green\"\n            fi\n        fi\n\n        echo \"${color}${pct}${reset}\"\n    else\n        echo \"$pct\"\n    fi\n}\n\necho \"==================================\"\necho \"Morloc Metrics Comparison\"\necho \"==================================\"\necho \"\"\necho \"Baseline: $BASELINE\"\necho \"Current:  $CURRENT\"\necho \"\"\n\n# Extract versions\nBASE_VER=$(get_value \"$BASELINE\" \".version\")\nCURR_VER=$(get_value \"$CURRENT\" \".version\")\necho \"Comparing: $BASE_VER -> $CURR_VER\"\necho \"\"\n\n# ==== Source Lines ====\necho \"--- Source Lines (from scc) ---\"\nif command -v jq &> /dev/null; then\n    BASE_LINES=$(jq -r '.scc[] | select(.Name == \"Total\") | .Code // 0' \"$BASELINE\" 2>/dev/null || echo \"0\")\n    CURR_LINES=$(jq -r '.scc[] | select(.Name == \"Total\") | .Code // 0' \"$CURRENT\" 2>/dev/null || echo \"0\")\n    BASE_COMMENTS=$(jq -r '.scc[] | select(.Name == \"Total\") | .Comments // 0' \"$BASELINE\" 2>/dev/null || echo \"0\")\n    CURR_COMMENTS=$(jq -r '.scc[] | select(.Name == \"Total\") | .Comments // 0' \"$CURRENT\" 2>/dev/null || echo \"0\")\nelse\n    BASE_LINES=\"N/A\"\n    CURR_LINES=\"N/A\"\n    BASE_COMMENTS=\"N/A\"\n    CURR_COMMENTS=\"N/A\"\nfi\n\necho \"Code Lines:    $BASE_LINES -> $CURR_LINES  ($(format_change \"$(pct_change \"$BASE_LINES\" \"$CURR_LINES\")\"))\"\necho \"Comment Lines: $BASE_COMMENTS -> $CURR_COMMENTS  ($(format_change \"$(pct_change \"$BASE_COMMENTS\" \"$CURR_COMMENTS\")\" true))\"\necho \"\"\n\n# ==== Modules ====\necho \"--- Module Counts ---\"\nBASE_MODS=$(get_value \"$BASELINE\" \".modules.library\")\nCURR_MODS=$(get_value \"$CURRENT\" \".modules.library\")\necho \"Library Modules: $BASE_MODS -> $CURR_MODS  ($(pct_change \"$BASE_MODS\" \"$CURR_MODS\"))\"\necho \"\"\n\n# ==== Build Time ====\necho \"--- Build Performance ---\"\nBASE_TIME=$(get_value \"$BASELINE\" \".build_time_seconds\")\nCURR_TIME=$(get_value \"$CURRENT\" \".build_time_seconds\")\necho \"Build Time: ${BASE_TIME}s -> ${CURR_TIME}s  ($(format_change \"$(pct_change \"$BASE_TIME\" \"$CURR_TIME\")\" true))\"\necho \"\"\n\n# ==== Binary Size ====\necho \"--- Binary Size ---\"\nBASE_SIZE=$(get_value \"$BASELINE\" \".binary_size_mb\")\nCURR_SIZE=$(get_value \"$CURRENT\" \".binary_size_mb\")\necho \"Binary Size: ${BASE_SIZE}MB -> ${CURR_SIZE}MB  ($(format_change \"$(pct_change \"$BASE_SIZE\" \"$CURR_SIZE\")\" true))\"\necho \"\"\n\n# ==== Benchmarks ====\necho \"--- Benchmarks ---\"\nif command -v jq &> /dev/null; then\n    # Compare benchmark means\n    jq -r '.benchmarks[]? | .name' \"$CURRENT\" 2>/dev/null | while read -r bench_name; do\n        BASE_MEAN=$(jq -r \".benchmarks[]? | select(.name == \\\"$bench_name\\\") | .mean // null\" \"$BASELINE\" 2>/dev/null)\n        CURR_MEAN=$(jq -r \".benchmarks[]? | select(.name == \\\"$bench_name\\\") | .mean // null\" \"$CURRENT\" 2>/dev/null)\n\n        if [ \"$BASE_MEAN\" != \"null\" ] && [ \"$CURR_MEAN\" != \"null\" ]; then\n            echo \"$bench_name: ${BASE_MEAN} -> ${CURR_MEAN}  ($(format_change \"$(pct_change \"$BASE_MEAN\" \"$CURR_MEAN\")\" true))\"\n        else\n            echo \"$bench_name: New benchmark\"\n        fi\n    done\nelse\n    echo \"Install jq for detailed benchmark comparison\"\nfi\necho \"\"\n\n# ==== Summary ====\necho \"==================================\"\necho \"Summary\"\necho \"==================================\"\necho \"\"\necho \"Key changes:\"\n\n# Highlight significant changes (>10%)\nif [ \"$BASE_LINES\" != \"N/A\" ] && [ \"$CURR_LINES\" != \"N/A\" ]; then\n    LINES_PCT=$(pct_change \"$BASE_LINES\" \"$CURR_LINES\" | tr -d '%')\n    if (( $(echo \"${LINES_PCT#-} > 10\" | bc -l 2>/dev/null || echo 0) )); then\n        echo \"  - Code size changed by $(format_change \"$(pct_change \"$BASE_LINES\" \"$CURR_LINES\")\")\"\n    fi\nfi\n\nif [ \"$BASE_TIME\" != \"null\" ] && [ \"$CURR_TIME\" != \"null\" ]; then\n    TIME_PCT=$(pct_change \"$BASE_TIME\" \"$CURR_TIME\" | tr -d '%')\n    if (( $(echo \"${TIME_PCT#-} > 10\" | bc -l 2>/dev/null || echo 0) )); then\n        echo \"  - Build time changed by $(format_change \"$(pct_change \"$BASE_TIME\" \"$CURR_TIME\")\" true)\"\n    fi\nfi\n\necho \"\"\necho \"For detailed line-by-line comparison, use jq:\"\necho \"  diff <(jq . $BASELINE) <(jq . $CURRENT)\"\n"
  },
  {
    "path": "package.yaml",
    "content": "name:             morloc\nversion:          0.81.0\nhomepage:         https://github.com/morloc-project/morloc\nsynopsis:         A multi-lingual, typed, workflow language\ndescription: |\n  Morloc is a typed, multi-lingual workflow language that composes functions\n  across Python, C++, R, and other languages under a unified type system.\n  The compiler generates interprocess communication code, serialization\n  logic, and a dispatch binary from a single morloc source file.\n  See the GitHub README <https://github.com/morloc-project/morloc#readme>\n  for full documentation.\ncategory:         Language, Compiler, Code Generation\ntested-with:      GHC == 9.6.6\nlicense:          Apache-2.0\nlicense-file:     LICENSE\nauthor:           \"Zebulun Arendsee\" \nmaintainer:       \"z@morloc.io\"\ngithub:           \"morloc-project/morloc\"\nbug-reports:      https://github.com/morloc-project/morloc/issues\ncopyright:        \"2026 Zebulun Arendsee\"\n\nextra-source-files:\n  - README.md\n  - ChangeLog.md\n  # libraries\n  - data/lang/cpp/pool.cpp\n  # universal c header\n  - data/morloc/morloc.h\n  # shared language config\n  - data/lang/languages.yaml\n  # per-language files\n  - data/lang/c/lang.yaml\n  - data/lang/cpp/lang.yaml\n  - data/lang/cpp/cppmorloc.hpp\n  - data/lang/cpp/cppmorloc.cpp\n  - data/lang/cpp/morloc_pch.hpp\n  - data/lang/cpp/pool.cpp\n  - data/lang/cpp/init.sh\n  - data/lang/py/lang.yaml\n  - data/lang/py/pymorloc.c\n  - data/lang/py/setup.py\n  - data/lang/py/Makefile\n  - data/lang/py/pool.py\n  - data/lang/py/init.sh\n  - data/lang/r/lang.yaml\n  - data/lang/r/rmorloc.c\n  - data/lang/r/pool.R\n  - data/lang/r/init.sh\n  - data/lang/julia/juliabridge.c\n  - data/lang/julia/MorlocRuntime.jl\n  - data/lang/julia/lang.yaml\n  - data/lang/julia/pool.jl\n  - data/lang/julia/init.sh\n\ndefault-extensions:\n  - TypeOperators\n\ndependencies:\n  - base\n  - aeson\n  - binary\n  - array\n  - bytestring\n  - containers\n  - directory\n  - directory-tree\n  - extra\n  - filepath\n  - file-embed\n  - haskell-src-meta\n  - http-conduit\n  - http-types\n  - mtl\n  - parsec\n  - partial-order\n  - pretty-simple\n  - prettyprinter\n  - process\n  - raw-strings-qq\n  - safe\n  - scientific\n  - template-haskell\n  - text\n  - time\n  - unordered-containers\n  - vector\n  - yaml\n  - zip-archive\n\nghc-options:\n  - -Wall\n  - -Wcompat\n  - -fwarn-unused-binds\n  - -fwarn-unused-imports \n  - -fwarn-tabs\n  - -fwarn-incomplete-uni-patterns\n  - -fwarn-incomplete-record-updates\n  - -fwarn-redundant-constraints\n  - -fno-warn-missing-signatures\n  - -fno-warn-unused-do-bind\n  - -fno-warn-orphans\n  - -haddock\n\nlibrary:\n  source-dirs: library\n  build-tools:\n    - happy\n\nexecutables:\n  morloc:\n    main:          Main.hs\n    source-dirs:   executable\n    ghc-options:\n      - -Wall\n      - -threaded\n      - -rtsopts\n      - -with-rtsopts=-N\n      - -O2\n      - -haddock\n    dependencies:\n      - morloc\n      - base\n      - optparse-applicative\n      - temporary\n      - text\n\n  morloc-codegen-generic:\n    main:          Main.hs\n    source-dirs:   exe/morloc-codegen-generic\n    ghc-options:\n      - -Wall\n      - -O2\n    dependencies:\n      - morloc\n      - base\n      - aeson\n      - binary\n      - bytestring\n      - text\n      - yaml\n\ntests:\n  morloc-test:\n    main:          Main.hs\n    source-dirs:   test-suite\n    ghc-options:\n      - -threaded\n      - -rtsopts\n      - -with-rtsopts=-N\n    dependencies:\n      - morloc\n      - base\n      - QuickCheck\n      - tasty\n      - tasty-golden\n      - tasty-hunit\n      - tasty-quickcheck\n\n  morloc-integration-test:\n    main:          Main.hs\n    source-dirs:   test-suite/integration\n    ghc-options:\n      - -threaded\n      - -rtsopts\n      - -with-rtsopts=-N\n    dependencies:\n      - base\n      - async\n      - directory\n      - filepath\n      - process\n      - tasty\n      - tasty-hunit\n      - temporary\n      - text\n      - time\n\nbenchmarks:\n  morloc-bench:\n    main:          Bench.hs\n    source-dirs:   bench\n    ghc-options:\n      - -threaded\n      - -rtsopts\n      - -with-rtsopts=-N\n      - -O2\n    dependencies:\n      - morloc\n      - base\n      - tasty-bench\n      - text\n      - filepath\n"
  },
  {
    "path": "scripts/build-rust.sh",
    "content": "#!/usr/bin/env bash\n# Build static Rust binaries and/or container images locally.\n#\n# Usage:\n#   ./scripts/build-rust.sh rust       Build static binaries to out/\n#   ./scripts/build-rust.sh tiny       Build morloc-tiny container\n#   ./scripts/build-rust.sh full       Build morloc-full container (requires tiny)\n#   ./scripts/build-rust.sh all        Build binaries + both containers\n#   ./scripts/build-rust.sh export     Export morloc-full image to tarball\n#\n# Environment:\n#   MORLOC_VERSION   Container image tag (default: edge)\nset -euo pipefail\n\ncd \"$(dirname \"$0\")/..\"\n\nMORLOC_VERSION=\"${MORLOC_VERSION:-edge}\"\n\ncmd_rust() {\n    echo \"=== Building static Rust binaries ===\"\n    podman build -t morloc-rust-build -f container/static-build/Dockerfile .\n    mkdir -p out\n    podman run --rm -v \"$(pwd)/out:/out\" morloc-rust-build\n    echo \"=== Output in out/ ===\"\n    ls -lh out/\n}\n\ncmd_tiny() {\n    echo \"=== Building morloc-tiny:${MORLOC_VERSION} ===\"\n    make -C container MORLOC_VERSION=\"$MORLOC_VERSION\" build-tiny\n}\n\ncmd_full() {\n    echo \"=== Building morloc-full:${MORLOC_VERSION} ===\"\n    make -C container MORLOC_VERSION=\"$MORLOC_VERSION\" build-full\n}\n\ncmd_all() {\n    cmd_rust\n    cmd_tiny\n    cmd_full\n}\n\ncmd_export() {\n    local tarball=\"/tmp/morloc-full-${MORLOC_VERSION}.tar\"\n    echo \"=== Exporting morloc-full:${MORLOC_VERSION} to ${tarball} ===\"\n    podman save \"ghcr.io/morloc-project/morloc/morloc-full:${MORLOC_VERSION}\" -o \"$tarball\"\n    ls -lh \"$tarball\"\n}\n\nusage() {\n    echo \"Usage: $(basename \"$0\") <command>\"\n    echo \"\"\n    echo \"Commands:\"\n    echo \"  rust     Build static Rust binaries (morloc-manager, morloc-nexus, libmorloc.so)\"\n    echo \"  tiny     Build morloc-tiny container\"\n    echo \"  full     Build morloc-full container (requires tiny)\"\n    echo \"  all      Build everything (binaries + containers)\"\n    echo \"  export   Export morloc-full image to /tmp/ tarball\"\n    echo \"\"\n    echo \"Environment:\"\n    echo \"  MORLOC_VERSION=edge  (default)\"\n}\n\ncase \"${1:-}\" in\n    rust)   cmd_rust ;;\n    tiny)   cmd_tiny ;;\n    full)   cmd_full ;;\n    all)    cmd_all ;;\n    export) cmd_export ;;\n    -h|--help|\"\")\n        usage\n        exit 0\n        ;;\n    *)\n        echo \"Unknown command: $1\" >&2\n        usage >&2\n        exit 1\n        ;;\nesac\n"
  },
  {
    "path": "scripts/bump-version.sh",
    "content": "#!/usr/bin/env bash\n# Sync version from ChangeLog.md to package.yaml and Cargo.toml files.\n# Usage: after adding a new ChangeLog.md entry, run:\n#   ./scripts/bump-version.sh\nset -euo pipefail\n\ncd \"$(dirname \"$0\")/..\"\n\nif [[ -n \"${1:-}\" ]]; then\n  VERSION=\"$1\"\nelse\n  VERSION=$(head -1 ChangeLog.md | grep -oP '^\\d+\\.\\d+\\.\\d+[^ ]*')\n  if [[ -z \"$VERSION\" ]]; then\n    echo \"Error: could not parse version from first line of ChangeLog.md\" >&2\n    echo \"Expected format: X.Y.Z [YYYY-MM-DD]\" >&2\n    exit 1\n  fi\nfi\n\necho \"Syncing version $VERSION ...\"\n\nsed -i \"s/^version:          .*/version:          $VERSION/\" package.yaml\necho \"  updated package.yaml\"\n\n# Regenerate morloc.cabal from package.yaml\nif command -v hpack >/dev/null 2>&1; then\n  hpack\nelif stack exec -- hpack --version >/dev/null 2>&1; then\n  stack exec -- hpack\nelse\n  # Direct sed fallback: update the version line in morloc.cabal\n  sed -i \"s/^version:        .*/version:        $VERSION/\" morloc.cabal\nfi\necho \"  updated morloc.cabal\"\n\nfor f in data/rust/morloc-{nexus,manifest,runtime}/Cargo.toml; do\n  sed -i \"s/^version = \\\".*\\\"/version = \\\"$VERSION\\\"/\" \"$f\"\n  echo \"  updated $f\"\ndone\n\necho \"Done. Verify with: git diff\"\n"
  },
  {
    "path": "spec/SPEC.md",
    "content": "# Morloc Language Specification\n\nThis document is the master table of contents for the Morloc formal specification. It describes the language, its type system, its runtime behavior, and its compiler architecture.\n\n## Conventions\n\n- Syntax examples use `morloc` code blocks\n- Type rules use standard inference notation\n- Cross-references use `[[file]]` links\n- This spec describes *what* the language does, not how the compiler implements it\n\n## Table of Contents\n\n### Language Reference\n\nSurface syntax and semantics of morloc programs.\n\n- [[language/LANGUAGE.md]] -- Overview: design philosophy and core concepts\n- [[language/lexical-structure.md]] -- Tokens, literals, comments, whitespace, indentation\n- [[language/expressions.md]] -- Application, lambda, composition, where-clauses, accessors\n- [[language/declarations.md]] -- Functions, type aliases, records, typeclasses, instances\n- [[language/operators.md]] -- Fixity declarations, precedence, and associativity\n- [[language/annotations.md]] -- Docstrings and CLI argument metadata tags\n\n### Type System\n\nThe formal type system underlying morloc's multi-language guarantees.\n\n- [[types/TYPES.md]] -- Overview: two-level design and role in cross-language safety\n- [[types/primitive-types.md]] -- Built-in types and sized variants\n- [[types/type-declarations.md]] -- Type aliases, language-specific mappings, terminal types\n- [[types/records.md]] -- Record, object, and table types\n- [[types/typeclasses.md]] -- Typeclass declarations, instances, and constraint resolution\n- [[types/polymorphism.md]] -- Parametric polymorphism and quantification\n- [[types/subtyping.md]] -- Subtyping rules and instantiation\n- [[types/inference.md]] -- Bidirectional type inference\n\n### Module System\n\nHow morloc code is organized, shared, and resolved.\n\n- [[modules/MODULES.md]] -- Overview: namespaces, planes, and the dependency DAG\n- [[modules/imports-and-exports.md]] -- Module declarations, imports, exports, visibility\n- [[modules/resolution.md]] -- Path resolution: local, system, and plane lookups\n- [[modules/packages.md]] -- Package metadata, versioning, and installation\n\n### Cross-Language Interoperability\n\nMechanisms enabling function composition across Python, C++, and R.\n\n- [[interop/INTEROP.md]] -- Overview: unifying multiple languages under one type system\n- [[interop/foreign-functions.md]] -- Source declarations, function binding, renaming\n- [[interop/type-mappings.md]] -- General-to-concrete type resolution per language\n- [[interop/serialization.md]] -- Msgpack protocol, schema encoding, packing rules\n- [[interop/implementation-selection.md]] -- Realization algorithm and language selection\n\n### Runtime System\n\nHow compiled morloc programs execute.\n\n- [[runtime/RUNTIME.md]] -- Overview: the nexus-pool model\n- [[runtime/execution-model.md]] -- Nexus lifecycle, pool management, dispatch, errors\n- [[runtime/ipc.md]] -- Unix socket protocol, message format, data flow\n- [[runtime/manifest.md]] -- JSON manifest schema\n- [[runtime/cli.md]] -- Automatic CLI generation from type signatures\n\n### Compiler Architecture\n\nThe compilation pipeline from source to executable.\n\n- [[compiler/COMPILER.md]] -- Overview: stages, IRs, and design principles\n- [[compiler/pipeline.md]] -- End-to-end compilation flow\n- [[compiler/parsing.md]] -- Lexing, tokenization, expression parsing\n- [[compiler/typechecking.md]] -- Bidirectional inference, subtyping, context threading\n- [[compiler/codegen.md]] -- Two-phase lower-then-print model and per-language translators\n- [[compiler/build.md]] -- Manifest writing, pool generation, compilation, deployment\n"
  },
  {
    "path": "spec/compiler/COMPILER.md",
    "content": "# Compiler Architecture\n\nThe morloc compiler transforms `.loc` source files into executable programs consisting of a nexus binary, a JSON manifest, and language-specific pool files. The compilation pipeline is structured as a sequence of transformations over progressively lower-level intermediate representations.\n\n## Design Principles\n\n- **Separation of concerns.** Parsing, type checking, realization, code generation, and building are distinct phases with well-defined interfaces.\n- **Language-parametric code generation.** A shared imperative IR and a configurable lowering engine allow new target languages to be added by supplying a configuration record and a printer, without modifying the core pipeline.\n- **Compile-time language selection.** The compiler resolves which language implements each function before any code is generated. There is no runtime dispatch logic for language selection.\n\n## Intermediate Representations\n\n| IR | Phase | Description |\n|----|-------|-------------|\n| `ExprI` | Parsing | Indexed expression AST with unique IDs per node |\n| `AnnoS TypeU` | Type checking | Annotated syntax tree with general types |\n| `AnnoS Type Lang` | Realization | Annotated tree with concrete types and language tags |\n| `PolyExpr` | Parameterize/Express | Polymorphic manifold tree |\n| `MonoExpr` | Segment | Monomorphic per-language tree with boundary markers |\n| `SerialExpr` | Serialize | Tree with serialization/deserialization operations |\n| `IStmt`/`IExpr` | Lower | Shared imperative IR |\n| `MDoc` | Print | Rendered source text |\n\n## Subfiles\n\n- [[pipeline.md]] -- End-to-end compilation flow\n- [[parsing.md]] -- Lexing, tokenization, expression parsing\n- [[typechecking.md]] -- Bidirectional inference, subtyping, context threading\n- [[codegen.md]] -- Lower-then-print model and per-language translators\n- [[build.md]] -- Manifest writing, pool generation, compilation, deployment\n"
  },
  {
    "path": "spec/compiler/build.md",
    "content": "# Build System\n\nThe build phase writes generated code to disk, compiles what needs compiling, and assembles the final executable program.\n\n## System Initialization\n\nBefore any program can be built, the runtime must be initialized with `morloc init`. This one-time step compiles:\n\n1. **libmorloc.so** -- shared C library providing socket communication, shared memory, msgpack serialization, and error handling.\n2. **morloc-nexus** -- static C binary (the nexus), linked against libmorloc.\n3. **libcppmorloc.a** -- static C++ library with template-based serialization.\n4. **pymorloc** -- Python C extension for msgpack serialization.\n5. **rmorloc** -- R C extension for msgpack serialization.\n\nThese artifacts are installed to `~/.local/share/morloc/`:\n\n```\nbin/morloc-nexus        -- pre-compiled nexus binary\ninclude/morloc.h        -- C runtime header\ninclude/cppmorloc.hpp   -- C++ serialization header\nlib/libmorloc.so        -- C runtime library\nlib/libcppmorloc.a      -- C++ serialization library\nlib/libpymorloc.so      -- Python C extension\nlib/librmorloc.so       -- R C extension\n```\n\n## Program Build Process\n\nWhen the user runs `morloc make -o foo script.loc`, the build phase:\n\n1. **Writes the manifest**: `foo.manifest` (JSON file).\n2. **Copies the nexus binary**: copies `~/.local/share/morloc/bin/morloc-nexus` to `./foo`.\n3. **Writes pool source files**: `pool.py`, `pool.cpp`, `pool.R` as needed.\n4. **Compiles C++ pools**: invokes the C++ compiler to produce `pool-cpp.out`.\n5. **Sets permissions**: marks interpreted pools and the nexus as executable.\n\n## Output Structure\n\nAfter a successful build:\n\n```\n./foo                  -- nexus binary (copy of morloc-nexus)\n./foo.manifest         -- JSON manifest\n./pool.py              -- Python pool (if Python functions used)\n./pool.cpp             -- C++ pool source (if C++ functions used)\n./pool-cpp.out         -- compiled C++ pool\n./pool.R               -- R pool (if R functions used)\n```\n\n## Compilation Commands\n\n### C++ Pools\n\n```\ng++ -O2 --std=c++17 -o pool-cpp.out pool.cpp \\\n  -I~/.local/share/morloc/include \\\n  -L~/.local/share/morloc/lib \\\n  -lmorloc -lcppmorloc -lpthread\n```\n\nThe C++ compiler, optimization level, and standard are configurable. The pool links against both libmorloc (for socket communication) and libcppmorloc (for serialization).\n\n### Python Pools\n\nNo compilation. The generated `pool.py` is an executable script that imports `pymorloc` for serialization and the user's source modules for function implementations.\n\n### R Pools\n\nNo compilation. The generated `pool.R` is an executable script that loads `rmorloc` via `dyn.load` and sources the user's R files.\n\n## Runtime Directory\n\nAt execution time, the nexus creates a temporary directory for socket files:\n\n```\n/tmp/morloc.XXXXXX/\n  pipe-py             -- Python pool socket\n  pipe-cpp            -- C++ pool socket\n  pipe-r              -- R pool socket\n```\n\nThis directory is cleaned up when the program exits.\n\n## Dependencies\n\n| Requirement | When Needed |\n|-------------|-------------|\n| C compiler (gcc/clang) | `morloc init` only |\n| C++ compiler (g++/clang++) | Programs using C++ functions |\n| Python 3 | Programs using Python functions |\n| R | Programs using R functions |\n| libmorloc.so | Always (runtime communication) |\n\n## Error Handling\n\nThe build system reports:\n- Missing compilers with actionable error messages.\n- Compilation failures with the compiler's full output.\n- Missing libraries or headers with paths that were searched.\n"
  },
  {
    "path": "spec/compiler/codegen.md",
    "content": "# Code Generation\n\nCode generation transforms the type-checked, realized program into a JSON manifest and language-specific pool source files. It uses a two-phase architecture -- lower to a shared imperative IR, then print to target language syntax -- to maximize code reuse across languages.\n\n## Sub-Phases\n\nCode generation proceeds through five sub-phases:\n\n### 1. Parameterize\n\nThread function arguments through expression trees. Each manifold (a callable unit in the generated code) receives its argument list explicitly. Unused arguments are removed.\n\n### 2. Express\n\nCreate polymorphic manifold trees that represent the program's call structure. Each manifold captures a function, its arguments, and its body. Lambdas, lists, tuples, and records are normalized into a uniform manifold representation.\n\n### 3. Segment\n\nBreak polymorphic trees at language boundaries. Each manifold is assigned to a specific language. Where a manifold in language A calls a function in language B, a boundary marker is inserted. The result is a set of monomorphic, per-language manifold trees.\n\nManifold forms after segmentation:\n\n| Form | Description |\n|------|-------------|\n| LocalRoot | Entry point callable from the nexus |\n| LocalForeign | Calls a function in another pool |\n| RemoteWorker | Called by another pool via the nexus |\n\n### 4. Serialize\n\nInsert serialization and deserialization operations at every language boundary. For each cross-language call, the compiler:\n\n1. Determines the msgpack schema for each argument and the return value.\n2. Wraps the caller's output in a serialize call.\n3. Wraps the callee's input in a deserialize call.\n\nThe result is a tree where every value crossing a boundary is explicitly packed/unpacked.\n\n### 5. Emit\n\nTranslate each serialized manifold tree into target language source code. This phase uses the two-phase lower-then-print architecture described below.\n\n## Two-Phase Architecture\n\n### Lowering\n\nThe lowering phase converts the compiler's manifold AST into a shared imperative IR (`IStmt` / `IExpr`). The IR supports:\n\n- Variable assignment\n- Function calls (local and remote)\n- Serialize/deserialize calls\n- List/map iteration\n- Lambda expressions\n- Return statements\n\nLowering is parameterized by a configuration record (`LowerConfig`) with approximately 30 fields covering:\n\n| Category | Examples |\n|----------|---------|\n| Type rendering | How to render type names, template parameters |\n| Accessors | Record field access, tuple indexing syntax |\n| Constructors | List, tuple, record literal syntax |\n| Calls | Local function calls, remote pool calls |\n| Serialization | Pack/unpack function names and calling conventions |\n| Statements | Let binding, return, function definition syntax |\n\nA single generic fold (`defaultFoldRules`) walks the AST and produces the IR using the language's `LowerConfig`. This fold is written once and shared by all languages.\n\n### Printing\n\nThe printing phase converts the imperative IR into rendered source text. Each language provides:\n\n- `printExpr`: render an IR expression as source text.\n- `printStmt`: render an IR statement as source text.\n- `printProgram`: assemble a complete pool source file from rendered fragments.\n\n## Per-Language Specifics\n\n### C++\n\n- **Stateful**: uses a monadic translator that tracks generated struct types, forward declarations, and serializer functions.\n- **Type-aware**: `LowerConfig` type fields produce concrete C++ type strings (used for variable declarations, template parameters).\n- **Struct generation**: anonymous record types produce `struct` definitions and corresponding serializer/deserializer functions.\n\n### Python\n\n- **Stateless**: uses a pure translator with only a counter for unique names.\n- **No types**: `LowerConfig` type fields return nothing (Python is dynamically typed).\n- **Import handling**: source file paths are converted to Python import statements.\n\n### R\n\n- **Stateless**: similar to Python.\n- **R conventions**: 1-indexed access, `c()` for vectors, `$` for field access, `list()` for records.\n\n## Pool Template Assembly\n\nEach language has a pool template with `<<<BREAK>>>` markers. The code generator splits the template at these markers and interleaves generated code:\n\n```\n[template header]\n[generated imports]\n[template middle]\n[generated function definitions]\n[template dispatch section]\n[generated dispatch table]\n[template footer]\n```\n\nThe result is a complete, self-contained pool source file.\n\n## Manifest Generation\n\nIn parallel with pool generation, the compiler produces a JSON manifest describing:\n\n- Pool definitions (language, executable command, socket name)\n- Command definitions (subcommand name, manifold ID, pool index, schemas)\n- Argument metadata (kind, metavar, type, default, description)\n- Pure expression trees (for commands that need no pool)\n\nSee [[../runtime/manifest.md]].\n"
  },
  {
    "path": "spec/compiler/parsing.md",
    "content": "# Parsing\n\nThe parser transforms morloc source text into an indexed expression AST. It uses parser combinators with stateful tracking of fixity tables, expression indices, and indentation.\n\n## Lexical Analysis\n\nThe lexer and parser are combined in a single pass using parser combinators (Megaparsec). There is no separate tokenization step; the parser directly consumes characters and builds AST nodes.\n\nKey lexer features:\n\n- **Indentation tracking**: the parser maintains a minimum indentation position. Continuation lines must be indented beyond the start of their expression.\n- **Docstring recognition**: `--'` comments are captured as documentation and attached to subsequent declarations.\n- **String interpolation**: `#{}` within string literals introduces embedded expressions.\n- **Number formats**: decimal, hexadecimal (`0x`), octal (`0o`), and binary (`0b`) integer literals; decimal and scientific floating-point literals.\n\n## Parser State\n\nThe parser carries mutable state through parsing:\n\n| Field | Purpose |\n|-------|---------|\n| Fixity table | Maps operator names to (associativity, precedence) |\n| Expression index | Counter for assigning unique IDs to AST nodes |\n| Variable index | Counter for generating fresh variable names |\n| Generics | Set of type variables in the current generic scope |\n| Module config | Settings specific to the current module |\n| Source positions | Maps expression IDs to source locations (for error reporting) |\n\n## Top-Level Forms\n\nThe parser recognizes these top-level constructs:\n\n1. **Module declaration**: `module name (exports)`\n2. **Import**: `import name (terms)`\n3. **Type signature**: `f a b :: Type`\n4. **Function definition**: `f x y = body`\n5. **Type alias**: `type Name = Type` or `type Lang => Name = \"concrete\"`\n6. **Record/object/table declaration**: `record Name where ...`\n7. **Typeclass declaration**: `class Name a where ...`\n8. **Instance declaration**: `instance Name Type where ...`\n9. **Fixity declaration**: `infixl 6 +, -`\n10. **Source declaration**: `source Lang from \"file\" (\"names\")`\n\n## Expression Parsing\n\nExpressions are parsed by a layered grammar:\n\n1. **Atoms**: literals, variables, parenthesized expressions, lists, tuples, records.\n2. **Application**: juxtaposition of atoms (left-associative).\n3. **Infix expressions**: operators interleaved with applications, resolved by precedence climbing.\n4. **Lambda**: `\\vars -> body`\n5. **Where clauses**: `expr where bindings`\n\n## Precedence Climbing\n\nInfix operator parsing uses the precedence climbing algorithm (a form of Pratt parsing):\n\n1. Parse a left-hand operand (application or atom).\n2. Look ahead for an operator. If its precedence is below the current minimum, stop.\n3. Consume the operator. Determine the minimum precedence for the right operand:\n   - Left-associative: current precedence + 1\n   - Right-associative: current precedence\n   - Non-associative: current precedence + 1\n4. Recursively parse the right operand at the new minimum.\n5. Build an application node: `op(lhs, rhs)`.\n6. Loop back to step 2 with the application as the new left-hand side.\n\nAll infix expressions desugar to prefix function application: `a + b` becomes `(+) a b`.\n\n## Indexed Expressions\n\nEvery AST node is wrapped in an `ExprI` that pairs it with a unique integer index:\n\n```\nExprI index Expr\n```\n\nThese indices serve as keys for attaching metadata (source positions, type annotations, language tags) in later phases without modifying the AST structure.\n\n## Module Parsing\n\nParsing is recursive over the module dependency graph:\n\n1. Parse the entry-point file.\n2. For each import, resolve the module path, parse it (accumulating state), and add it to the DAG.\n3. Continue until all transitive imports are parsed.\n\nThe resulting DAG, combined with the accumulated parser state, is the input to the link phase.\n"
  },
  {
    "path": "spec/compiler/pipeline.md",
    "content": "# Compilation Pipeline\n\nThe morloc compiler transforms source files into an executable through seven major phases. Each phase produces an intermediate representation consumed by the next.\n\n## Phase Summary\n\n```\n.loc files\n  |  1. Parse\n  v\nExprI (indexed AST) + DAG (module dependencies)\n  |  2. Link & Merge\n  v\nUnified compiler state (signatures, types, sources)\n  |  3. Restructure & Treeify\n  v\nAnnoS TypeU Many Int (annotated syntax tree, general types)\n  |  4. Typecheck\n  v\nAnnoS Type One Lang (concrete types, language tags)\n  |  5. Realize\n  v\nValidated program (all implementations resolved)\n  |  6. Generate\n  v\nJSON manifest + language-specific pool source code\n  |  7. Build\n  v\nExecutable (nexus binary + manifest + pool files)\n```\n\n## 1. Parse\n\nThe parser reads `.loc` files and produces an indexed expression AST (`ExprI`). Each subexpression receives a unique integer ID. The parser handles indentation-sensitive syntax, operator precedence (via precedence climbing), and module structure.\n\nParsing is recursive over the module DAG: when an import is encountered, the imported module is located, parsed, and added to the dependency graph. Parser state (fixity tables, expression counters) accumulates across modules.\n\n## 2. Link and Merge\n\n**Link** transfers terms, types, typeclasses, and source declarations from the parsed DAG into the compiler's global state. Each module's exports are filtered according to its export list and the importing module's import list.\n\n**Merge** resolves cases where the same function name is imported from multiple modules with different language implementations. If the general types match, the implementations are combined (implementation polymorphism). If the general types conflict, an error is reported.\n\n## 3. Restructure and Treeify\n\n**Restructure** expands type aliases, resolving general type names to their definitions.\n\n**Treeify** converts the flat expression list into an annotated syntax tree (`AnnoS`), threading type annotations and language information through the tree structure.\n\n## 4. Typecheck\n\nThe typechecker applies bidirectional type inference to the annotated syntax tree. It:\n\n- Infers types for unannotated expressions (synthesis).\n- Verifies annotated expressions match their declared types (checking).\n- Resolves existential type variables.\n- Checks subtyping at polymorphic boundaries.\n- Eta-expands lambdas that return function types.\n\nThe output is a fully typed tree with concrete types and language annotations.\n\nSee [[typechecking.md]] for details.\n\n## 5. Realize\n\nRealization validates that every function in the typed program has at least one foreign implementation, selects language implementations to minimize serialization boundaries, and checks that all required concrete type mappings exist.\n\nSee [[../interop/implementation-selection.md]].\n\n## 6. Generate\n\nCode generation transforms the realized program into:\n\n- A JSON manifest describing commands, pools, and argument schemas.\n- Source code for each language pool.\n\nThis phase proceeds through several sub-phases:\n\n1. **Parameterize**: thread function arguments through expression trees.\n2. **Express**: create polymorphic manifold trees.\n3. **Segment**: split trees at language boundaries into monomorphic per-language segments.\n4. **Serialize**: insert msgpack serialization/deserialization operations.\n5. **Emit**: translate each segment to target language source code via the shared imperative IR.\n\nSee [[codegen.md]] for details.\n\n## 7. Build\n\nThe build phase writes generated files to disk:\n\n- Copies the pre-compiled nexus binary.\n- Writes the JSON manifest.\n- Writes pool source files.\n- Compiles C++ pools with the system C++ compiler.\n- Sets executable permissions on interpreted pools.\n\nSee [[build.md]] for details.\n"
  },
  {
    "path": "spec/compiler/typechecking.md",
    "content": "# Type Checking\n\nThe morloc typechecker implements bidirectional type checking based on Dunfield and Krishnaswami (2013). It infers types for unannotated expressions, verifies annotated expressions, and resolves polymorphism through subtyping and existential instantiation.\n\n## Strategy\n\nThe typechecker makes a single pass over the annotated syntax tree, alternating between two modes:\n\n- **Synthesis mode**: infer the type of an expression from its structure.\n- **Checking mode**: verify that an expression matches an expected type.\n\nThe Sub rule bridges the modes: when checking, if no specific rule applies, synthesize the type and verify it is a subtype of the expected type.\n\n## Type Representations\n\nThe typechecker operates on general types (`TypeU`), which include:\n\n| Form | Description |\n|------|-------------|\n| `VarU a` | Universal type variable |\n| `ExistU a-hat constraints` | Existential (unsolved) type variable |\n| `ForallU a T` | Universal quantification |\n| `FunU [A1,...,An] R` | Multi-argument function type |\n| `AppU F [A1,...,An]` | Type constructor application |\n| `NamU kind name params fields` | Named/record type |\n\n## Context Threading\n\nThe typechecker threads an ordered context (Gamma) through every judgment. The context records:\n\n- **Universal markers**: type variables currently in scope.\n- **Term annotations**: the type of each bound variable.\n- **Unsolved existentials**: type variables awaiting solution.\n- **Solved existentials**: type variables resolved to a concrete type.\n- **Scope markers**: boundaries for scoped bindings.\n\nContext operations:\n- **Extend**: add a new binding.\n- **Apply**: substitute all solved existentials into a type.\n- **Cut**: remove bindings beyond a scope marker (when exiting a quantifier's scope).\n- **Lookup**: find a term's type or an existential's solution.\n\n## Subtyping\n\nThe subtyping judgment `A <: B` checks whether A is at least as polymorphic as B:\n\n- **Reflexivity**: `A <: A`.\n- **Functions**: contravariant in arguments, covariant in returns.\n- **Forall-left**: instantiate with a fresh existential.\n- **Forall-right**: abstract over a fresh universal.\n- **Existentials**: delegate to instantiation.\n\nFor multi-argument functions, all arguments are checked contravariantly in a single batch before applying accumulated context updates. This avoids quadratic behavior.\n\nSee [[../types/subtyping.md]] for the formal rules.\n\n## Instantiation\n\nWhen an existential must equal a concrete type:\n\n- **Solve**: if the type is a monotype and passes the occurs check, solve the existential directly.\n- **Function decomposition**: split the existential into argument and return existentials.\n- **Forall decomposition**: instantiate the quantifier and continue.\n\n## Expression Rules\n\n### Literals\n\nLiterals synthesize their natural type: `42 => Int`, `3.14 => Real`, `\"hello\" => Str`, `True => Bool`.\n\n### Variables\n\nA variable synthesizes the type found at its binding in the context.\n\n### Application\n\nTo typecheck `f x`:\n1. Synthesize the type of `f`.\n2. If `f : forall a. A`, instantiate `a` with a fresh existential.\n3. If `f : FunU [A1,...] R`, check `x` against `A1`; result is the remaining function or return type.\n4. If `f : ExistU a-hat`, decompose into `a-hat1 -> a-hat2` and retry.\n\n### Lambda\n\nChecked against `FunU [A1,...] R`: bind parameters to `A1,...`, check body against `R`.\n\nSynthesized (no expected type): create fresh existentials for parameters, synthesize the body. If the body's type is a function, eta-expand to make all arguments explicit.\n\n### Where Clauses\n\nEach binding in a `where` clause is synthesized independently. The resulting types are added to the context before typechecking the main expression.\n\n## Error Reporting\n\nType errors include source location information (derived from expression indices) and describe the mismatch:\n\n| Error | Cause |\n|-------|-------|\n| Subtype error | Types incompatible in subtype check |\n| Instantiation error | Cannot solve existential |\n| Occurs check failure | Infinite type (e.g., `a = [a]`) |\n| Application of non-function | Applying a value that is not a function |\n| Too many arguments | More arguments than the function accepts |\n\n## Multi-Stage Checking\n\nThe full type checking pipeline has two stages:\n\n1. **Frontend typecheck**: operates on general types (`TypeU`). Produces a tree annotated with general types and potentially multiple language implementations.\n2. **Value check**: resolves which language implementations to use, matching general types to concrete implementations. Verifies that implementations exist for all required functions.\n"
  },
  {
    "path": "spec/interop/INTEROP.md",
    "content": "# Cross-Language Interoperability\n\nMorloc's central design goal is composing functions across Python, C++, and R within a single program. This section describes how the language, type system, and runtime cooperate to make cross-language calls transparent to the programmer.\n\n## Why Serialization Boundaries Exist\n\nEach supported language has its own memory layout, calling conventions, and type system. A Python `list` and a C++ `std::vector` cannot share memory directly. Morloc bridges this gap through serialization: when data crosses a language boundary, it is packed into a language-neutral binary format (msgpack), transmitted, and unpacked on the other side.\n\nThe compiler's job is to:\n\n1. Determine *where* language boundaries fall in a composition.\n2. Insert serialization/deserialization operations at those boundaries.\n3. Choose language implementations to *minimize* the number of boundaries.\n\n## How It Works\n\nA function like `map` may have implementations in Python, C++, and R. When the programmer writes `map f xs`, the compiler examines `f` to determine which languages can provide it. If `f` is only available in C++, the compiler selects C++'s `map` as well, avoiding a serialization boundary between `map` and `f`.\n\nThis process -- called *realization* -- happens entirely at compile time. The generated program contains no runtime language selection logic.\n\n## Subfiles\n\n- [[foreign-functions.md]] -- Source declarations, function binding, renaming\n- [[type-mappings.md]] -- How general types resolve to Python, C++, and R types\n- [[serialization.md]] -- Msgpack protocol, schema encoding, packing rules\n- [[implementation-selection.md]] -- Realization algorithm and boundary minimization\n"
  },
  {
    "path": "spec/interop/foreign-functions.md",
    "content": "# Foreign Functions\n\nForeign function declarations bind functions from Python, C++, or R source files to morloc names. This is morloc's FFI (Foreign Function Interface).\n\n## Source Declarations\n\nThe `source` keyword declares where a foreign function lives:\n\n```morloc\nsource Py from \"module.py\" (\"add\", \"mul\")\nsource Cpp from \"module.hpp\" (\"add\", \"mul\")\nsource R from \"module.R\" (\"add\", \"mul\")\n```\n\nEach declaration specifies:\n- **Language**: `Py`, `Cpp`, or `R`\n- **Source file**: path to the implementation file (relative to the module)\n- **Function list**: names of functions to import from that file\n\nAfter a source declaration, the imported names must be given morloc type signatures:\n\n```morloc\nsource Py from \"stats.py\" (\"mean\", \"stdev\")\n\nmean :: [Real] -> Real\nstdev :: [Real] -> Real\n```\n\n## Renaming\n\nForeign functions can be renamed on import to match morloc naming conventions or avoid conflicts:\n\n```morloc\nsource Py from \"module.py\" (\"python_add\" as add)\nsource Cpp from \"module.hpp\" (\"cpp_multiply\" as mul)\n```\n\nThe foreign name appears first, followed by `as` and the morloc name.\n\n## Built-in Imports\n\nFunctions from a language's standard library can be imported without a file path:\n\n```morloc\nsource Py (\"abs\", \"len\", \"sorted\")\nsource Cpp (\"std::sort\" as sort)\n```\n\nThe compiler generates the appropriate import/include statement for the target language.\n\n## Operator Binding\n\nOperators can be bound to foreign functions using parenthesized syntax:\n\n```morloc\nsource Py from \"pipe.py\" (\"pipe\" as (|>))\n```\n\n## Multiple Implementations\n\nThe same morloc function may have source declarations in multiple languages:\n\n```morloc\nsource Py from \"stats.py\" (\"mean\")\nsource Cpp from \"stats.hpp\" (\"mean\")\nsource R from \"stats.R\" (\"mean\")\n\nmean :: [Real] -> Real\n```\n\nThis creates implementation polymorphism: the compiler selects the most efficient implementation at compile time. See [[implementation-selection.md]].\n\n## Interaction with Pure Definitions\n\nA function may have both a pure morloc definition and foreign implementations:\n\n```morloc\nmean :: [Real] -> Real\nmean xs = div (sum xs) (size xs)           -- pure morloc\nsource Cpp from \"stats.hpp\" (\"mean\")       -- C++ override\n```\n\nWhen the compiler can use the C++ implementation (e.g., the caller is also in C++), it prefers the native implementation over the composed morloc version.\n\n## Source File Resolution\n\nSource file paths are resolved relative to the module directory. For installed packages, this means relative to the package's installation directory.\n"
  },
  {
    "path": "spec/interop/implementation-selection.md",
    "content": "# Implementation Selection\n\nWhen a function has implementations in multiple languages, the compiler must choose which to use. This process -- called *realization* -- minimizes the number of serialization boundaries in the generated program.\n\n## The Realization Problem\n\nConsider:\n\n```morloc\nimport root-py (map, add)\nimport root-cpp (map)\n\ndoubleAll xs = map (add 1) xs\n```\n\n`map` has Python and C++ implementations. `add` has only Python. The compiler must choose: if it selects C++'s `map`, then `add 1` must be serialized into C++ and back -- an unnecessary boundary. If it selects Python's `map`, everything runs in one language with no serialization.\n\n## Selection Algorithm\n\nThe realization algorithm works as follows:\n\n1. **Build the dependency graph.** For each function call, record which implementations are available and which functions it calls.\n\n2. **Propagate language constraints.** Functions with only one language implementation constrain their callers. In the example above, `add` being Python-only forces `map` to prefer Python.\n\n3. **Score candidate implementations.** For each function with multiple implementations, score each candidate by counting the serialization boundaries it would introduce.\n\n4. **Select minimum-boundary implementations.** Choose the implementation that minimizes total serialization cost.\n\n## Collapse Behavior\n\nWhen a specialized function (available in only one language) is nested inside a polymorphic function (available in several languages), the outer function \"collapses\" to the same language:\n\n```morloc\nimport root-py (map, filter)\nimport root-cpp (map, filter, applyKernel)\n\nprocess imgs = map applyKernel (filter isValid imgs)\n```\n\nSince `applyKernel` is C++ only, both `map` and `filter` collapse to C++, avoiding two serialization boundaries.\n\n## Explicit Language Control\n\nWhen the programmer wants to force a specific language, they can use distinct names:\n\n```morloc\nimport foopy (pyAdd)\nimport foocpp (cppMul)\n\nmixedOps x = pyAdd (cppMul x 5) 10\n```\n\nHere the language boundary between `cppMul` and `pyAdd` is explicit and intentional. The compiler inserts serialization at that boundary.\n\n## Compile-Time Only\n\nImplementation selection is entirely static. The generated program contains no runtime dispatch logic for language choice. Each function call in the generated code targets a specific pool in a specific language.\n\n## Validation\n\nDuring realization, the compiler validates that:\n\n- Every function in the program has at least one implementation.\n- Every selected implementation has the necessary concrete type mappings.\n- The serialization schemas are consistent at every boundary.\n\nIf validation fails, the compiler reports which functions lack implementations or which type mappings are missing.\n\n## Inspecting Selections\n\nThe compiler's implementation choices can be inspected:\n\n```bash\nmorloc dump script.loc       -- show intermediate representations\n```\n\nThe dump output includes the realized program with language annotations on each function node.\n"
  },
  {
    "path": "spec/interop/serialization.md",
    "content": "# Serialization\n\nWhen data crosses a language boundary, it is serialized to a binary format, transmitted, and deserialized on the other side. Morloc uses MessagePack (msgpack) as its serialization protocol.\n\n## Schema Encoding\n\nEach type has a compact schema string that describes its msgpack representation. Schemas are embedded in the manifest and used by both the nexus and language pools.\n\n### Schema Syntax\n\n| Schema | Type | Msgpack Format |\n|--------|------|----------------|\n| `z` | `Unit` | nil |\n| `b` | `Bool` | boolean |\n| `i1` | `Int8` | int8 |\n| `i2` | `Int16` | int16 |\n| `i4` | `Int32` | int32 |\n| `i8` | `Int64` / `Int` | int64 |\n| `u1` | `UInt8` | uint8 |\n| `u2` | `UInt16` | uint16 |\n| `u4` | `UInt32` | uint32 |\n| `u8` | `UInt64` | uint64 |\n| `f4` | `Float32` | float32 |\n| `f8` | `Float64` / `Real` | float64 |\n| `s` | `Str` | string |\n| `[X]` | `List X` | array of X |\n| `(X,Y)` | `(X, Y)` | array [X, Y] |\n| `{k1:X,k2:Y}` | record | map {\"k1\": X, \"k2\": Y} |\n\n### Annotated Schemas\n\nSchemas may carry a display name prefix for CLI help text:\n\n```\n<double>f8        -- Real, displayed as \"double\"\n<integer>i8       -- Int, displayed as \"integer\"\n```\n\n## Packing Rules\n\n### Primitives\n\nPrimitives are packed directly as their msgpack counterparts: integers as msgpack integers (at the declared width), floats as msgpack floats, booleans as msgpack booleans, strings as msgpack strings.\n\n### Lists\n\nA `List a` is packed as a msgpack array. Each element is packed according to its element schema.\n\n### Tuples\n\nA tuple `(a, b, c)` is packed as a fixed-length msgpack array `[a, b, c]`.\n\n### Records\n\nRecords are packed as msgpack maps with string keys:\n\n```\n{name = \"Alice\", age = 27}  -->  {\"name\": \"Alice\", \"age\": 27}\n```\n\nIn C++, records may be packed as positional tuples for efficiency when both sides agree on field order.\n\n## Serialization Insertion\n\nThe compiler automatically inserts serialization at language boundaries during the Serialize phase of code generation. For each cross-language call:\n\n1. The caller's pool serializes the arguments using the argument schemas.\n2. The serialized bytes are sent over the Unix socket.\n3. The callee's pool deserializes the bytes into native types.\n4. After execution, the result is serialized and sent back.\n5. The caller deserializes the result.\n\n## Language Bindings\n\nEach language has a serialization library (compiled during `morloc init`):\n\n- **Python**: `pymorloc` -- C extension providing `pack`/`unpack` functions\n- **C++**: `cppmorloc` -- Template header with type-safe serialization\n- **R**: `rmorloc` -- C extension for R type serialization\n\nThese libraries implement schema-driven serialization: given a schema string and a native value, they produce msgpack bytes, and vice versa.\n\n## Cross-Language Data Representation\n\nThe same logical value may have different native representations:\n\n| Morloc Value | Python | C++ | R | Msgpack |\n|-------------|--------|-----|---|---------|\n| `42` | `int(42)` | `int(42)` | `42L` | `0x2a` (positive fixint) |\n| `[1,2,3]` | `[1,2,3]` | `vector{1,2,3}` | `list(1,2,3)` | array of 3 ints |\n| `{x=1}` | `{\"x\":1}` | `struct{x:1}` | `list(x=1)` | map {\"x\": 1} |\n\nThe serialization layer normalizes these representations through the common msgpack format.\n"
  },
  {
    "path": "spec/interop/type-mappings.md",
    "content": "# Type Mappings\n\nType mappings define how morloc's general types resolve to concrete representations in each target language. Every general type used at a language boundary must have a concrete mapping for that language.\n\n## Mapping Declaration Syntax\n\n```morloc\ntype <Lang> => <Name> <params> = \"<concrete-string>\" <params>\n```\n\nExamples:\n\n```morloc\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\ntype R => Int = \"integer\"\n\ntype Py => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\ntype R => List a = \"list\" a\n```\n\n## Parameter Substitution\n\nIn the concrete type string, `$1`, `$2`, etc. are replaced by the rendered concrete types of the corresponding parameters:\n\n```morloc\ntype Cpp => Map k v = \"std::map<$1,$2>\" k v\n```\n\n`Map Str Int` in C++ becomes `std::map<std::string,int>`.\n\nFor languages where containers are unparameterized (Python, R), parameters are listed but do not appear in the string:\n\n```morloc\ntype Py => Map k v = \"dict\" k v\n```\n\n## Standard Mappings\n\n### Primitive Types\n\n| Morloc | Python | C++ | R |\n|--------|--------|-----|---|\n| `Bool` | `\"bool\"` | `\"bool\"` | `\"logical\"` |\n| `Int` | `\"int\"` | `\"int\"` | `\"integer\"` |\n| `Int8` | `\"int\"` | `\"int8_t\"` | `\"integer\"` |\n| `Int16` | `\"int\"` | `\"int16_t\"` | `\"integer\"` |\n| `Int32` | `\"int\"` | `\"int32_t\"` | `\"integer\"` |\n| `Int64` | `\"int\"` | `\"int64_t\"` | `\"integer\"` |\n| `UInt8` | `\"int\"` | `\"uint8_t\"` | `\"integer\"` |\n| `UInt16` | `\"int\"` | `\"uint16_t\"` | `\"integer\"` |\n| `UInt32` | `\"int\"` | `\"uint32_t\"` | `\"integer\"` |\n| `UInt64` | `\"int\"` | `\"uint64_t\"` | `\"integer\"` |\n| `Real` | `\"float\"` | `\"double\"` | `\"numeric\"` |\n| `Float32` | `\"float\"` | `\"float\"` | `\"numeric\"` |\n| `Float64` | `\"float\"` | `\"double\"` | `\"numeric\"` |\n| `Str` | `\"str\"` | `\"std::string\"` | `\"character\"` |\n| `Unit` | `\"None\"` | `\"void\"` | `\"NULL\"` |\n\n### Collection Types\n\n| Morloc | Python | C++ | R |\n|--------|--------|-----|---|\n| `List a` / `[a]` | `\"list\"` | `\"std::vector<$1>\"` | `\"list\"` |\n| `(a, b)` | `\"tuple\"` | `\"std::tuple<$1,$2>\"` | `\"list\"` |\n| `(a, b, c)` | `\"tuple\"` | `\"std::tuple<$1,$2,$3>\"` | `\"list\"` |\n\n### Record Types\n\nRecords use per-declaration mappings:\n\n```morloc\nrecord Py => Person = \"dict\"\nrecord Cpp => Person = \"Person\"       -- generates a struct\nrecord R => Person = \"list\"\n```\n\n## Resolution Process\n\nWhen the compiler needs the concrete type of a general type for a specific language:\n\n1. Fully expand general type aliases.\n2. Look up the language-specific mapping for the resulting type constructor.\n3. Recursively resolve type parameters.\n4. Substitute resolved parameters into the concrete string.\n\nIf no mapping exists for a required type in a required language, compilation fails with an error identifying the unmapped type.\n"
  },
  {
    "path": "spec/language/LANGUAGE.md",
    "content": "# Language Reference\n\nMorloc is a typed, functional workflow language for composing functions across Python, C++, and R under a unified type system. Programs are written in a declarative, ML-style syntax. The compiler resolves types, selects language implementations, and generates executables that orchestrate cross-language calls at runtime.\n\n## Design Philosophy\n\n- **Language-agnostic composition.** Functions from different languages compose as naturally as functions within a single language. The programmer writes workflows; the compiler handles language boundaries.\n- **Types as contracts.** A single general type system mediates between languages. Each language maps general types to its own concrete types. The compiler verifies consistency across these mappings.\n- **No recursion, no effects.** Morloc programs describe pure data-flow pipelines. Iteration is expressed through higher-order functions (`map`, `fold`). Side effects are confined to foreign function implementations.\n- **Serialization is implicit.** When a function in one language calls a function in another, the compiler inserts serialization and deserialization automatically. The programmer never writes marshalling code.\n\n## Core Concepts\n\n- **Modules** organize code into namespaces with explicit exports. See [[LANGUAGE.md#modules]] and [[../modules/MODULES.md]].\n- **Expressions** are the building blocks: function application, lambda abstraction, composition, and where-clauses. See [[expressions.md]].\n- **Declarations** define functions, type signatures, type aliases, records, typeclasses, and foreign function bindings. See [[declarations.md]].\n- **Operators** support user-defined infix syntax with fixity declarations. See [[operators.md]].\n- **Annotations** attach documentation and CLI metadata to exported functions. See [[annotations.md]].\n\n## Subfiles\n\n- [[lexical-structure.md]] -- Tokens, literals, comments, whitespace, indentation\n- [[expressions.md]] -- Expression forms\n- [[declarations.md]] -- Top-level declaration forms\n- [[operators.md]] -- Operator syntax and fixity\n- [[annotations.md]] -- Docstrings and metadata tags\n"
  },
  {
    "path": "spec/language/annotations.md",
    "content": "# Annotations\n\nMorloc uses docstring comments and metadata tags to annotate exported functions. These annotations drive automatic CLI generation: the nexus translates them into command-line argument parsers, help text, and usage messages.\n\n## Docstring Syntax\n\nA docstring comment is a single line beginning with `--'` immediately preceding a type signature:\n\n```morloc\n--' Add two numbers\nadd :: Int -> Int -> Int\n```\n\nMulti-line docstrings use consecutive `--'` lines:\n\n```morloc\n--' Compute the mean of a list of numbers\n--' Returns 0 for an empty list\nmean :: [Real] -> Real\n```\n\nTriple-quoted block docstrings use `\"\"\"`:\n\n```morloc\n\"\"\"\nAdd two numbers\n\n@arg x First number\n@arg y Second number\n\"\"\"\nadd :: Int -> Int -> Int\n```\n\n## Metadata Tags\n\nMetadata tags within docstrings control how function arguments map to CLI parameters.\n\n### `@arg`\n\nAssociates a name and description with a positional argument:\n\n```morloc\n\"\"\"\n@arg x The input value\n@arg y The scaling factor\n\"\"\"\nscale :: Real -> Real -> Real\n```\n\nThis generates: `./program scale <x> <y>`\n\n### `@opt`\n\nMarks an argument as an optional CLI parameter with a default value:\n\n```morloc\n\"\"\"\n@opt n Number of iterations (default: 10)\n\"\"\"\niterate :: Int -> [Real] -> [Real]\n```\n\nThis generates: `./program iterate --n=10 <input>`\n\n### `@flag`\n\nMarks a `Bool` argument as a flag:\n\n```morloc\n\"\"\"\n@flag verbose Enable verbose output\n\"\"\"\nprocess :: Bool -> [Str] -> [Str]\n```\n\nThis generates: `./program process --verbose <input>`\n\nFlags also generate a negation form: `--no-verbose`.\n\n## Effect on CLI Generation\n\nWhen a function is exported from the main module, its docstring and type signature together determine the generated CLI interface:\n\n- Each function becomes a subcommand: `./program <function> [args]`\n- Positional arguments follow the subcommand in order\n- Optional arguments use `--name=value` syntax\n- Flags use `--name` / `--no-name` syntax\n- Record arguments are expanded into grouped options\n- Help text is derived from docstring content\n\nSee [[../runtime/cli.md]] for the full CLI generation rules.\n"
  },
  {
    "path": "spec/language/declarations.md",
    "content": "# Declarations\n\nTop-level declarations define the structure of a morloc program: modules, imports, functions, types, and foreign bindings.\n\n## Module Declaration\n\nEvery morloc file may begin with a module declaration specifying the module name and its exports:\n\n```morloc\nmodule main (foo, bar)     -- export foo and bar\nmodule utilities (*)       -- export everything\n```\n\nIf no module declaration is present, the file is treated as an anonymous module.\n\n## Import Declarations\n\nImports bring names from other modules into scope:\n\n```morloc\nimport root-py                  -- import all exports\nimport foo (bar, baz)           -- import specific names\nimport math (sin, cos, pi)      -- selective import\n```\n\nMultiple imports of the same function from different language modules create implementation polymorphism:\n\n```morloc\nimport root-py (map, filter)\nimport root-cpp (map, filter)\n-- map and filter now have both Python and C++ implementations\n```\n\n## Type Signatures\n\nType signatures declare the general type of a function:\n\n```morloc\nadd :: Int -> Int -> Int\nmap a b :: (a -> b) -> [a] -> [b]\n```\n\nType variables (lowercase) introduce parametric polymorphism. They may be listed after the function name and before `::` to indicate universal quantification.\n\n## Function Definitions\n\nFunctions are defined by equation:\n\n```morloc\ndouble x = (x, x)\nadd3 x y z = x + y + z\n```\n\nA function may have both a morloc definition and one or more foreign implementations. The compiler selects among available implementations at compile time.\n\n```morloc\nmean :: [Real] -> Real\nmean xs = div (sum xs) (size xs)         -- pure morloc definition\nsource Cpp from \"stats.hpp\" (\"mean\")     -- C++ implementation\n```\n\n## Type Alias Declarations\n\nType aliases give names to type expressions:\n\n```morloc\ntype Filename = Str\ntype Matrix a = [[a]]\n```\n\nLanguage-specific type declarations map general types to concrete representations:\n\n```morloc\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\ntype R => Int = \"integer\"\n\ntype Py => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\ntype R => List a = \"list\" a\n```\n\nThe `$1`, `$2`, ... syntax in language-specific type strings refers to the positional type parameters. See [[../types/type-declarations.md]].\n\n## Record Declarations\n\nRecords define named product types with labeled fields:\n\n```morloc\nrecord Person where\n  name :: Str\n  age :: Int\n```\n\nLanguage-specific record mappings:\n\n```morloc\nrecord Py => Person = \"dict\"\nrecord Cpp => Person = \"Person\"\nrecord R => Person = \"list\"\n```\n\nObject and table declarations are variants of record declarations. See [[../types/records.md]].\n\n## Typeclass Declarations\n\nTypeclasses define overloaded interfaces:\n\n```morloc\nclass Monoid a where\n  empty a :: a\n  op a :: a -> a -> a\n```\n\n## Instance Declarations\n\nInstances provide typeclass implementations for specific types:\n\n```morloc\ninstance Monoid Int where\n  empty = 0\n  source Cpp from \"monoid.hpp\" (\"addInt\" as op)\n  source Py from \"monoid.py\" (\"addInt\" as op)\n```\n\nSee [[../types/typeclasses.md]].\n\n## Source Declarations (Foreign Function Interface)\n\nSource declarations bind foreign functions to morloc names:\n\n```morloc\nsource Py from \"module.py\" (\"add\", \"mul\")\nsource Cpp from \"module.hpp\" (\"add\", \"mul\")\nsource R from \"module.R\" (\"add\", \"mul\")\n```\n\nRenaming on import:\n\n```morloc\nsource Py from \"module.py\" (\"python_add\" as add)\n```\n\nImporting built-in functions (no file path):\n\n```morloc\nsource Py (\"abs\", \"len\")\n```\n\nSee [[../interop/foreign-functions.md]].\n\n## Fixity Declarations\n\nFixity declarations specify operator precedence and associativity:\n\n```morloc\ninfixl 6 +, -\ninfixr 5 :\ninfix  4 ==, !=\n```\n\nSee [[operators.md]].\n"
  },
  {
    "path": "spec/language/expressions.md",
    "content": "# Expressions\n\nMorloc expressions describe data transformations. Every expression has a type, inferred or checked by the type system.\n\n## Function Application\n\nApplication is by juxtaposition. Arguments follow the function, separated by whitespace:\n\n```morloc\nf x\nmap add [1, 2, 3]\nadd 1 2\n```\n\nApplication is left-associative: `f x y` parses as `(f x) y`.\n\nFunctions may be partially applied:\n\n```morloc\naddFive = add 5       -- partial application of add\nincrement = (+) 1     -- partial application of operator\n```\n\n## Lambda Expressions\n\nLambdas are introduced with `\\` and use `->` to separate parameters from the body:\n\n```morloc\n\\x -> x + 1\n\\x y -> x + y\n```\n\nLambdas may appear anywhere an expression is expected:\n\n```morloc\nmap (\\x -> x * 2) xs\n```\n\n## Function Composition\n\nThe `.` operator composes functions right-to-left:\n\n```morloc\nprocess = show . filter isPositive . map transform\n```\n\nThe above is equivalent to `\\x -> show (filter isPositive (map transform x))`.\n\nThe `$` operator provides low-precedence right-associative application, reducing parentheses:\n\n```morloc\nresult = show $ filter isPositive $ map transform xs\n```\n\n## Where Clauses\n\nA `where` clause introduces local bindings scoped to the enclosing definition:\n\n```morloc\nhypotenuse a b = sqrt (sqA + sqB) where\n  sqA = a * a\n  sqB = b * b\n```\n\nLocal bindings may be functions:\n\n```morloc\nfoo x = result where\n  helper y = y + 1\n  result = helper (helper x)\n```\n\nThere is no `let ... in` syntax; use `where` instead.\n\n## Record Field Access\n\nThe `.` operator in prefix position accesses a record field:\n\n```morloc\n.name alice        -- extract the \"name\" field\n.age alice         -- extract the \"age\" field\n```\n\nField accessors may be composed:\n\n```morloc\ngetName = .name\nnames = map getName people\n```\n\n## Record Construction\n\nRecords are constructed with brace syntax:\n\n```morloc\nalice = {name = \"Alice\", age = 27}\n```\n\n## Tuple Construction\n\nTuples are constructed with parentheses:\n\n```morloc\npair = (1, \"hello\")\ntriple = (True, 3.14, \"x\")\n```\n\n## List Construction\n\nLists use bracket syntax:\n\n```morloc\nxs = [1, 2, 3]\nempty = []\nnested = [[1, 2], [3, 4]]\n```\n\n## Type Ascription\n\nAn expression may be annotated with its type using `::`:\n\n```morloc\n(42 :: Int)\n```\n\n## Operator Sections\n\nOperators can be used in prefix position by enclosing them in parentheses:\n\n```morloc\n(+) 1 2       -- prefix application\n(+ 1)         -- right section: \\x -> x + 1\n```\n\n## Limitations\n\n- **No recursion.** Use higher-order functions (`map`, `fold`, `filter`) for iteration.\n- **No conditionals.** Use pattern matching or foreign functions for branching.\n- **No side effects.** Morloc expressions are pure; effects are confined to foreign implementations.\n"
  },
  {
    "path": "spec/language/lexical-structure.md",
    "content": "# Lexical Structure\n\n## Character Set\n\nAll morloc source files must contain only ASCII characters. Non-ASCII characters (including Unicode em-dashes, smart quotes, etc.) are not permitted and may cause silent truncation.\n\n## Comments\n\n```morloc\n-- Line comment (to end of line)\n--' Docstring comment (attaches to next declaration)\n{- Block comment (nestable) -}\n```\n\nDocstring comments (`--'`) have semantic meaning: they provide documentation that propagates to generated CLI help text. See [[annotations.md]].\n\n## Identifiers\n\n**Term identifiers** begin with a lowercase letter or underscore, followed by alphanumeric characters, underscores, or single quotes:\n\n```\nfoo, x', my_function, _unused\n```\n\n**Type identifiers** begin with an uppercase letter:\n\n```\nInt, Bool, List, Person\n```\n\n**Module names** consist of alphanumeric segments separated by hyphens:\n\n```\nroot, root-py, root-cpp, math\n```\n\n**Operator identifiers** consist of one or more operator characters:\n\n```\n+  -  *  /  .  $  |>  ==  !=  >=  ++\n```\n\nOperator characters include: `! # % & * + - . / < = > ? @ \\ ^ | ~ :`\n\n## Literals\n\n### Numeric Literals\n\n```morloc\n42          -- Int (decimal)\n0xFF        -- Int (hexadecimal)\n0o77        -- Int (octal)\n0b1010      -- Int (binary)\n3.14        -- Real (floating point)\n1.0e-3      -- Real (scientific notation)\n```\n\n### String Literals\n\n```morloc\n\"hello\"              -- String literal\n\"line1\\nline2\"       -- Escape sequences: \\n, \\t, \\\\, \\\"\n\"value: #{expr}\"     -- String interpolation\n```\n\n### Boolean Literals\n\n```morloc\nTrue\nFalse\n```\n\n### Collection Literals\n\n```morloc\n[1, 2, 3]                      -- List\n(1, \"hello\", True)              -- Tuple\n{name = \"Alice\", age = 27}      -- Record\n```\n\n## Whitespace and Indentation\n\nMorloc is indentation-sensitive, following conventions similar to Haskell:\n\n- Top-level declarations must start at column 0.\n- Continuation lines of an expression must be indented further than the start of that expression.\n- `where` clauses introduce a new indentation block; all bindings in the block must align.\n\n```morloc\nfoo x = result where\n  helper y = y + 1    -- indented under where\n  z = 42              -- aligned with helper\n```\n\n## Keywords\n\nThe following identifiers are reserved:\n\n```\nmodule  import  from  source  export  where  type  record  object  table\nclass  instance  infixl  infixr  infix  True  False\n```\n\n## Separators and Delimiters\n\n```\n(  )    -- Grouping, tuples, operator sections\n[  ]    -- Lists\n{  }    -- Records\n,       -- Element separator\n::      -- Type annotation\n=       -- Definition, record field binding\n->      -- Function type arrow, lambda arrow\n\\       -- Lambda introduction\n.       -- Record field access (prefix), composition (infix)\n$       -- Low-precedence application (infix)\n=>      -- Language-specific type mapping\n```\n"
  },
  {
    "path": "spec/language/operators.md",
    "content": "# Operators\n\nMorloc supports user-defined infix operators with explicit precedence and associativity declarations.\n\n## Operator Characters\n\nOperators are identifiers composed entirely of the characters:\n\n```\n! # % & * + - . / < = > ? @ \\ ^ | ~ :\n```\n\nExamples: `+`, `*`, `.`, `$`, `|>`, `==`, `!=`, `>=`, `++`, `>>=`\n\n## Fixity Declarations\n\nEvery operator has three properties: associativity, precedence, and name. These are declared with fixity statements:\n\n```morloc\ninfixl 6 +, -       -- left-associative, precedence 6\ninfixr 5 :          -- right-associative, precedence 5\ninfix  4 ==, !=     -- non-associative, precedence 4\n```\n\n**Precedence** ranges from 0 (loosest) to 9 (tightest). The default for an undeclared operator is `infixl 9`.\n\n**Associativity** determines grouping when operators of the same precedence appear in sequence:\n\n| Associativity | Grouping | Example |\n|---------------|----------|---------|\n| `infixl` (left) | `(a + b) + c` | arithmetic operators |\n| `infixr` (right) | `a : (b : c)` | cons, composition |\n| `infix` (none) | `a == b == c` is an **error** | comparison operators |\n\n## Precedence Rules\n\nWhen two operators appear in the same expression, the one with higher precedence binds tighter:\n\n```morloc\ninfixl 7 *\ninfixl 6 +\n\n1 + 2 * 3       -- parses as: 1 + (2 * 3)\n```\n\nWhen two operators have the same precedence:\n\n- If both are `infixl`, they group left: `(a + b) - c`\n- If both are `infixr`, they group right: `a : (b : c)`\n- If they have conflicting associativity or are both `infix`, it is a **parse error**\n\n## Standard Operators\n\n| Operator | Fixity | Purpose |\n|----------|--------|---------|\n| `.` | `infixr 9` | Function composition |\n| `$` | `infixr 0` | Low-precedence application |\n| `+`, `-` | `infixl 6` | Arithmetic |\n| `*`, `/` | `infixl 7` | Arithmetic |\n| `==`, `!=` | `infix 4` | Comparison |\n\n## Desugaring\n\nAll infix expressions desugar to prefix function application:\n\n```morloc\na + b          -- desugars to: (+) a b\na + b * c      -- desugars to: (+) a ((*) b c)\n```\n\nOperators in prefix position are enclosed in parentheses:\n\n```morloc\n(+) 1 2        -- prefix application\n```\n\n## Defining Custom Operators\n\nA custom operator requires a fixity declaration, a type signature, and an implementation:\n\n```morloc\ninfixl 6 |>\n\n(|>) a b :: a -> (a -> b) -> b\nsource Py from \"pipe.py\" (\"pipe\" as (|>))\n\nresult = xs |> filter isPositive |> map transform\n```\n\n## Position Independence\n\nFixity declarations may appear anywhere in the module -- before or after the expressions that use the operator. The parser applies fixity information retroactively.\n\n## Precedence Climbing\n\nThe parser uses a precedence climbing algorithm (Pratt parsing) to resolve operator expressions. The algorithm recursively parses operands, accepting only operators whose precedence meets or exceeds a minimum threshold. Left-associative operators increment the threshold for the right operand; right-associative operators do not.\n"
  },
  {
    "path": "spec/modules/MODULES.md",
    "content": "# Module System\n\nMorloc code is organized into modules with explicit exports and imports. Modules form a directed acyclic graph (DAG) of dependencies. The module system supports hierarchical naming, namespace isolation, and a plane-based organization for package distribution.\n\n## Core Concepts\n\n**Modules** are the unit of compilation and distribution. Each `.loc` file defines at most one module. A module declares its name, its exports, and its imports from other modules.\n\n**Planes** are top-level namespaces that partition the module universe. The default plane contains the standard library and user-installed packages. Custom planes allow organizations to maintain private module collections.\n\n**The dependency DAG** tracks which modules import which others. The compiler resolves this DAG during parsing, loading each module exactly once and accumulating state (fixity tables, type definitions, typeclasses) across the graph.\n\n## State Accumulation\n\nAs modules are loaded, three kinds of state accumulate:\n\n- **Parser state**: fixity tables propagate across modules so that operator precedence is consistent.\n- **Compiler state**: type signatures, typeclass definitions, type aliases, and source declarations are collected globally.\n- **Dependency graph**: the DAG records import relationships for linking and merge phases.\n\n## Subfiles\n\n- [[imports-and-exports.md]] -- Module declarations, import forms, export lists, visibility\n- [[resolution.md]] -- Path resolution: local, system, and plane lookups\n- [[packages.md]] -- Package metadata, versioning, and installation\n"
  },
  {
    "path": "spec/modules/imports-and-exports.md",
    "content": "# Imports and Exports\n\n## Module Declaration\n\nA module declaration names the module and lists its exports:\n\n```morloc\nmodule main (foo, bar)\n```\n\nThe export list determines which names are visible to importers. The wildcard `(*)` exports all top-level definitions:\n\n```morloc\nmodule utilities (*)\n```\n\nIf no module declaration is present, the file is an anonymous module. Anonymous modules cannot be imported by other modules but may serve as the entry point for `morloc make`.\n\n## Export Lists\n\nOnly names listed in the export list are available to importing modules. This includes:\n- Function names\n- Type names\n- Operator names (in parentheses)\n\n```morloc\nmodule math (sin, cos, pi, (+))\n```\n\nNames not in the export list are private to the module.\n\n## Import Forms\n\n### Import All\n\nImport all exports from a module:\n\n```morloc\nimport root-py\n```\n\n### Selective Import\n\nImport specific names:\n\n```morloc\nimport foo (bar, baz)\n```\n\nOnly `bar` and `baz` are brought into scope. Other exports of `foo` are not accessible.\n\n### Multiple Imports of the Same Name\n\nImporting the same function name from multiple language-specific modules creates implementation polymorphism:\n\n```morloc\nimport root-py (map, filter, fold)\nimport root-cpp (map, filter, fold)\nimport root-r (map, filter, fold)\n```\n\nAfter these imports, `map`, `filter`, and `fold` each have three language implementations. The compiler selects among them during realization. See [[../interop/implementation-selection.md]].\n\n## Visibility Rules\n\n- A name is visible in a module if it is defined there or imported.\n- Imported names do not automatically re-export. To re-export, the name must appear in the module's own export list.\n- Name collisions between imports are resolved during the merge phase: if two imports provide the same name with different general types, it is an error. If they provide the same name with the same general type but different language implementations, they are merged (implementation polymorphism).\n\n## Import Side Effects\n\nImporting a module causes its type declarations, fixity declarations, and typeclass definitions to become visible in the importing module. This is necessary for correct parsing (fixity affects precedence) and type checking (type aliases and typeclasses must be in scope).\n"
  },
  {
    "path": "spec/modules/packages.md",
    "content": "# Packages\n\nA package is a distributable collection of morloc modules. Packages provide metadata for dependency management, versioning, and installation.\n\n## Package Metadata\n\nEach package contains a `package.yaml` file in its root directory:\n\n```yaml\nname: math\nversion: 0.1.0\nhomepage: https://github.com/morloc-project/math\nsynopsis: Mathematical functions for morloc\ndescription: Provides trigonometric, exponential, and other math functions\nbug-reports: https://github.com/morloc-project/math/issues\nlicense: MIT\nauthor: Morloc Project\ndependencies:\n  - root\n```\n\n## Fields\n\n| Field | Required | Description |\n|-------|----------|-------------|\n| `name` | Yes | Package name (must match directory name) |\n| `version` | Yes | Semantic version string |\n| `homepage` | No | URL to the project page |\n| `synopsis` | No | One-line description |\n| `description` | No | Longer description |\n| `license` | No | License identifier |\n| `author` | No | Package author |\n| `dependencies` | No | List of required packages |\n\n## Installation\n\nPackages are installed with `morloc install`:\n\n```bash\nmorloc install math                          -- from default repository\nmorloc install github:user/repo              -- from GitHub\nmorloc install /path/to/local/package        -- from local path\n```\n\nInstallation copies the package contents to the module library:\n\n```\n~/.local/share/morloc/src/morloc/plane/<plane>/<package-name>/\n```\n\n## Dependencies\n\nWhen a package declares dependencies, `morloc install` ensures those dependencies are also installed. Dependencies are resolved transitively: if `A` depends on `B` and `B` depends on `C`, installing `A` also installs `B` and `C`.\n\n## Standard Library Packages\n\nThe standard library is distributed as a set of packages:\n\n| Package | Purpose |\n|---------|---------|\n| `internal` | Compiler internal definitions |\n| `root` | Language-agnostic core signatures |\n| `root-py` | Python implementations of core functions |\n| `root-cpp` | C++ implementations of core functions |\n| `root-r` | R implementations of core functions |\n| `math` | Mathematical functions |\n\nThese are installed during `morloc init`:\n\n```bash\nmorloc init -f\nmorloc install internal root root-py root-cpp root-r math\n```\n\n## Versioning\n\nPackages use semantic versioning (MAJOR.MINOR.PATCH). The current module system does not enforce version constraints at resolution time; dependency version fields are informational.\n"
  },
  {
    "path": "spec/modules/resolution.md",
    "content": "# Module Resolution\n\nWhen a module imports another module by name, the compiler resolves that name to a file path. Resolution follows a three-case algorithm based on the relationship between the importing and imported module names.\n\n## Case 1: No Context (Top-Level File)\n\nWhen the entry-point file imports a module, or when no parent module context exists, the compiler searches three locations in order:\n\n1. **Local**: `./foo.loc` or `./foo/main.loc` (relative to the importing file)\n2. **System**: `$MORLOC_LIB/morloc/foo/main.loc` (core library)\n3. **Plane**: `$MORLOC_LIB/<plane>/foo/main.loc` (plane-specific library)\n\nThe first match wins. `$MORLOC_LIB` defaults to `~/.local/share/morloc/src/`.\n\n## Case 2: No Common Prefix\n\nWhen module `foo.bar.baz` imports `bif.buf` and the names share no common prefix, the compiler searches only system and plane locations:\n\n- `$MORLOC_LIB/morloc/bif/buf/main.loc`\n- `$MORLOC_LIB/<plane>/bif/buf/main.loc`\n\nLocal paths are not searched in this case. This prevents a local file from accidentally shadowing a system library module.\n\n## Case 3: Common Prefix\n\nWhen module `foo.bar.baz` imports `foo.bif` and the names share a common prefix (`foo`), the compiler resolves the import relative to the shared prefix:\n\n- From `foo/bar/baz/main.loc`, resolve `../../bif/main.loc`\n\nThis ensures that modules within the same package can reference siblings reliably, regardless of where the package is installed.\n\n## Hierarchical Naming\n\nModule names use dots as hierarchy separators: `foo.bar.baz`. Each segment maps to a directory level:\n\n```\nfoo.bar.baz  -->  foo/bar/baz/main.loc\n```\n\nThe leaf file is always `main.loc`.\n\n## Plane Lookup\n\nThe default plane is `default`. The plane can be configured, allowing organizations to maintain private module namespaces. A module `foo` in plane `myorg` resolves to:\n\n```\n$MORLOC_LIB/myorg/foo/main.loc\n```\n\n## Search Order Summary\n\n| Context | Search Order |\n|---------|-------------|\n| Top-level import | Local, System, Plane |\n| No common prefix | System, Plane |\n| Common prefix | Relative to prefix |\n"
  },
  {
    "path": "spec/runtime/RUNTIME.md",
    "content": "# Runtime System\n\nCompiled morloc programs execute as a **nexus-pool** architecture: a single C orchestrator (the nexus) dispatches function calls to language-specific worker processes (pools) via Unix domain sockets and msgpack serialization.\n\n## Design Rationale\n\nThe nexus-pool model separates orchestration from computation:\n\n- **The nexus** is a pre-compiled static C binary, built once during `morloc init`. It is data-driven: a per-program JSON manifest tells it which commands exist, which pools to start, and how to parse arguments. The nexus handles CLI parsing, argument serialization, pool lifecycle, dispatch, and result presentation.\n\n- **Pools** are language-specific processes (one per language per program). Each pool loads generated code for its language, listens for requests from the nexus, executes functions, and returns serialized results. Pools for interpreted languages (Python, R) run the interpreter directly; C++ pools are compiled to native executables.\n\nThis separation means the nexus never changes between programs -- only the manifest and pool code differ. It also means language runtimes are isolated in their own processes, preventing memory corruption across language boundaries.\n\n## Components\n\n| Component | Language | Lifecycle |\n|-----------|----------|-----------|\n| Nexus binary | C | Built once (`morloc init`), copied per program |\n| Manifest | JSON | Generated per program by `morloc make` |\n| Python pool | Python + C extension | Generated per program |\n| C++ pool | C++ | Generated and compiled per program |\n| R pool | R + C extension | Generated per program |\n\n## Subfiles\n\n- [[execution-model.md]] -- Nexus lifecycle, pool management, dispatch flow, errors\n- [[ipc.md]] -- Unix socket protocol, message format, data flow\n- [[manifest.md]] -- JSON manifest schema\n- [[cli.md]] -- Automatic CLI generation from type signatures\n"
  },
  {
    "path": "spec/runtime/cli.md",
    "content": "# CLI Generation\n\nMorloc automatically generates command-line interfaces from exported function signatures and their annotations. Each exported function becomes a subcommand of the compiled program.\n\n## Basic Structure\n\nA compiled program `foo` with exported functions `bar` and `baz` produces:\n\n```\n./foo bar [args]\n./foo baz [args]\n./foo --help\n./foo bar --help\n```\n\n## Argument Mapping\n\nFunction parameters map to CLI arguments based on their types and annotations.\n\n### Positional Arguments\n\nBy default, function parameters become positional arguments in declaration order:\n\n```morloc\n--' Scale a value\nscale :: Real -> Real -> Real\n```\n\n```\n./foo scale 3.14 2.0\n```\n\n### Optional Arguments\n\nParameters annotated with `@opt` become named optional arguments with defaults:\n\n```morloc\n\"\"\"\n@opt n Number of iterations (default: 10)\n\"\"\"\niterate :: Int -> [Real] -> [Real]\n```\n\n```\n./foo iterate --n=20 \"[1.0, 2.0]\"\n```\n\n### Flags\n\n`Bool` parameters annotated with `@flag` become boolean flags:\n\n```morloc\n\"\"\"\n@flag verbose Enable verbose output\n\"\"\"\nprocess :: Bool -> [Str] -> [Str]\n```\n\n```\n./foo process --verbose '[\"a\", \"b\"]'\n./foo process --no-verbose '[\"a\", \"b\"]'\n```\n\n### Record Arguments\n\nWhen a function takes a record type, its fields are expanded into a group of CLI options:\n\n```morloc\nrecord Config where\n  threshold :: Real\n  maxIter :: Int\n\n--' Run analysis\nanalyze :: Config -> [Real] -> [Real]\n```\n\n```\n./foo analyze --threshold=0.5 --max-iter=100 \"[1.0, 2.0]\"\n```\n\n## Help Text\n\nHelp text is generated from:\n\n- The function's docstring (description)\n- `@arg` annotations (per-argument descriptions)\n- Type signatures (type information and metavar names)\n- Default values (for optional arguments and flags)\n\n```\n$ ./foo bar --help\nbar - Scale a value\n\nUsage: foo bar <x> <y>\n\nArguments:\n  x    First number (Real)\n  y    Second number (Real)\n```\n\n## Input Format\n\nArguments are parsed according to their type schemas:\n\n- **Numeric types**: parsed as numbers (`42`, `3.14`)\n- **Strings**: parsed as-is or quoted\n- **Booleans**: `true`/`false` for positional; `--flag`/`--no-flag` for flags\n- **Lists**: JSON array syntax (`[1, 2, 3]`)\n- **Records**: JSON object syntax or expanded into named options\n- **Tuples**: JSON array syntax (`[1, \"hello\"]`)\n\n## Output Format\n\nReturn values are printed to stdout. The nexus deserializes the pool's msgpack response and renders it as human-readable text. Strings are printed without quotes; numbers, lists, and records are printed in JSON-like format.\n"
  },
  {
    "path": "spec/runtime/execution-model.md",
    "content": "# Execution Model\n\n## Overview\n\nA compiled morloc program executes as a set of cooperating processes: one nexus process and zero or more pool processes. The nexus is the entry point; pools execute language-specific code on demand.\n\n## Nexus Lifecycle\n\n1. **Startup.** The user invokes the program: `./foo subcommand [args]`. The nexus binary loads its manifest from `./foo.manifest`.\n\n2. **CLI parsing.** The nexus parses command-line arguments according to the manifest's argument definitions for the named subcommand. Argument types, defaults, and help text are all manifest-driven.\n\n3. **Pool startup.** The nexus starts the pool processes required for the subcommand. Each pool is a separate OS process communicating via a Unix domain socket. Only pools listed in the command's `needed_pools` are started.\n\n4. **Argument serialization.** The nexus serializes parsed arguments into msgpack format using the argument schemas from the manifest.\n\n5. **Dispatch.** The nexus sends the serialized arguments to the appropriate pool, identified by the command's pool index and manifold ID.\n\n6. **Result handling.** The nexus receives the serialized result from the pool, deserializes it, and prints it to stdout.\n\n7. **Shutdown.** After the command completes, the nexus terminates all pool processes and exits.\n\n## Pure Commands\n\nSome commands are *pure*: they require no pool and are evaluated entirely within the nexus using a built-in expression evaluator. Pure commands have an expression tree in the manifest instead of a pool reference. The nexus evaluates the tree directly, supporting literals, function application, lambda expressions, and string interpolation.\n\n## Pool Process Management\n\nEach pool is a long-running process that:\n\n1. Starts up and loads its generated code (imports for Python/R, compiled code for C++).\n2. Opens a Unix domain socket and waits for connections.\n3. On each request: deserializes arguments, dispatches to the appropriate function by manifold ID, serializes the result, and sends it back.\n4. Exits when the nexus closes the connection or sends a termination signal.\n\nPools are created in a temporary directory (`/tmp/morloc.XXXXXX/`) with socket files named by language (e.g., `pipe-py`, `pipe-cpp`).\n\n## Worker Dispatch Strategy\n\nEach pool forks multiple worker processes (typically `nproc - 1`) to handle requests concurrently. The dispatcher must route incoming client connections to an available worker.\n\n### Shared Queue (Current Approach)\n\nThe Python and R pools use a single Unix socketpair as a shared job queue. The dispatcher writes client file descriptors (via `SCM_RIGHTS` / `sendmsg`) to one end; all workers block on `recvmsg` on the other end. The kernel delivers each fd to exactly one waiting worker.\n\nThis design handles re-entrant callbacks correctly. When a worker makes a `foreign_call` to another pool and that pool calls back, the callback arrives as a new client connection. Since the blocked worker never calls `recvmsg`, the callback is picked up by an idle worker. Busy workers are invisible to the dispatch mechanism.\n\nThe C++ pool achieves equivalent semantics using a shared queue protected by a mutex and condition variable, with threads instead of processes.\n\n### Why Not Round-Robin\n\nA naive round-robin dispatcher assigns connections to workers in fixed order (W0, W1, ..., W0, ...). This works for unidirectional calls but deadlocks under re-entrant callbacks:\n\n1. Worker W0 receives a job and makes a `foreign_call` to Pool B.\n2. Pool B processes the call and issues callbacks back to Pool A. Each callback is a new connection.\n3. The round-robin dispatcher sends callbacks to W1, W2, ..., Wn in order.\n4. When all N workers are blocked in `foreign_call`, the (N+1)th callback wraps around to W0 -- which is still blocked. The callback sits unread in W0's pipe while Pool B waits for the response.\n\nThis circular dependency hangs all workers. The failure threshold is exactly N simultaneous bidirectional calls where N equals the worker count.\n\n### Depth Limitation\n\nEven with the shared queue, deep cross-language call chains are limited by the worker count. Each hop in a chain like `rId (pyId (rId (pyId ...)))` blocks a worker until the deeper computation returns. A depth-D chain (D alternating cross-language calls) requires roughly D/2 workers in each pool simultaneously. Chains deeper than `2 * (nproc - 1)` will deadlock due to worker exhaustion.\n\n## Dispatch Flow\n\n```\nUser CLI input\n  --> Nexus: parse args, serialize\n  --> Socket: send to pool\n  --> Pool: deserialize, call function\n  --> Pool: serialize result\n  --> Socket: send to nexus\n  --> Nexus: deserialize, print\n```\n\nFor cross-language function calls *within* the same command, the pool-to-pool path goes through the nexus:\n\n```\nPool A: serialize result --> Nexus --> Pool B: deserialize, call, serialize --> Nexus --> Pool A\n```\n\n## Error Propagation\n\nErrors at any stage propagate back to the user:\n\n- **CLI parse errors**: the nexus prints usage information and exits.\n- **Pool startup failure**: the nexus reports which pool failed to start.\n- **Function execution errors**: the pool sends an error response (status code 1) with an error message; the nexus prints the message and exits with a non-zero code.\n- **Communication errors**: socket failures or unexpected disconnections cause the nexus to report the failure and terminate.\n\n## Resource Cleanup\n\nThe nexus is responsible for cleaning up all resources:\n\n- Terminating pool processes (via signals)\n- Removing Unix domain socket files\n- Removing the temporary directory\n"
  },
  {
    "path": "spec/runtime/ipc.md",
    "content": "# Inter-Process Communication\n\nThe nexus and pools communicate over Unix domain sockets using a binary message protocol built on msgpack.\n\n## Transport\n\nEach pool opens a Unix domain socket in the program's temporary directory:\n\n```\n/tmp/morloc.XXXXXX/pipe-py\n/tmp/morloc.XXXXXX/pipe-cpp\n/tmp/morloc.XXXXXX/pipe-r\n```\n\nThe nexus connects to these sockets to send requests and receive responses. Each socket carries bidirectional traffic for one pool.\n\n## Request Format\n\nA request from the nexus to a pool is a msgpack-encoded structure:\n\n```\n{\n  function_id: <int>,         -- manifold ID identifying the function\n  args: [<msgpack>, ...]      -- serialized arguments\n}\n```\n\nThe `function_id` is an integer assigned by the compiler. Each function in a pool has a unique manifold ID. The pool uses this ID to dispatch to the correct function.\n\nArguments are pre-serialized by the nexus according to the argument schemas in the manifest. Each argument is an opaque msgpack byte sequence from the pool's perspective until it deserializes with the expected schema.\n\n## Response Format\n\nA response from a pool to the nexus:\n\n```\n{\n  status: <int>,              -- 0 = success, 1 = error\n  result: <msgpack>           -- serialized return value or error message\n}\n```\n\nOn success, `result` contains the function's return value serialized according to the return schema. On error, `result` contains a string error message.\n\n## Data Flow\n\n### Simple Command\n\n```\nNexus                              Pool\n  |                                  |\n  |-- request(mid=3, args=[...]) --> |\n  |                                  | deserialize args\n  |                                  | call function #3\n  |                                  | serialize result\n  | <-- response(status=0, ...) --   |\n  |                                  |\n```\n\n### Cross-Language Call\n\nWhen function A (in Pool X) calls function B (in Pool Y):\n\n```\nNexus                Pool X              Pool Y\n  |                    |                   |\n  |-- request -------> |                   |\n  |                    | call A            |\n  |                    | A needs B         |\n  | <-- call B ------- |                   |\n  |-- request(B) -----------------------> |\n  |                                        | call B\n  | <-- response(B) --------------------- |\n  |-- response(B) --> |                   |\n  |                    | A continues      |\n  | <-- response(A) - |                   |\n```\n\nThe nexus mediates all cross-pool communication. Pools never communicate directly with each other.\n\n## Packet Format\n\nMessages are framed as length-prefixed packets:\n\n1. **Length header**: 4-byte big-endian unsigned integer specifying the payload size.\n2. **Payload**: msgpack-encoded request or response.\n\nThis framing allows the receiver to read exactly the right number of bytes for each message.\n\n## Connection Lifecycle\n\n1. The nexus starts a pool process.\n2. The pool creates a Unix socket and begins listening.\n3. The nexus connects to the socket.\n4. Request/response pairs are exchanged.\n5. On completion, the nexus closes the connection.\n6. The pool detects the closed connection and exits.\n"
  },
  {
    "path": "spec/runtime/manifest.md",
    "content": "# Manifest Schema\n\nThe manifest is a JSON file that drives the nexus at runtime. It describes the pools, commands, argument schemas, and pure expression trees for one compiled morloc program.\n\n## Top-Level Structure\n\n```json\n{\n  \"version\": 1,\n  \"pools\": [ ... ],\n  \"commands\": [ ... ]\n}\n```\n\n## Pool Definitions\n\nEach pool entry describes a language-specific worker process:\n\n```json\n{\n  \"lang\": \"cpp\",                   -- language identifier\n  \"exec\": [\"./pool-cpp.out\"],      -- command to start the pool\n  \"socket\": \"pipe-cpp\"             -- Unix socket basename\n}\n```\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `lang` | string | Language name (`\"cpp\"`, `\"python3\"`, `\"r\"`) |\n| `exec` | [string] | Command and arguments to start the pool |\n| `socket` | string | Socket filename (created in temp directory) |\n\n## Command Definitions\n\nEach command corresponds to an exported function. Commands come in two forms.\n\n### Remote Commands\n\nA remote command dispatches to a pool:\n\n```json\n{\n  \"name\": \"foo\",\n  \"type\": \"remote\",\n  \"mid\": 1,                        -- manifold ID for dispatch\n  \"pool\": 0,                       -- index into pools array\n  \"needed_pools\": [0],             -- pool indices to start\n  \"arg_schemas\": [\"<double>f8\"],   -- msgpack schemas per argument\n  \"return_schema\": \"<double>f8\",   -- msgpack schema for return value\n  \"desc\": [\"description\"],         -- docstring lines\n  \"return_type\": \"Real\",           -- display type name\n  \"return_desc\": [],               -- return value documentation\n  \"args\": [ ... ]                  -- argument definitions\n}\n```\n\n### Pure Commands\n\nA pure command is evaluated by the nexus without any pool:\n\n```json\n{\n  \"name\": \"greeting\",\n  \"type\": \"pure\",\n  \"arg_schemas\": [\"s\"],\n  \"return_schema\": \"s\",\n  \"desc\": [],\n  \"return_type\": \"Str\",\n  \"return_desc\": [],\n  \"args\": [ ... ],\n  \"expr\": { ... }                  -- expression tree\n}\n```\n\n## Argument Definitions\n\n### Positional\n\n```json\n{\n  \"kind\": \"pos\",\n  \"metavar\": \"X\",\n  \"type_desc\": \"Real\",\n  \"quoted\": false,\n  \"desc\": []\n}\n```\n\n### Optional\n\n```json\n{\n  \"kind\": \"opt\",\n  \"metavar\": \"N\",\n  \"type_desc\": \"Int\",\n  \"quoted\": false,\n  \"short\": \"n\",\n  \"long\": \"count\",\n  \"default\": \"1\",\n  \"desc\": []\n}\n```\n\n### Flag\n\n```json\n{\n  \"kind\": \"flag\",\n  \"short\": \"v\",\n  \"long\": \"verbose\",\n  \"long_rev\": \"no-verbose\",\n  \"default\": \"false\",\n  \"desc\": []\n}\n```\n\n### Group (Record Argument)\n\n```json\n{\n  \"kind\": \"grp\",\n  \"metavar\": \"CONFIG\",\n  \"desc\": [],\n  \"group_opt\": {\"short\": null, \"long\": \"config\"},\n  \"entries\": [\n    {\"key\": \"x\", \"arg\": { ... }}\n  ]\n}\n```\n\n## Expression Tree\n\nPure commands contain an expression tree that the nexus evaluates directly. Node types:\n\n| Tag | Fields | Description |\n|-----|--------|-------------|\n| `lit` | `schema`, `lit_type`, `value` | Numeric/boolean literal |\n| `str` | `schema`, `value` | String literal |\n| `container` | `schema`, `elements` | List or tuple |\n| `app` | `schema`, `func`, `args` | Function application |\n| `lambda` | `vars`, `body` | Lambda abstraction |\n| `bound` | `schema`, `var` | Bound variable reference |\n| `interpolation` | `schema`, `strings` | String interpolation |\n| `pattern` | `schema`, `pattern` | Accessor/setter pattern |\n\n### Selector Patterns\n\nPatterns use selectors for structural access:\n\n```json\n{\"type\": \"end\"}                                            -- terminal\n{\"type\": \"idx\", \"selectors\": [{\"index\": 0, \"sub\": ...}]}  -- tuple index\n{\"type\": \"key\", \"selectors\": [{\"key\": \"name\", \"sub\": ...}]} -- record key\n```\n"
  },
  {
    "path": "spec/types/TYPES.md",
    "content": "# Type System\n\nMorloc employs a two-level type system that mediates between a language-agnostic *general* type layer and language-specific *concrete* type layers. This design enables the compiler to verify cross-language composition statically: a function's general signature is checked once, while its concrete realizations are checked per language.\n\n## Two-Level Design\n\n**General types** (`TypeU`) are the programmer-facing types: `Int`, `Real`, `[a]`, `a -> b`. They carry no language-specific information. Type checking, inference, and subtyping operate at this level.\n\n**Concrete types** (`Type`) are language-specific: Python's `int`, C++'s `std::vector<int>`, R's `integer`. The compiler resolves general types to concrete types through type declarations (e.g., `type Py => Int = \"int\"`). Concrete types determine serialization format and foreign function signatures.\n\nA third, minimal representation (`TypeF`) is used at code generation boundaries where only structural shape matters.\n\n## Role in Cross-Language Safety\n\nWhen a Python function's output feeds into a C++ function's input, the compiler verifies:\n\n1. Both functions share a compatible general type at the boundary.\n2. Each side has a concrete type mapping for that general type.\n3. A serialization schema exists to convert between the concrete representations.\n\nIf any check fails, the program is rejected at compile time.\n\n## Judgment Forms\n\nThe type system uses bidirectional type checking with two primary judgments:\n\n- **Synthesis**: given an expression, infer its type.\n- **Checking**: given an expression and an expected type, verify compatibility.\n\nSubtyping connects the two: a synthesized type may be a subtype of the expected type. See [[inference.md]] and [[subtyping.md]].\n\n## Subfiles\n\n- [[primitive-types.md]] -- Built-in types and sized variants\n- [[type-declarations.md]] -- Type aliases and language-specific mappings\n- [[records.md]] -- Record, object, and table types\n- [[typeclasses.md]] -- Typeclass declarations and instances\n- [[polymorphism.md]] -- Parametric polymorphism and quantification\n- [[subtyping.md]] -- Subtyping rules and instantiation\n- [[inference.md]] -- Bidirectional type inference\n"
  },
  {
    "path": "spec/types/inference.md",
    "content": "# Type Inference\n\nMorloc uses bidirectional type checking based on the Dunfield-Krishnaswami algorithm (\"Complete and Easy Bidirectional Typechecking for Higher-Rank Polymorphism\", 2013). This provides sound and complete type inference for higher-rank polymorphism without requiring type annotations on lambda parameters.\n\n## Judgment Forms\n\n### Synthesis\n\n```\nGamma |- e => A -| Delta\n```\n\nGiven context Gamma and expression `e`, infer type `A` and produce updated context Delta.\n\nSynthesis applies to:\n- **Literals**: synthesize the literal's type (e.g., `42 => Int`)\n- **Variables**: look up the type in the context\n- **Applications**: synthesize the function type, then check arguments\n- **Annotations**: use the declared type\n\n### Checking\n\n```\nGamma |- e <= A -| Delta\n```\n\nGiven context Gamma, expression `e`, and expected type `A`, verify that `e` has type `A` and produce updated context Delta.\n\nChecking applies to:\n- **Lambdas with known argument types**: push argument types into the context, check the body\n- **Expressions with type annotations**: verify the annotation matches\n\n### The Sub Rule\n\nWhen checking, if no specific checking rule applies, the checker falls back to synthesis plus subtyping:\n\n```\n  Gamma |- e => A -| Theta       Theta |- [Theta]A <: [Theta]B -| Delta\n  -----------------------------------------------------------------------\n                      Gamma |- e <= B -| Delta\n```\n\nThis bridges synthesis and checking: synthesize the actual type, then verify it is a subtype of the expected type.\n\n## Context\n\nThe context (Gamma) is an **ordered list** of bindings. Order matters: variables can only reference bindings that appear earlier.\n\n| Entry | Meaning |\n|-------|---------|\n| Universal variable marker | A type variable is in scope |\n| Term annotation `x : A` | Variable `x` has type `A` |\n| Unsolved existential `a-hat` | An unknown type, to be solved |\n| Solved existential `a-hat = tau` | Existential resolved to `tau` |\n| Scope marker | Boundary for cutting |\n\n### Context Operations\n\n- **Extend**: add a binding to the end of the context\n- **Apply**: substitute all solved existentials in a type\n- **Cut**: remove all bindings after a scope marker (used when exiting a quantifier scope)\n- **Lookup**: find a variable's type or an existential's solution\n- **Fresh variable**: generate a new existential and add it to the context\n\n### Monotonicity\n\nOnce an existential is solved, it remains solved. Later operations can only add more solutions, never retract them. This property enables batched processing: the compiler can process multiple function arguments before applying the accumulated solutions.\n\n## Type Checking Strategy\n\n### Literals\n\nLiterals synthesize their obvious type: integers as `Int`, floating-point as `Real`, strings as `Str`, booleans as `Bool`.\n\n### Variables\n\nA variable synthesizes the type recorded in the context at its binding site.\n\n### Lambda Expressions\n\nWhen a lambda is *checked* against a function type `A -> B`, the parameter is bound to type `A` and the body is checked against `B`.\n\nWhen a lambda is *synthesized* (no expected type), fresh existentials are created for parameters. The body is synthesized, and if it returns a function type, the lambda is eta-expanded to make all arguments explicit.\n\n### Application\n\nTo synthesize `f x`:\n\n1. Synthesize the type of `f`, yielding `A`.\n2. Apply the context to `A`.\n3. If `A` is a function type `A1 -> A2`, check `x` against `A1`; the result type is `A2`.\n4. If `A` is a universal `forall a. B`, instantiate `a` with a fresh existential and retry.\n5. If `A` is an existential, decompose it into a function existential and retry.\n\n### Where Clauses\n\nBindings in a `where` clause are type-checked as local definitions. Each binding's type is synthesized and added to the context before the main expression is checked.\n\n## Occurs Check\n\nBefore solving an existential `a-hat = tau`, the checker verifies that `a-hat` does not appear free in `tau`. This prevents infinite types (e.g., `a = [a]`).\n\n## Performance\n\nTwo optimizations avoid exponential behavior:\n\n- **Batched function subtyping**: all arguments of a multi-argument function are subtype-checked before applying context updates, reducing O(n^2) to O(n).\n- **Direct eta expansion**: when a lambda body returns a function, the expanded form is constructed directly without re-synthesizing, reducing O(2^n) to O(n) for nested lambdas.\n"
  },
  {
    "path": "spec/types/polymorphism.md",
    "content": "# Polymorphism\n\nMorloc supports parametric polymorphism: functions and types may be parameterized over type variables that are instantiated at each use site.\n\n## Parametric Polymorphism\n\nA polymorphic function operates uniformly over all types. Type variables (lowercase identifiers) in a signature are implicitly universally quantified:\n\n```morloc\nid a :: a -> a\nmap a b :: (a -> b) -> [a] -> [b]\nfst a b :: (a, b) -> a\n```\n\nThe type variable names after the function name and before `::` make the quantification explicit: `map a b :: ...` means \"for all types `a` and `b`, ...\".\n\n## Universal Quantification\n\nUniversal quantification (`forall`) is the standard form of polymorphism. A universally quantified type promises that the function works for *any* instantiation of the type variable:\n\n```\nforall a. a -> a\n```\n\nIn morloc syntax, universal quantification is implicit in type signatures. The type variables listed between the function name and `::` are universally quantified.\n\n## Existential Quantification\n\nExistential types arise internally during type inference. An existential variable (`a-hat`) represents an unknown type that the checker must solve. The programmer does not write existentials directly.\n\nExistentials may carry constraints:\n- **Type parameter constraints**: the existential is known to be parameterized (e.g., `a-hat` applied to some types).\n- **Record constraints**: the existential is known to have certain fields.\n- **Openness**: open existentials may acquire more constraints; closed existentials are fully determined.\n\n## Higher-Rank Types\n\nMorloc's type system supports higher-rank polymorphism, where polymorphic types may appear in argument positions:\n\n```\n(forall a. a -> a) -> Int\n```\n\nThis type requires a function that is polymorphic -- not merely a function at some specific type. The bidirectional type checker handles higher-rank types through its synthesis/checking discipline: polymorphic arguments are checked, not synthesized.\n\n## Multi-Arity Functions\n\nMorloc normalizes curried functions to multi-argument form internally:\n\n```\na -> b -> c    normalizes to    FunU [a, b] c\n```\n\nThis does not affect the surface syntax, where `->` is right-associative as expected. The normalization is a compiler optimization that simplifies type checking for multi-argument functions.\n\n## Eta Expansion\n\nWhen a lambda body returns a function type, the compiler eta-expands:\n\n```morloc\n\\x -> f         -- where f : a -> b\n-- becomes\n\\x y -> f y     -- the hidden argument is made explicit\n```\n\nThis ensures that all functions are fully applied in the generated code, which is necessary for correct cross-language dispatch.\n"
  },
  {
    "path": "spec/types/primitive-types.md",
    "content": "# Primitive Types\n\nMorloc provides a set of built-in types that map to native representations across all supported languages.\n\n## Core Types\n\n| Type | Description |\n|------|-------------|\n| `Unit` | The unit type (no meaningful value) |\n| `Bool` | Boolean (`True` or `False`) |\n| `Int` | Default-width signed integer |\n| `Real` | Default-width floating-point number |\n| `Str` | Unicode string |\n\n## Sized Integer Types\n\n| Type | Width | Signedness |\n|------|-------|------------|\n| `Int8` | 8-bit | Signed |\n| `Int16` | 16-bit | Signed |\n| `Int32` | 32-bit | Signed |\n| `Int64` | 64-bit | Signed |\n| `UInt8` | 8-bit | Unsigned |\n| `UInt16` | 16-bit | Unsigned |\n| `UInt32` | 32-bit | Unsigned |\n| `UInt64` | 64-bit | Unsigned |\n\nThe unsized `Int` type is equivalent to the platform's default integer width. For serialization, `Int` uses the msgpack `i8` (64-bit signed) schema.\n\n## Sized Floating-Point Types\n\n| Type | Precision |\n|------|-----------|\n| `Float32` | IEEE 754 single precision |\n| `Float64` | IEEE 754 double precision |\n\nThe unsized `Real` type is equivalent to `Float64`.\n\n## Collection Types\n\n| Type | Description |\n|------|-------------|\n| `List a` or `[a]` | Homogeneous ordered collection |\n| `Tuple` | Fixed-length heterogeneous product (e.g., `(Int, Str)`) |\n\nList syntax `[a]` is sugar for `List a`.\n\nTuple types are written with parentheses and commas: `(Int, Str, Bool)`.\n\n## Cross-Language Mapping\n\nEach primitive type must have a concrete mapping for every language in which it is used. The standard library provides these mappings:\n\n| Morloc | Python | C++ | R |\n|--------|--------|-----|---|\n| `Bool` | `bool` | `bool` | `logical` |\n| `Int` | `int` | `int` | `integer` |\n| `Real` | `float` | `double` | `numeric` |\n| `Str` | `str` | `std::string` | `character` |\n| `[a]` | `list` | `std::vector<A>` | `list` |\n\nSee [[type-declarations.md]] for how these mappings are declared and [[../interop/type-mappings.md]] for the complete mapping tables.\n"
  },
  {
    "path": "spec/types/records.md",
    "content": "# Records\n\nRecords are named product types with labeled fields. Morloc provides three record-like forms: `record`, `object`, and `table`.\n\n## Record Declaration\n\nA `record` declares a type with named fields:\n\n```morloc\nrecord Person where\n  name :: Str\n  age :: Int\n```\n\nThis introduces the type `Person` with fields `name` (of type `Str`) and `age` (of type `Int`).\n\n## Object and Table Declarations\n\n`object` and `table` are variants of `record` that convey intended semantics:\n\n- **`object`** -- a record representing an opaque object (fields may include functions)\n- **`table`** -- a record representing columnar/tabular data\n\nAll three forms have identical syntax and structural behavior. The distinction is advisory: it informs serialization strategy and documentation but does not affect type checking.\n\n## Language-Specific Mappings\n\nRecords require concrete type mappings, just like other types:\n\n```morloc\nrecord Py => Person = \"dict\"\nrecord Cpp => Person = \"Person\"\nrecord R => Person = \"list\"\n```\n\nIn C++, the compiler generates a `struct` definition. In Python and R, records map to dictionaries and named lists, respectively.\n\n## Construction\n\nRecords are constructed with brace syntax, binding field names to values:\n\n```morloc\nalice = {name = \"Alice\", age = 27}\n```\n\nAll fields must be provided at construction.\n\n## Field Access\n\nThe `.` operator in prefix position extracts a field:\n\n```morloc\n.name alice       -- \"Alice\"\n.age alice        -- 27\n```\n\nField accessors are first-class functions and may be composed or passed as arguments:\n\n```morloc\nnames = map .name people\n```\n\n## Records with Function Fields\n\nRecord fields may have function types:\n\n```morloc\nrecord Tools where\n  f :: Int -> Int\n  g :: Bool -> Int\n\ntools = {f = add 1, g = \\x -> if x then 1 else 0}\n.f tools 5       -- 6\n```\n\n## Parameterized Records\n\nRecords may be parameterized:\n\n```morloc\nrecord Pair a b where\n  fst :: a\n  snd :: b\n```\n\n## Serialization\n\nWhen records cross language boundaries, they are serialized as msgpack maps (keyed by field name) or as positional tuples, depending on the target language's conventions. The compiler inserts appropriate pack/unpack operations. See [[../interop/serialization.md]].\n"
  },
  {
    "path": "spec/types/subtyping.md",
    "content": "# Subtyping\n\nMorloc's subtyping relation captures when one type is *more polymorphic* than another. This is not subtyping in the object-oriented sense; it governs how polymorphic types may be used where less polymorphic types are expected.\n\n## Subtyping Judgment\n\nThe judgment `A <: B` means \"A is at least as polymorphic as B\" -- any value of type A can be used where a value of type B is expected.\n\n## Rules\n\n### Reflexivity\n\nA type is a subtype of itself:\n\n```\nA <: A\n```\n\n### Function Types (Contravariant Arguments)\n\nFunction subtyping reverses the direction for arguments:\n\n```\n  B1 <: A1       A2 <: B2\n  -------------------------\n  A1 -> A2  <:  B1 -> B2\n```\n\nArguments are contravariant: if `f : A1 -> A2` is used where `g : B1 -> B2` is expected, then `f` must accept a *wider* range of inputs (`B1 <: A1`) and produce a *narrower* range of outputs (`A2 <: B2`).\n\nFor multi-argument functions, each argument is checked contravariantly and the return type covariantly.\n\n### Universal Quantification (Left)\n\nA universally quantified type on the left is instantiated with a fresh existential:\n\n```\n  [a-hat/a]A <: B\n  -----------------\n  forall a. A <: B\n```\n\nThis means a polymorphic type can be used where a specific type is expected, by choosing an appropriate instantiation.\n\n### Universal Quantification (Right)\n\nA universally quantified type on the right requires the left side to work for all instantiations:\n\n```\n  A <: B            (a is fresh)\n  ----------------\n  A <: forall a. B\n```\n\n### Existential Instantiation\n\nWhen an unsolved existential variable meets a concrete type, the subtyping relation solves the existential:\n\n```\n  a-hat not in FV(A)       A <=: a-hat\n  --------------------------------------\n           a-hat <: A\n```\n\nThe `<=:` (instantiation) relation handles the mechanics of solving existentials. See [[#instantiation]].\n\n## Instantiation\n\nThe instantiation judgment `A <=: a-hat` (or `a-hat <=: A`) solves the existential variable `a-hat` to a specific type.\n\n### Solve\n\nIf A is a monotype (no quantifiers, no unsolved existentials):\n\n```\n  a-hat <=: tau     solves as     a-hat = tau\n```\n\nSubject to the occurs check: `a-hat` must not appear in `tau` (prevents infinite types).\n\n### Function Decomposition\n\nIf the existential must be a function type:\n\n```\n  a-hat = a-hat1 -> a-hat2       A1 <=: a-hat1       a-hat2 <=: A2\n  -------------------------------------------------------------------\n                        A1 -> A2 <=: a-hat\n```\n\nThe existential is decomposed into fresh existentials for the argument and return types.\n\n## Context Threading\n\nSubtyping and instantiation thread a context (Gamma) through the judgment. Each step may add solved existentials to the context, which subsequent steps can observe. The context is an ordered list; variables can only reference bindings earlier in the list. See [[inference.md]] for context operations.\n"
  },
  {
    "path": "spec/types/type-declarations.md",
    "content": "# Type Declarations\n\nType declarations establish the relationship between morloc's general types and their language-specific concrete representations.\n\n## General Type Aliases\n\nA type alias introduces a new name for an existing type expression:\n\n```morloc\ntype Filename = Str\ntype Matrix a = [[a]]\ntype Pair a b = (a, b)\n```\n\nAliases are expanded during type resolution. They do not create new types.\n\n## Language-Specific Type Declarations\n\nThe `type Lang => Name = \"concrete\" params` form declares how a general type maps to a concrete type string in a specific language:\n\n```morloc\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\ntype R => Int = \"integer\"\n```\n\nFor parameterized types, type parameters appear after the concrete string. Positional references (`$1`, `$2`, ...) in the string are substituted with the rendered concrete type parameters:\n\n```morloc\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Cpp => Map k v = \"std::map<$1,$2>\" k v\n```\n\nHere, `List Int` in C++ becomes `std::vector<int>`, and `Map Str Int` becomes `std::map<std::string,int>`.\n\nFor languages without parameterized type syntax, parameters are listed but ignored in the string:\n\n```morloc\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\n```\n\n## Terminal vs. Non-Terminal Types\n\nA type is **terminal** if it should not be further reduced during type evaluation. Concrete language types (those declared with `type Lang => ...`) are terminal -- the compiler stops resolving once it reaches them.\n\nA type is **non-terminal** if it is an alias that should be expanded. General type aliases (`type Name = ...`) are non-terminal.\n\nThis distinction matters during type evaluation: the compiler repeatedly applies type aliases until it reaches a terminal type or a fixed point.\n\n## Type Evaluation\n\nType evaluation resolves a general type to its concrete representation for a given language through the following process:\n\n1. Look up the type name in the scope.\n2. If a general alias exists and is non-terminal, substitute and recurse.\n3. If a language-specific mapping exists, substitute parameters and return the concrete type.\n4. If no mapping is found, report an error (the type cannot be realized in that language).\n\n## Scope\n\nType declarations accumulate in a scope: a mapping from type names to their definitions. Each definition records the parameters, the resolved type, and whether it is terminal. Multiple definitions for the same name (at different arities or for different languages) coexist in the scope.\n"
  },
  {
    "path": "spec/types/typeclasses.md",
    "content": "# Typeclasses\n\nTypeclasses define families of types that share a common interface. They enable ad-hoc polymorphism: the same function name can have different implementations depending on the type at which it is used.\n\n## Class Declaration\n\nA typeclass is declared with the `class` keyword, listing its methods and their signatures:\n\n```morloc\nclass Monoid a where\n  empty a :: a\n  op a :: a -> a -> a\n```\n\nThe type variable `a` after the method name indicates which type the method is parameterized over. Each method's type signature may reference `a` and other type variables.\n\n```morloc\nclass Eq a where\n  eq a :: a -> a -> Bool\n```\n\n## Instance Declaration\n\nAn instance provides implementations for a typeclass at a specific type:\n\n```morloc\ninstance Monoid Int where\n  empty = 0\n  source Cpp from \"monoid.hpp\" (\"addInt\" as op)\n  source Py from \"monoid.py\" (\"addInt\" as op)\n```\n\nInstance methods may be defined as:\n- Pure morloc expressions (e.g., `empty = 0`)\n- Foreign function bindings (e.g., `source Cpp from ... (\"fn\" as method)`)\n- References to existing functions\n\n```morloc\ninstance Eq Int where\n  eq = Int::eq\n\ninstance Eq Str where\n  eq = Str::eq\n```\n\n## Constraint Resolution\n\nWhen a function uses a typeclass method, the compiler resolves which instance to apply based on the concrete type at the call site:\n\n```morloc\nfold a b :: (b -> a -> b) -> b -> [a] -> b\n\nsum :: [Int] -> Int\nsum = fold op empty\n```\n\nHere, `op` and `empty` resolve to the `Monoid Int` instance. The compiler statically selects the appropriate implementation for each language.\n\n## Constraints in Signatures\n\nTypeclass constraints restrict the types at which a polymorphic function may be used. In morloc, constraints are currently resolved implicitly by the type checker rather than declared explicitly in signatures. The checker verifies that all typeclass methods used in a function body have instances available for the inferred type.\n\n## Multi-Language Instances\n\nA single instance may provide implementations in multiple languages:\n\n```morloc\ninstance Monoid Int where\n  empty = 0\n  source Py from \"monoid.py\" (\"addInt\" as op)\n  source Cpp from \"monoid.hpp\" (\"addInt\" as op)\n  source R from \"monoid.R\" (\"addInt\" as op)\n```\n\nThe compiler selects the language-specific implementation during realization, following the same rules as for ordinary foreign functions. See [[../interop/implementation-selection.md]].\n"
  },
  {
    "path": "stack.yaml",
    "content": "# This file was automatically generated by 'stack init'\n#\n# Some commonly used options have been documented as comments in this file.\n# For advanced use and comprehensive documentation of the format, please see:\n# https://docs.haskellstack.org/en/stable/yaml_configuration/\n\n# Resolver to choose a 'specific' stackage snapshot or a compiler version.\n# A snapshot resolver dictates the compiler version and the set of packages\n# to be used for project dependencies. For example:\n#\n# resolver: lts-3.5\n# resolver: nightly-2015-09-21\n# resolver: ghc-7.10.2\n# resolver: ghcjs-0.1.0_ghc-7.10.2\n# resolver:\n#  name: custom-snapshot\n#  location: \"./custom-snapshot.yaml\"\nresolver: lts-22.44\n\n# User packages to be built.\n# Various formats can be used as shown in the example below.\n#\n# packages:\n# - some-directory\n# - https://example.com/foo/bar/baz-0.0.2.tar.gz\n# - location:\n#    git: https://github.com/commercialhaskell/stack.git\n#    commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a\n# - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a\n#   extra-dep: true\n#  subdirs:\n#  - auto-update\n#  - wai\n#\n# A package marked 'extra-dep: true' will only be built if demanded by a\n# non-dependency (i.e. a user package), and its test suites and benchmarks\n# will not be run. This is useful for tweaking upstream packages.\npackages:\n- .\n\nbuild:\n  test: true\n# # Dependency packages to be pulled from upstream that are not in the resolver\n# # (e.g., acme-missiles-0.3)\n# extra-deps:\n#  - partial-order-0.2.0.0@sha256:a0d6ddc9ebcfa965a5cbcff1d06d46a79d44ea5a0335c583c2a51bcb41334487,2275\n#  - containers-0.6.8@sha256:bb2bec1bbc6b39a7c97cd95e056a5698ec45beb5d8feb6caae12af64e4bd823c,2670\n#  - binary-0.8.9.3@sha256:8b03c7fd5a7f6803280fba87e38d534beb1dc92fec975de5bd36200633996ef2,6576\n#  - parsec-3.1.18.0@sha256:dfbb9835b8abc966b6bbd34340ef5122227b4cf4480062b85ca4c4704f054f98,4535\n#  - text-2.1.3@sha256:5094b1264f717da458c5fa6690ca5eea90e568e464e33a6defeddeb7810b8053,11202\n\n# Override default flag values for local packages and extra-deps\n# flags: {}\n\n# Extra package databases containing global packages\n# extra-package-dbs: []\n\n# Control whether we use the GHC we find on the path\n# system-ghc: true\n#\n# Require a specific version of stack, using version ranges\n# require-stack-version: -any # Default\n# require-stack-version: \">=1.6\"\n#\n# Override the architecture used by stack, especially useful on Windows\n# arch: i386\n# arch: x86_64\n#\n# Extra directories used by stack for building\n# extra-include-dirs: [/path/to/dir]\n# extra-lib-dirs: [/path/to/dir]\n#\n# Allow a newer minor version of GHC than the snapshot specifies\n# compiler-check: newer-minor\n"
  },
  {
    "path": "test-suite/.gitignore",
    "content": "stack.yaml.lock\ntags\n*.hi\n*.o\n.stack-work/\n.history\nmorloc.cabal\n*.out\nrun/\n.idea/\n*.iml\npool.*\nnexus.*\nz/\nz\nzzz\nzzz*\nz.*\n# ignore debugging files\n*.aux\n*.hp\n*.prof\n*.ps\n"
  },
  {
    "path": "test-suite/GoldenMakefileTests.hs",
    "content": "{- |\nModule      : GoldenMakefileTests\nDescription : Run golden tests that build and execute full morloc programs\n-}\nmodule GoldenMakefileTests\n  ( goldenMakefileTest\n  ) where\n\nimport qualified System.Directory as SD\nimport qualified System.IO as SI\nimport qualified System.Process as SP\nimport Test.Tasty\nimport Test.Tasty.Golden\n\ngoldenMakefileTest :: String -> String -> TestTree\ngoldenMakefileTest msg testdir =\n  let dir = testdir\n      expFile = testdir ++ \"/exp.txt\"\n      obsFile = testdir ++ \"/obs.txt\"\n   in goldenVsFile\n        msg\n        expFile\n        obsFile\n        (makeManifoldFile dir)\n\nmakeManifoldFile :: String -> IO ()\nmakeManifoldFile path = do\n  abspath <- SD.makeAbsolute path\n  devnull <- SI.openFile \"/dev/null\" SI.WriteMode\n  _ <-\n    SP.runProcess\n      \"make\" -- command\n      [\"-C\", abspath, \"--quiet\"] -- arguments\n      Nothing -- optional path to working diretory\n      Nothing -- optional environment\n      Nothing -- stdin handle\n      (Just devnull) -- stdout handle\n      (Just devnull) -- stderr handle\n      >>= SP.waitForProcess\n\n  SP.callProcess \"make\" [\"-C\", abspath, \"--quiet\", \"clean\"]\n"
  },
  {
    "path": "test-suite/Main.hs",
    "content": "-- \\|\n-- Module      : Main\n-- Description : Test suite entry point combining unit, property, and golden tests\nimport qualified System.Directory as SD\nimport Test.Tasty\n\nimport GoldenMakefileTests (goldenMakefileTest)\nimport PropertyTests (propertyTests)\nimport UnitTypeTests\n\nmain :: IO ()\nmain = do\n  wd <- SD.getCurrentDirectory >>= SD.makeAbsolute\n  let golden = \\msg f -> goldenMakefileTest msg (wd ++ \"/test-suite/golden-tests/\" ++ f)\n  defaultMain $\n    testGroup\n      \"Morloc tests\"\n      [ unitTypeTests\n      , unitValuecheckTests\n      , typeOrderTests\n      , typeAliasTests\n      , propertyTests\n      , whereTests\n      , orderInvarianceTests\n      , whitespaceTests\n      , infixOperatorTests\n      , substituteTVarTests\n      , subtypeTests\n      , complexityRegressionTests\n      , effectSubtypeTests\n      , effectSynthesisTests\n      , effectErrorTests\n      , namespaceErrorTests\n      , typeclassTests\n      , natErrorTests\n      , natArithTests\n      , natLabelTests\n      , natKindPromotionTests\n      , letBindingTests\n      , aliasConstructorTests\n\n      -- -- These tests pass locally and when I run the same container that I\n      -- -- use in github actions. Yet these tests freeze in an infinite loop\n      -- -- with no STDERR output on github. I have no idea why. But for now I'm\n      -- -- just going to comment them out. Rememver uncomment them on dev cycles\n      -- -- so that they are tested somewhere, at least.\n      -- , golden \"specialization-1-c\" \"specialization-1-c\"\n      -- , golden \"specialization-2-c\" \"specialization-2-c\"\n      -- , golden \"specialization-1-py - numpy\" \"specialization-1-py\"\n      -- , golden \"specialization-2-py - bytes/bytearray\" \"specialization-2-py\"\n      -- , golden \"specialization-1-r\" \"specialization-1-r\"\n\n      , golden \"multiprocessing-py-1\" \"multiprocessing-py-1\"\n\n      , -- bug regression tests from doc-agents code-tester (v0.74.0)\n        -- Each test asserts correct behavior; currently FAIL until bug is fixed\n        golden \"bug-load-type-infer\" \"bug-load-type-infer\"\n      , golden \"bug-intrinsic-schema-crash\" \"bug-intrinsic-schema-crash\"\n\n      , golden \"thunk-basic\" \"thunk-basic\"\n      , golden \"thunk-effects\" \"thunk-effects\"\n      , golden \"thunk-do\" \"thunk-do\"\n      , golden \"thunk-let\" \"thunk-let\"\n      , golden \"thunk-interop\" \"thunk-interop\"\n      , golden \"thunk-nullary-interop\" \"thunk-nullary-interop\"\n      , golden \"thunk-force\" \"thunk-force\"\n      , golden \"thunk-export\" \"thunk-export\"\n      , golden \"thunk-choose\" \"thunk-choose\"\n      , golden \"thunk-export-guard\" \"thunk-export-guard\"\n      , golden \"thunk-guard-cross\" \"thunk-guard-cross\"\n      , golden \"thunk-cross-force\" \"thunk-cross-force\"\n      , golden \"thunk-eval-forall\" \"thunk-eval-forall\"\n      , golden \"thunk-eval-hk\" \"thunk-eval-hk\"\n      , golden \"two-module\" \"two-module\"\n      , golden \"records-alias\" \"records-alias\"\n      , golden \"infix\" \"infix\"\n      , golden \"infix-import\" \"infix-import\"\n      , golden \"infix-generic\" \"infix-generic\"\n      , golden \"infix-polyglot\" \"infix-polyglot\"\n      , golden \"infix-typeclass-import\" \"infix-typeclass-import\"\n      , golden \"infix-typeclass-polyglot\" \"infix-typeclass-polyglot\"\n      , golden \"infix-typeclass-simple\" \"infix-typeclass-simple\"\n      , golden \"claude-test-1\" \"claude-test-1\"\n      , golden \"claude-test-2\" \"claude-test-2\"\n      , golden \"claude-test-3\" \"claude-test-3\"\n      , golden \"claude-test-4\" \"claude-test-4\"\n      -- , golden \"claude-test-5\" \"claude-test-5\"\n      , golden \"claude-test-6\" \"claude-test-6\"\n      , golden \"claude-test-7\" \"claude-test-7\"\n      , golden \"claude-test-8\" \"claude-test-8\"\n      , golden \"claude-test-9\" \"claude-test-9\"\n      , golden \"claude-test-10\" \"claude-test-10\"\n      , golden \"claude-test-11\" \"claude-test-11\"\n      , golden \"claude-test-12\" \"claude-test-12\"\n      , golden \"claude-test-13\" \"claude-test-13\"\n      , golden \"claude-test-14\" \"claude-test-14\"\n      , golden \"claude-test-15\" \"claude-test-15\"\n      , golden \"claude-test-16\" \"claude-test-16\"\n      , golden \"claude-test-17\" \"claude-test-17\"\n      , golden \"claude-test-18\" \"claude-test-18\"\n      , golden \"claude-test-19\" \"claude-test-19\"\n      , golden \"claude-test-20\" \"claude-test-20\"\n      , golden \"tensor-nat-labeled\" \"tensor-nat-labeled\"\n      , golden \"tensor-nat-basic\" \"tensor-nat-basic\"\n      , golden \"slurm-label-codegen\" \"slurm-label-codegen\"\n      , golden \"let-crosslang\" \"let-crosslang\"\n      , golden \"functional-data-1\" \"functional-data-1\"\n      , golden \"functional-data-2\" \"functional-data-2\"\n      , golden \"functional-data-3a\" \"functional-data-3a\"\n      , golden \"functional-data-3b\" \"functional-data-3b\"\n      , golden \"functional-data-3c\" \"functional-data-3c\"\n      , golden \"functional-data-3d-py\" \"functional-data-3d-py\"\n      , golden \"functional-data-3d-c\" \"functional-data-3d-c\"\n      , golden \"functional-data-3d-r\" \"functional-data-3d-r\"\n      , golden \"functional-data-3e\" \"functional-data-3e\"\n      , golden \"functional-data-3f\" \"functional-data-3f\"\n      , golden \"functional-data-4\" \"functional-data-4\"\n      , golden \"functional-data-5\" \"functional-data-5\"\n      , golden \"pattern-getters\" \"pattern-getters\"\n      , golden \"pattern-setters\" \"pattern-setters\"\n      , golden \"holes-func\" \"holes-func\"\n      , golden \"holes-record\" \"holes-record\"\n      , golden \"holes-simple\" \"holes-simple\"\n      , golden \"type-annotations-1\" \"type-annotations-1\"\n      , golden \"native-morloc-1\" \"native-morloc-1\"\n      , golden \"native-morloc-2\" \"native-morloc-2\"\n      , golden \"native-morloc-3\" \"native-morloc-3\"\n      , golden \"native-morloc-4\" \"native-morloc-4\"\n      , golden \"native-morloc-5\" \"native-morloc-5\"\n      , golden \"native-morloc-6\" \"native-morloc-6\"\n      , golden \"native-morloc-7\" \"native-morloc-7\"\n      , golden \"native-morloc-8\" \"native-morloc-8\"\n      , golden \"native-morloc-9\" \"native-morloc-9\"\n      , golden \"nexus-let-pure\" \"nexus-let-pure\"\n      , golden \"nexus-let-lambda\" \"nexus-let-lambda\"\n      , golden \"demo-trimming\" \"demo-trimming\"\n      , golden \"formatting\" \"formatting\"\n      , golden \"record-docstrings\" \"record-docstrings\"\n      , golden \"command-groups\" \"command-groups\"\n      , golden \"typeclasses-1\" \"typeclasses-1\"\n      , golden \"typeclasses-2\" \"typeclasses-2\"\n      , golden \"typeclasses-3\" \"typeclasses-3\"\n      , golden \"typeclasses-4\" \"typeclasses-4\"\n      , golden \"typeclasses-5\" \"typeclasses-5\"\n      , golden \"typeclasses-6\" \"typeclasses-6\"\n      , golden \"typeclasses-7\" \"typeclasses-7\"\n      , golden \"typeclasses-8\" \"typeclasses-8\"\n      , golden \"typeclasses-9\" \"typeclasses-9\"\n      , golden \"typeclass-stress\" \"typeclass-stress\"\n      , golden \"alias-dedup-1\" \"alias-dedup-1\"\n      , golden \"alias-no-cross-instance\" \"alias-no-cross-instance\"\n      , golden \"alias-concrete-bugs\" \"alias-concrete-bugs\"\n      , golden \"alias-constructor-equiv\" \"alias-constructor-equiv\"\n      , golden \"alias-array-monoid\" \"alias-array-monoid\"\n      , golden \"poly-list-1\" \"poly-list-1\"\n      , golden \"higher-kinded-types\" \"higher-kinded-types\"\n      , golden \"string-encoding\" \"string-encoding\"\n      , golden \"string-encoding-utf8\" \"string-encoding-utf8\"\n      , golden \"string-json-parsing\" \"string-json-parsing\"\n      , golden \"string-multiline\" \"string-multiline\"\n      , golden \"string-interpolation\" \"string-interpolation\"\n      , golden \"string-escape\" \"string-escape\"\n      , golden \"string-pretty\" \"string-pretty\"\n      , golden \"unicode-source\" \"unicode-source\"\n      , golden \"unicode-source-cpp\" \"unicode-source-cpp\"\n      , golden \"unicode-interpolation\" \"unicode-interpolation\"\n      , golden \"unicode-interop\" \"unicode-interop\"\n      , golden \"unicode-edge-cases\" \"unicode-edge-cases\"\n      , golden \"file-input-py\" \"file-input-py\"\n      , golden \"file-input-c\" \"file-input-c\"\n      , golden \"file-input-r\" \"file-input-r\"\n      , golden \"packer-definitions-1\" \"packer-definitions-1\"\n      , golden \"packer-definitions-2\" \"packer-definitions-2\"\n      , golden \"packer-definitions-3\" \"packer-definitions-3\"\n      , golden \"packer-definitions-4\" \"packer-definitions-4\"\n      , golden \"packer-definitions-5\" \"packer-definitions-5\"\n      , golden \"import-1\" \"import-1\"\n      , golden \"import-2\" \"import-2\"\n      , -- tests the bug solved involving the lambdaScope function in\n        -- Generate.hs:reserialize. See that commit message.\n        golden \"edge-cases-1\" \"edge-cases-1\"\n      , golden \"edge-cases-2\" \"edge-cases-2\"\n      , golden \"type-synthesis-1\" \"type-synthesis-1\"\n      , golden \"type-synthesis-2\" \"type-synthesis-2\"\n      , golden \"argument-form-1-c\" \"argument-form-1-c\"\n      , golden \"argument-form-1-py\" \"argument-form-1-py\"\n      , golden \"argument-form-1-r\" \"argument-form-1-r\"\n      , golden \"argument-form-2-c\" \"argument-form-2-c\"\n      , golden \"argument-form-2-py\" \"argument-form-2-py\"\n      , golden \"argument-form-2-r\" \"argument-form-2-r\"\n      , -- see github issue #7\n        golden \"argument-form-3-c\" \"argument-form-3-c\"\n      , golden \"argument-form-3-py\" \"argument-form-3-py\"\n      , golden \"argument-form-3-r\" \"argument-form-3-r\"\n      , golden \"composition\" \"composition\"\n      , golden \"generic-hofs-1\" \"generic-hofs-1\"\n      , golden \"generic-hofs-2\" \"generic-hofs-2\"\n      , golden \"eta-reduction-1\" \"eta-reduction-1\"\n      , golden \"eta-reduction-2\" \"eta-reduction-2\"\n      , golden \"eta-reduction-3\" \"eta-reduction-3\"\n      , golden \"eta-reduction-4\" \"eta-reduction-4\"\n      , golden \"eta-reduction-5\" \"eta-reduction-5\"\n      , golden \"eta-reduction-6\" \"eta-reduction-6\"\n      , golden \"eta-reduction-7\" \"eta-reduction-7\"\n      , golden \"eta-reduction-8-py\" \"eta-reduction-8-py\"\n      , golden \"eta-reduction-8-cpp\" \"eta-reduction-8-cpp\"\n      , golden \"path-shadowing-c\" \"path-shadowing-c\"\n      , golden \"path-shadowing-py\" \"path-shadowing-py\"\n      , golden \"path-shadowing-r\" \"path-shadowing-r\"\n      , golden \"local-import-root-py\" \"local-import-root-py\"\n      , golden \"local-import-cousin-py\" \"local-import-cousin-py\"\n      , golden \"local-import-nested-py\" \"local-import-nested-py\"\n      , golden \"argument-form-4-c\" \"argument-form-4-c\"\n      , golden \"argument-form-4-py\" \"argument-form-4-py\"\n      , golden \"argument-form-4-r\" \"argument-form-4-r\"\n      , golden \"argument-form-5-c\" \"argument-form-5-c\"\n      , golden \"argument-form-5-py\" \"argument-form-5-py\"\n      , golden \"argument-form-5-r\" \"argument-form-5-r\"\n      , golden \"argument-form-6-c\" \"argument-form-6-c\"\n      , golden \"argument-form-6-py\" \"argument-form-6-py\"\n      , golden \"argument-form-6-r\" \"argument-form-6-r\"\n      , golden \"argument-form-7-c\" \"argument-form-7-c\"\n      , golden \"argument-form-7-py\" \"argument-form-7-py\"\n      , golden \"argument-form-7-r\" \"argument-form-7-r\"\n      , golden \"argument-form-8-c\" \"argument-form-8-c\"\n      , golden \"argument-form-8-py\" \"argument-form-8-py\"\n      , golden \"argument-form-8-r\" \"argument-form-8-r\"\n      , golden \"interop-1-py\" \"interop-1-py\"\n      , golden \"interop-1-r\" \"interop-1-r\"\n      , golden \"interop-2\" \"interop-2\"\n      , -- 3a\n        golden \"interop-3a-cp\" \"interop-3a-cp\"\n      , golden \"interop-3a-pr\" \"interop-3a-pr\"\n      , golden \"interop-3a-rc\" \"interop-3a-rc\"\n      , golden \"interop-3a-pp\" \"interop-3a-pp\"\n      , -- 3b\n        golden \"interop-3b-cp\" \"interop-3b-cp\"\n      , golden \"interop-3b-pr\" \"interop-3b-pr\"\n      , golden \"interop-3b-rc\" \"interop-3b-rc\"\n      , golden \"interop-3b-pp\" \"interop-3b-pp\"\n      , -- 3c\n        golden \"interop-3c-cp\" \"interop-3c-cp\"\n      , golden \"interop-3c-pr\" \"interop-3c-pr\"\n      , golden \"interop-3c-rc\" \"interop-3c-rc\"\n      , golden \"interop-3c-pp\" \"interop-3c-pp\"\n      , -- 3d\n        golden \"interop-3d-cp\" \"interop-3d-cp\"\n      , golden \"interop-3d-pr\" \"interop-3d-pr\"\n      , golden \"interop-3d-rc\" \"interop-3d-rc\"\n      , golden \"interop-3d-pp\" \"interop-3d-pp\"\n      , -- 3e\n        golden \"interop-3e-cp\" \"interop-3e-cp\"\n      , golden \"interop-3e-pr\" \"interop-3e-pr\"\n      , golden \"interop-3e-rc\" \"interop-3e-rc\"\n      , golden \"interop-3e-pp\" \"interop-3e-pp\"\n      , -- 3f - test serialization type bug\n        golden \"interop-3f\" \"interop-3f\"\n      , -- other random interop tests (I should kill them)\n        golden \"interop-4\" \"interop-4\"\n      , golden \"interop-5\" \"interop-5\"\n      , golden \"interop-6\" \"interop-6\"\n      , golden \"interop-7\" \"interop-7\"\n      , golden \"interop-8-r-to-c\" \"interop-8-r-to-c\"\n      , golden \"interop-8-r-to-py\" \"interop-8-r-to-py\"\n      , golden \"interop-8-py-to-r\" \"interop-8-py-to-r\"\n      , golden \"interop-9\" \"interop-9\"\n      , golden \"interop-10\" \"interop-10\"\n      , golden \"interop-11\" \"interop-11\"\n      , golden \"manifold-form-0\" \"manifold-form-0\"\n      , golden \"manifold-form-0x\" \"manifold-form-0x\"\n      , golden \"manifold-form-1\" \"manifold-form-1\"\n      , golden \"manifold-form-2\" \"manifold-form-2\"\n      , golden \"manifold-form-2x\" \"manifold-form-2x\"\n      , golden \"manifold-form-3\" \"manifold-form-3\"\n      , golden \"manifold-form-3x\" \"manifold-form-3x\"\n      , golden \"manifold-form-4_c\" \"manifold-form-4_c\"\n      , golden \"manifold-form-4_py\" \"manifold-form-4_py\"\n      , golden \"manifold-form-4_r\" \"manifold-form-4_r\"\n      , golden \"manifold-form-5_c\" \"manifold-form-5_c\"\n      , golden \"manifold-form-5_py\" \"manifold-form-5_py\"\n      , golden \"manifold-form-5_r\" \"manifold-form-5_r\"\n      , golden \"manifold-form-6_c\" \"manifold-form-6_c\"\n      , golden \"manifold-form-6_py\" \"manifold-form-6_py\"\n      , golden \"manifold-form-6_r\" \"manifold-form-6_r\"\n      , -- see github issue #9\n        golden \"manifold-form-7_c\" \"manifold-form-7_c\"\n      , golden \"manifold-form-7_py\" \"manifold-form-7_py\"\n      , golden \"manifold-form-7_r\" \"manifold-form-7_r\"\n      , -- test records\n        golden \"records-primitive\" \"records-primitive\"\n      , golden \"records-complex-1\" \"records-complex-1\"\n      , golden \"records-complex-2\" \"records-complex-2\"\n      , golden \"records-nested\" \"records-nested\"\n      , golden \"records-alias\" \"records-alias\"\n      , golden \"selection-1\" \"selection-1\"\n      , golden \"selection-2\" \"selection-2\"\n      , golden \"selection-3\" \"selection-3\"\n      , golden \"selection-4\" \"selection-4\"\n      , -- import two instances in one languages for a function\n        -- this is also a test of a function that is defind in a local file\n        -- -- With the new stricter implementation, these tests no longer pass\n        -- -- They can be reinstated when the morloc compiler learns to\n        -- -- distinguish the functions reasonably\n        -- , golden \"multiple-instances-1-c\" \"multiple-instances-1-c\"\n        -- , golden \"multiple-instances-1-py\" \"multiple-instances-1-py\"\n        -- , golden \"multiple-instances-1-r\" \"multiple-instances-1-r\"\n        -- multiple sources and a declaration\n        golden \"multiple-instances-2-c\" \"multiple-instances-2-c\"\n      , golden \"multiple-instances-2-py\" \"multiple-instances-2-py\"\n      , golden \"multiple-instances-2-r\" \"multiple-instances-2-r\"\n      , golden \"multi-lang-mempty-py\" \"multi-lang-mempty-py\"\n      , golden \"bare-selector-args\" \"bare-selector-args\"\n      , golden \"bare-selector-chain\" \"bare-selector-chain\"\n      , -- tests of module forms\n        -- where *-sid\n        --   s - number of sourced instances\n        --   i - number of imported instances\n        --   d - number of declared instances\n        golden \"module-form-00n\" \"module-form-00n\"\n      , golden \"module-form-011\" \"module-form-011\"\n      , golden \"module-form-01n\" \"module-form-01n\"\n      , golden \"module-form-0n0\" \"module-form-0n0\"\n      , golden \"module-form-0n1\" \"module-form-0n1\"\n      , golden \"module-form-101\" \"module-form-101\"\n      , golden \"module-form-10n\" \"module-form-10n\"\n      , golden \"module-form-110\" \"module-form-110\"\n      , golden \"module-form-111\" \"module-form-111\"\n      , golden \"module-form-1n0\" \"module-form-1n0\"\n      , golden \"module-form-n00\" \"module-form-n00\"\n      , golden \"module-form-n01\" \"module-form-n01\"\n      , golden \"module-form-n10\" \"module-form-n10\"\n      , -- tests of serialization\n        -- , golden \"c  S\" \"serial-form-1-c\"\n        -- , golden \"py S\" \"serial-form-1-py\"\n        -- , golden \"r  S\" \"serial-form-1-r\"\n        golden \"C serial-form-2-c\" \"serial-form-2-c\"\n      , golden \"C serial-form-2-py\" \"serial-form-2-py\"\n      , golden \"C serial-form-2-r\" \"serial-form-2-r\"\n      , -- , golden \"c  R\" \"serial-form-3-c\"\n        -- , golden \"py R\" \"serial-form-3-py\"\n        -- , golden \"r  R\" \"serial-form-3-r\"\n        -- outer simple type\n        golden \"S(S) serial-form-4-c\" \"serial-form-4-c\"\n      , golden \"S(S) serial-form-4-py\" \"serial-form-4-py\"\n      , golden \"S(S) serial-form-4-r\" \"serial-form-4-r\"\n      , golden \"S(C) serial-form-5-c\" \"serial-form-5-c\"\n      , golden \"S(C) serial-form-5-py\" \"serial-form-5-py\"\n      , golden \"S(C) serial-form-5-r\" \"serial-form-5-r\"\n      , golden \"S(R) serial-form-6-c\" \"serial-form-6-c\"\n      , golden \"S(R) serial-form-6-py\" \"serial-form-6-py\"\n      , golden \"S(R) serial-form-6-r\" \"serial-form-6-r\"\n      , -- outer constructed type\n        golden \"C(S) serial-form-7-c\" \"serial-form-7-c\"\n      , golden \"C(S) serial-form-7-py\" \"serial-form-7-py\"\n      , golden \"C(S) serial-form-7-r\" \"serial-form-7-r\"\n      , golden \"C(C) serial-form-8-c\" \"serial-form-8-c\"\n      , -- , golden \"C(C) serial-form-8-py\" \"serial-form-8-py\"\n        golden \"C(C) serial-form-8-r\" \"serial-form-8-r\"\n      , golden \"C(R) serial-form-9-c\" \"serial-form-9-c\"\n      , golden \"C(R) serial-form-9-py\" \"serial-form-9-py\"\n      , golden \"C(R) serial-form-9-r\" \"serial-form-9-r\"\n      , -- outer record type\n        golden \"R(S) serial-form-10-c\" \"serial-form-10-c\"\n      , golden \"R(S) serial-form-10-py\" \"serial-form-10-py\"\n      , golden \"R(S) serial-form-10-r\" \"serial-form-10-r\"\n      , golden \"R(C) serial-form-11-c\" \"serial-form-11-c\"\n      , golden \"R(C) serial-form-11-py\" \"serial-form-11-py\"\n      , golden \"R(C) serial-form-11-r\" \"serial-form-11-r\"\n      , golden \"R(R) serial-form-12-c\" \"serial-form-12-c\"\n      , golden \"R(R) serial-form-12-py\" \"serial-form-12-py\"\n      , golden \"R(R) serial-form-12-r\" \"serial-form-12-r\"\n      , -- table handling\n        golden \"table-1-c\" \"table-1-c\"\n      , golden \"table-1-py\" \"table-1-py\"\n      , golden \"table-1-r\" \"table-1-r\"\n      , golden \"table-2-c\" \"table-2-c\"\n      , golden \"table-2-py\" \"table-2-py\"\n      , golden \"table-2-r\" \"table-2-r\"\n      , -- object handling\n        golden \"object-1-c\" \"object-1-c\"\n      , golden \"object-1-py\" \"object-1-py\"\n      , golden \"object-1-r\" \"object-1-r\"\n      , -- scoping\n        golden \"scoping-1\" \"scoping-1\"\n      , golden \"scoping-2\" \"scoping-2\"\n      , golden \"scoping-3\" \"scoping-3\"\n      , golden \"scoping-4\" \"scoping-4\"\n      , golden \"scoping-5\" \"scoping-5\"\n      , golden \"scoping-6\" \"scoping-6\"\n      , golden \"scoping-7\" \"scoping-7\"\n      , golden \"scoping-8\" \"scoping-8\"\n      , golden \"scoping-9\" \"scoping-9\"\n      , golden \"scoping-10\" \"scoping-10\"\n      , golden \"scoping-11\" \"scoping-11\"\n      , golden \"scoping-12\" \"scoping-12\"\n      , golden \"scoping-13\" \"scoping-13\"\n      , -- type alias transitive resolution\n        golden \"type-alias-transitive\" \"type-alias-transitive\"\n      , -- type identities\n        golden \"type-identities-c\" \"type-identities-c\"\n      , -- testing packet transmission\n        golden \"packets-large (wait ~10s)\" \"packets-large\"\n      , golden \"packets-interop (wait ~10s)\" \"packets-interop\"\n      , -- many tests of higher-order functions\n        golden \"hofs-1\" \"hofs-1\"\n      , -- test errors\n        golden \"errors (wait ~10s)\" \"errors\"\n      , golden \"feature-integration-1\" \"feature-integration-1\"\n      , golden \"let-expressions\" \"let-expressions\"\n      , golden \"guards-py\" \"guards-py\"\n      , golden \"guards-cpp\" \"guards-cpp\"\n      , golden \"guards-r\" \"guards-r\"\n      , golden \"guards-let-py\" \"guards-let-py\"\n      , golden \"guards-let-cpp\" \"guards-let-cpp\"\n      , golden \"guards-let-r\" \"guards-let-r\"\n      , golden \"guards-inline-cpp\" \"guards-inline-cpp\"\n      , golden \"recursion-direct-py\" \"recursion-direct-py\"\n      , golden \"recursion-direct-cpp\" \"recursion-direct-cpp\"\n      , golden \"recursion-direct-r\" \"recursion-direct-r\"\n      , golden \"recursion-mutual-py\" \"recursion-mutual-py\"\n      , golden \"recursion-mutual-cpp\" \"recursion-mutual-cpp\"\n      , golden \"recursion-mutual-r\" \"recursion-mutual-r\"\n      , golden \"recursion-cross-py-cpp\" \"recursion-cross-py-cpp\"\n      , golden \"recursion-cross-r-cpp\" \"recursion-cross-r-cpp\"\n      , golden \"recursion-helper-py\" \"recursion-helper-py\"\n      , golden \"recursion-helper-cpp\" \"recursion-helper-cpp\"\n      , golden \"recursion-thunk-py\" \"recursion-thunk-py\"\n      , golden \"recursion-thunk-helper-cpp\" \"recursion-thunk-helper-cpp\"\n      , -- optional type tests\n        golden \"optional-py\" \"optional-py\"\n      , golden \"optional-cpp\" \"optional-cpp\"\n      , golden \"optional-r\" \"optional-r\"\n      , golden \"optional-json\" \"optional-json\"\n      , golden \"optional-interop-cp\" \"optional-interop-cp\"\n      , golden \"optional-interop-pr\" \"optional-interop-pr\"\n      , golden \"optional-interop-rc\" \"optional-interop-rc\"\n      , golden \"optional-records-py\" \"optional-records-py\"\n      , golden \"optional-records-cpp\" \"optional-records-cpp\"\n      , golden \"optional-records-r\" \"optional-records-r\"\n      , -- optional coercion tests (a -> ?a)\n        golden \"optional-coerce-py\" \"optional-coerce-py\"\n      , golden \"optional-coerce-cpp\" \"optional-coerce-cpp\"\n      , golden \"optional-coerce-interop\" \"optional-coerce-interop\"\n      , -- effect coercion tests (a -> <IO> a)\n        golden \"effect-coerce-py\" \"effect-coerce-py\"\n      , golden \"effect-coerce-cpp\" \"effect-coerce-cpp\"\n      , -- multi-label and subtyping effect tests\n        golden \"effect-multi-label-py\" \"effect-multi-label-py\"\n      , golden \"effect-subtype-py\" \"effect-subtype-py\"\n      , golden \"effect-error-cpp\" \"effect-error-cpp\"\n      , golden \"effect-accumulate-py\" \"effect-accumulate-py\"\n      , -- inline force operator (!) tests\n        golden \"force-inline-basic\" \"force-inline-basic\"\n      , -- intrinsic tests\n        golden \"intrinsic-agnostic\" \"intrinsic-agnostic\"\n      , golden \"intrinsic-hash\" \"intrinsic-hash\"\n      , golden \"intrinsic-constants\" \"intrinsic-constants\"\n      , golden \"intrinsic-show-read\" \"intrinsic-show-read\"\n      , golden \"intrinsic-show-read-nexus\" \"intrinsic-show-read-nexus\"\n      , golden \"intrinsic-show-ho-r\" \"intrinsic-show-ho-r\"\n      , -- parser stress test: precedence, parentheses, negatives, numeric literals, getters\n        golden \"parser-stress\" \"parser-stress\"\n      , -- stdout flush test: verify Python pool stdout is flushed before shutdown\n        golden \"stdout-flush-py\" \"stdout-flush-py\"\n      , -- namespace import tests\n        golden \"namespace-basic\" \"namespace-basic\"\n      , golden \"namespace-selective\" \"namespace-selective\"\n      , golden \"namespace-separate-impls\" \"namespace-separate-impls\"\n      , golden \"namespace-disambiguation\" \"namespace-disambiguation\"\n      , golden \"namespace-ns-composition\" \"namespace-ns-composition\"\n      , golden \"namespace-ns-hof\" \"namespace-ns-hof\"\n      , golden \"namespace-ns-let\" \"namespace-ns-let\"\n      , golden \"namespace-ns-shadow\" \"namespace-ns-shadow\"\n      , golden \"namespace-ns-multi\" \"namespace-ns-multi\"\n      , golden \"namespace-ns-same-func-name\" \"namespace-ns-same-func-name\"\n      , golden \"namespace-ns-unqualified\" \"namespace-ns-unqualified\"\n      , golden \"namespace-ns-exported\" \"namespace-ns-exported\"\n      , golden \"namespace-ns-reexport\" \"namespace-ns-reexport\"\n      , golden \"namespace-ns-nested-getter\" \"namespace-ns-nested-getter\"\n      , golden \"namespace-ns-guard\" \"namespace-ns-guard\"\n      , golden \"namespace-ns-double-import\" \"namespace-ns-double-import\"\n      , -- %inline pragma tests\n        golden \"inline-op-py\" \"inline-op-py\"\n      , golden \"inline-func-py\" \"inline-func-py\"\n      , golden \"inline-typeclass-py\" \"inline-typeclass-py\"\n      , golden \"inline-ho-py\" \"inline-ho-py\"\n      , golden \"inline-op-ho-py\" \"inline-op-ho-py\"\n      , golden \"inline-deep-py\" \"inline-deep-py\"\n      , golden \"inline-mixed-py\" \"inline-mixed-py\"\n      , golden \"inline-block-py\" \"inline-block-py\"\n      , golden \"inline-old-style-py\" \"inline-old-style-py\"\n      , golden \"inline-cross-lang\" \"inline-cross-lang\"\n      , -- bare operators in old-style source declarations\n        golden \"source-old-op-py\" \"source-old-op-py\"\n      , -- eval mode restriction tests\n        golden \"eval-restrict-source\" \"eval-restrict-source\"\n      , -- memory alignment tests (document misalignment bugs in voidstar format)\n        golden \"memory-optional-double-cpp\" \"memory-optional-double-cpp\"\n      , golden \"memory-optional-double-py\" \"memory-optional-double-py\"\n      , golden \"memory-record-pack-cpp\" \"memory-record-pack-cpp\"\n      , golden \"memory-record-pack-py\" \"memory-record-pack-py\"\n      , golden \"memory-interop-misalign-cp\" \"memory-interop-misalign-cp\"\n      , golden \"memory-nested-misalign-cpp\" \"memory-nested-misalign-cpp\"\n      , golden \"memory-nested-misalign-py\" \"memory-nested-misalign-py\"\n      , golden \"memory-split-block-cpp\" \"memory-split-block-cpp\"\n      , -- arrow immutable table tests (large table passed by reference into map)\n        golden \"arrow-immutable-pr\" \"arrow-immutable-pr\"\n      , golden \"arrow-immutable-rp\" \"arrow-immutable-rp\"\n      , golden \"arrow-immutable-cp\" \"arrow-immutable-cp\"\n      , golden \"arrow-immutable-pc\" \"arrow-immutable-pc\"\n      , -- dense tensor tests\n        golden \"tensor-comprehensive-cpp\" \"tensor-comprehensive-cpp\"\n      , golden \"tensor-comprehensive-cross\" \"tensor-comprehensive-cross\"\n      , golden \"tensor-dimensions\" \"tensor-dimensions\"\n      , -- nat-parameterized type tests\n        golden \"nat-typecheck\" \"nat-typecheck\"\n      ]\n"
  },
  {
    "path": "test-suite/PropertyTests.hs",
    "content": "{- |\nModule      : PropertyTests\nDescription : QuickCheck property tests for internal utility functions\n-}\nmodule PropertyTests\n  ( propertyTests\n  ) where\n\nimport Morloc.Namespace.Prim\n\nimport qualified Data.Set as Set\nimport Test.Tasty\nimport Test.Tasty.QuickCheck as TQC\n\npropertyTests :: TestTree\npropertyTests =\n  testGroup\n    \"internal list function properties\"\n    [ TQC.testProperty \"unique makes unique lists\" prop_unique_unique\n    , TQC.testProperty \"unique preserves original order\" prop_unique_preserves_order\n    , TQC.testProperty \"duplicates makes unique lists\" prop_duplicates_unique\n    , TQC.testProperty \"duplicates preserves original order\" prop_duplicates_preserves_order\n    ]\n\n-- for the uniq family of functions (unique, duplicates, isSorted), I will test\n-- on the numbers 1 to 5. If the desired property holds over this set, they\n-- will hold over any ordered set.\none2five :: [Int] -> [Int]\none2five = map (\\x -> mod (abs x) 5)\n\nprop_unique_unique :: [Int] -> Bool\nprop_unique_unique [] = True\nprop_unique_unique xs =\n  let xs' = one2five xs\n   in length (unique xs') == Set.size (Set.fromList xs')\n\n-- This test asserts that the first element in the original and unique list is\n-- the same. This guarantee alone does not entirely guantee that the original\n-- order is preserved, but it is close.\nprop_unique_preserves_order :: [Int] -> Bool\nprop_unique_preserves_order xs = headMay xs == headMay (unique xs)\n\n-- Each element in the duplicates return list is unique\nprop_duplicates_unique :: [Int] -> Bool\nprop_duplicates_unique [] = True\nprop_duplicates_unique xs =\n  let xs' = duplicates (one2five xs)\n   in length xs' == Set.size (Set.fromList xs')\n\nprop_duplicates_preserves_order :: [Int] -> Bool\nprop_duplicates_preserves_order xs = f Set.empty xs (duplicates xs)\n  where\n    f _ _ [] = True\n    f _ [] _ = False\n    f skipped (y : rs) (y' : rs')\n      -- if the original and duplicated elements match:\n      | y == y' =\n          -- if the current element was previously skipped\n          if Set.member y' skipped\n            -- then the duplicates function failed to respect the initial order\n            then False\n            -- else continue checking on the next elements\n            else f skipped rs rs'\n      -- otherwise store record the skipped value and continue\n      | otherwise = f (Set.insert y skipped) rs (y' : rs')\n"
  },
  {
    "path": "test-suite/UnitTypeTests.hs",
    "content": "{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE QuasiQuotes #-}\n{-# LANGUAGE TemplateHaskell #-}\n{-# LANGUAGE ViewPatterns #-}\n\n{- |\nModule      : UnitTypeTests\nDescription : Unit tests for type operations, subtyping, typechecking, and codegen\n-}\nmodule UnitTypeTests\n  ( subtypeTests\n  , substituteTVarTests\n  , unitTypeTests\n  , unitValuecheckTests\n  , typeOrderTests\n  , typeAliasTests\n  , packerTests\n  , whereTests\n  , orderInvarianceTests\n  , whitespaceTests\n  , infixOperatorTests\n  , complexityRegressionTests\n  , effectSubtypeTests\n  , effectSynthesisTests\n  , effectErrorTests\n  , namespaceErrorTests\n  , typeclassTests\n  , natErrorTests\n  , natArithTests\n  , natLabelTests\n  , natKindPromotionTests\n  , letBindingTests\n  , aliasConstructorTests\n  ) where\n\nimport Morloc (typecheck, typecheckFrontend)\nimport Morloc.Frontend.Namespace\nimport Morloc.Frontend.Typecheck (evaluateAnnoSTypes)\nimport qualified Morloc.Monad as MM\nimport qualified Morloc.Typecheck.Internal as MTI\nimport qualified Morloc.Typecheck.NatSolver as NS\nimport qualified System.Directory as SD\nimport Text.RawString.QQ\n\nimport qualified Data.IntMap.Strict as IntMap\nimport qualified Data.Map as Map\nimport qualified Data.Set as Set\nimport qualified Data.Text as MT\nimport Test.Tasty (TestTree, localOption, mkTimeout, testGroup)\nimport Test.Tasty.HUnit\n\n-- get the toplevel general type of a typechecked expression\ngtypeof :: AnnoS (Indexed TypeU) f c -> TypeU\ngtypeof (AnnoS (Idx _ t) _ _) = t\n\nrunFront :: MT.Text -> IO (Either MorlocError [AnnoS (Indexed TypeU) Many Int])\nrunFront code = do\n  config <- emptyConfig\n  ((x, _), _) <-\n    MM.runMorlocMonad\n      Nothing\n      0\n      config\n      defaultValue\n      (typecheckFrontend Nothing (Code code) >>= mapM evaluateAnnoSTypes)\n  return x\n\n-- | Like runFront but without type alias evaluation, so nat dimensions are preserved.\nrunFrontRaw :: MT.Text -> IO (Either MorlocError [AnnoS (Indexed TypeU) Many Int])\nrunFrontRaw code = do\n  config <- emptyConfig\n  ((x, _), _) <-\n    MM.runMorlocMonad\n      Nothing\n      0\n      config\n      defaultValue\n      (typecheckFrontend Nothing (Code code))\n  return x\n\nrunMiddle ::\n  MT.Text ->\n  IO\n    ( Either\n        MorlocError\n        ( [AnnoS (Indexed Type) One ()]\n        , [AnnoS (Indexed Type) One (Indexed Lang)]\n        )\n    )\nrunMiddle code = do\n  config <- emptyConfig\n  ((x, _), _) <- MM.runMorlocMonad Nothing 0 config defaultValue (typecheck Nothing (Code code))\n  return x\n\nemptyConfig :: IO Config\nemptyConfig = do\n  home <- SD.getHomeDirectory\n  return $\n    Config\n      { configHome = home <> \"/.local/share/morloc\"\n      , configLibrary = home <> \"/.local/share/src/morloc\"\n      , configPlane = \"default\"\n      , configPlaneCore = \"morloclib\"\n      , configTmpDir = home <> \"/.morloc/tmp\"\n      , configBuildConfig = home <> \"/.morloc/.build-config.yaml\"\n      , configLangOverrides = mempty\n      , configRegistry = Nothing\n      }\n\nassertGeneralType :: String -> MT.Text -> TypeU -> TestTree\nassertGeneralType msg code t = testCase msg $ do\n  result <- runFront code\n  case result of\n    (Right [x]) -> assertEqual \"\" (closeExistentials . MTI.cleanTypeName $ t) (closeExistentials . MTI.cleanTypeName . renameExistentials . gtypeof $ x)\n    (Right _) -> error \"Expected exactly one export from main for assertGeneralType\"\n    (Left e) ->\n      error $\n        \"The following error was raised: \" <> show e <> \"\\nin:\\n\" <> show code\n\nrenameExistentials :: TypeU -> TypeU\nrenameExistentials = snd . f (0 :: Int, Map.empty)\n  where\n    f s (VarU v) = (s, VarU v)\n    f (i, m) (ExistU v (ps, pc) (rs, rc)) =\n      case Map.lookup v m of\n        (Just v') -> ((i, m), ExistU v' (ps, pc) (rs, rc))\n        Nothing ->\n          let v' = TV (\"e\" <> MT.pack (show i))\n              i' = i + 1\n              m' = Map.insert v v' m\n              (s', ps') = statefulMap f (i', m') ps\n              (s'', vs') = statefulMap f s' (map snd rs)\n           in (s'', ExistU v' (ps', pc) (zip (map fst rs) vs', rc))\n    f s (ForallU v t) =\n      let (s', t') = f s t\n       in (s', ForallU v t')\n    f s t@(NatVarU _) = (s, t)\n    f s (FunU ts t) =\n      let (s', ts') = statefulMap f s ts\n          (s'', t') = f s' t\n       in (s'', FunU ts' t')\n    f s (AppU t ts) =\n      let (s', t') = f s t\n          (s'', ts') = statefulMap f s' ts\n       in (s'', AppU t' ts')\n    f s (NamU o n vs rs) =\n      let (s', ts') = statefulMap f s (map snd rs)\n       in (s', NamU o n vs (zip (map fst rs) ts'))\n    f s (EffectU effs t) =\n      let (s', t') = f s t\n       in (s', EffectU effs t')\n    f s (OptionalU t) =\n      let (s', t') = f s t\n       in (s', OptionalU t')\n    f s t@(NatLitU _) = (s, t)\n    f s (NatAddU a b) = let (s', a') = f s a; (s'', b') = f s' b in (s'', NatAddU a' b')\n    f s (NatMulU a b) = let (s', a') = f s a; (s'', b') = f s' b in (s'', NatMulU a' b')\n    f s (NatSubU a b) = let (s', a') = f s a; (s'', b') = f s' b in (s'', NatSubU a' b')\n    f s (NatDivU a b) = let (s', a') = f s a; (s'', b') = f s' b in (s'', NatDivU a' b')\n    f s (LabeledU n t) = let (s', t') = f s t in (s', LabeledU n t')\n\ncloseExistentials :: TypeU -> TypeU\ncloseExistentials = f\n  where\n    f (ExistU v (ts, _) (rs, _)) = ExistU v (map f ts, Closed) (map (second f) rs, Closed)\n    f t@(VarU _) = t\n    f t@(NatVarU _) = t\n    f (ForallU v t) = ForallU v (f t)\n    f (FunU ts t) = FunU (map f ts) (f t)\n    f (AppU t ts) = AppU (f t) (map f ts)\n    f (NamU o v ts rs) = NamU o v (map f ts) (map (second f) rs)\n    f (EffectU effs t) = EffectU effs (f t)\n    f (OptionalU t) = OptionalU (f t)\n    f t@(NatLitU _) = t\n    f (NatAddU a b) = NatAddU (f a) (f b)\n    f (NatMulU a b) = NatMulU (f a) (f b)\n    f (NatSubU a b) = NatSubU (f a) (f b)\n    f (NatDivU a b) = NatDivU (f a) (f b)\n    f (LabeledU n t) = LabeledU n (f t)\n\n-- | Assert the general type before alias evaluation (preserves nat dimensions).\nassertRawType :: String -> MT.Text -> TypeU -> TestTree\nassertRawType msg code t = testCase msg $ do\n  result <- runFrontRaw code\n  case result of\n    (Right [x]) -> assertEqual \"\" (closeExistentials . MTI.cleanTypeName $ t) (closeExistentials . MTI.cleanTypeName . renameExistentials . gtypeof $ x)\n    (Right _) -> error \"Expected exactly one export from main for assertRawType\"\n    (Left e) ->\n      error $\n        \"The following error was raised: \" <> show e <> \"\\nin:\\n\" <> show code\n\nassertSubtypeGamma :: String -> [GammaIndex] -> TypeU -> TypeU -> [GammaIndex] -> TestTree\nassertSubtypeGamma msg gs1 a b gs2 = testCase msg $ do\n  let g0 = listToGamma gs1\n  case MTI.subtype Map.empty a b g0 of\n    Left e -> error $ show e\n    Right g -> assertEqual \"\" gs2 (MTI.gammaContextList g)\n\n-- | Convert a list of GammaIndex (newest first) to a Gamma with IntMap.\n-- Uses slot spacing of 256 to match production code.\nlistToGamma :: [GammaIndex] -> Gamma\nlistToGamma gs =\n  let spacing = 256\n      n = length gs\n      -- Newest entry gets highest slot\n      indexed = zip [spacing * (n - 1), spacing * (n - 2) .. 0] gs\n      ctx = IntMap.fromList indexed\n      existMap = Map.fromList [(v, s) | (s, ExistG v _ _) <- indexed]\n  in Gamma\n    { gammaCounter = 0\n    , gammaSlot = spacing * n\n    , gammaContext = ctx\n    , gammaExist = existMap\n    , gammaSolved = Map.empty\n    , gammaDeferred = []\n    , gammaNatSubs = Map.empty\n    , gammaIntVals = Map.empty\n    }\n\nexprTestBad :: String -> MT.Text -> TestTree\nexprTestBad msg code =\n  testCase msg $ do\n    result <- runFront code\n    case result of\n      (Right _) -> assertFailure . MT.unpack $ \"Expected '\" <> code <> \"' to fail\"\n      (Left _) -> return ()\n\nvaluecheckFail :: String -> MT.Text -> TestTree\nvaluecheckFail msg code =\n  testCase msg $ do\n    result <- runMiddle code\n    case result of\n      (Right _) -> assertFailure . MT.unpack $ \"Expected '\" <> code <> \"' to fail\"\n      (Left _) -> return ()\n\nvaluecheckPass :: String -> MT.Text -> TestTree\nvaluecheckPass msg code =\n  testCase msg $ do\n    result <- runMiddle code\n    case result of\n      (Right _) -> return ()\n      (Left _) -> assertFailure . MT.unpack $ \"Expected '\" <> code <> \"' to pass\"\n\n-- Don't test the type of error message, that would incur too much fiddly\n-- overhead as the messages and such are tweaked.\nexpectError :: String -> MT.Text -> TestTree\nexpectError msg code =\n  testCase msg $ do\n    result <- runFront code\n    case result of\n      (Right _) -> assertFailure . MT.unpack $ \"Expected failure\"\n      (Left _) -> return ()\n\ntestEqual :: (Eq a, Show a) => String -> a -> a -> TestTree\ntestEqual msg x y =\n  testCase msg $ assertEqual \"\" x y\n\ntestTrue :: String -> Bool -> TestTree\ntestTrue msg x =\n  testCase msg $ assertEqual \"\" x True\n\ntestFalse :: String -> Bool -> TestTree\ntestFalse msg x =\n  testCase msg $ assertEqual \"\" x False\n\nbool :: TypeU\nbool = VarU (TV \"Bool\")\n\nreal :: TypeU\nreal = VarU (TV \"Real\")\n\nint :: TypeU\nint = VarU (TV \"Int\")\n\nstr :: TypeU\nstr = VarU (TV \"Str\")\n\nfun :: [TypeU] -> TypeU\nfun [] = error \"Cannot infer type of empty list\"\nfun [t] = FunU [] t\nfun ts = FunU (init ts) (last ts)\n\nforallu :: [MT.Text] -> TypeU -> TypeU\nforallu ss t = foldr (\\s -> ForallU (TV s)) t ss\n\nexist :: MT.Text -> TypeU\nexist v = ExistU (TV v) ([], Open) ([], Open)\n\nexistP v ts rs = ExistU (TV v) (ts, Open) (rs, Open)\n\nvar :: MT.Text -> TypeU\nvar s = VarU (TV s)\n\narr :: MT.Text -> [TypeU] -> TypeU\narr s = AppU (VarU (TV s))\n\nlst :: TypeU -> TypeU\nlst t = arr \"List\" [t]\n\ntuple :: [TypeU] -> TypeU\ntuple ts = AppU v ts\n  where\n    v = VarU . TV . MT.pack $ \"Tuple\" ++ show (length ts)\n\nrecord' :: MT.Text -> [(Key, TypeU)] -> TypeU\nrecord' n = NamU NamRecord (TV n) []\n\nsubtypeTests :: TestTree\nsubtypeTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Test subtype within context\"\n      [ -- basic general cases\n        assertSubtypeGamma \"G -| A <: A |- G\" [] a a []\n      , assertSubtypeGamma \"<a>, <b> -| <a> <: <b> |- <a>:<b>, <b>\" [eag, ebg] ea eb [solvedA eb, ebg]\n      , assertSubtypeGamma \"<a>, <b> -| <b> <: <a> |- <a>:<b>, <b>\" [eag, ebg] ea eb [solvedA eb, ebg]\n      , assertSubtypeGamma \"G -| (A -> B) <: (A -> B) |- G\" [] (fun [a, b]) (fun [a, b]) []\n      , assertSubtypeGamma \"G -| [A] <: [A] |- G\" [] (lst a) (lst a) []\n      , assertSubtypeGamma\n          \"G -| {K :: a, L :: b} <: {K :: a, L :: b}\"\n          []\n          (record' \"Foo\" [(Key \"K\", a), (Key \"L\", b)])\n          (record' \"Foo\" [(Key \"K\", a), (Key \"L\", b)])\n          []\n      , assertSubtypeGamma \"<a> -| <a> <: A |- <a>:A\" [eag] ea a [solvedA a]\n      , assertSubtypeGamma \"<a> -| A <: <a> |- <a>:A\" [eag] a ea [solvedA a]\n      , assertSubtypeGamma \"<b> -| [A] <: <b> |- <b>:[A]\" [ebg] (lst a) (eb) [solvedB (lst a)]\n      , assertSubtypeGamma \"<a> -| <a> <: [B] |- <a>:[B]\" [eag] (lst b) (ea) [solvedA (lst b)]\n      , assertSubtypeGamma\n          \"<a>, <b> -| <a> <b> <: [C] |- <a>:[C], <b>:C\"\n          [eag, ebg]\n          (existP \"x1\" [eb] [])\n          (lst c)\n          [solvedA (lst c), solvedB c]\n      , assertSubtypeGamma\n          \"<a>, <b> -|[C] <: <a> <b> |- <a>:[C], <b>:C\"\n          [eag, ebg]\n          (lst c)\n          (existP \"x1\" [eb] [])\n          [solvedA (lst c), solvedB c]\n      , assertSubtypeGamma\n          \"[] -| forall a . a <: A -| a:A\"\n          []\n          (forallu [\"a\"] (var \"a\"))\n          a\n          [SolvedG (TV \"a\") a]\n      , -- nested types\n        assertSubtypeGamma \"<b> -| [A] <: [<b>] |- <b>:A\" [ebg] (lst a) (lst eb) [solvedB a]\n      , assertSubtypeGamma \"<a> -| [<a>] <: [B] |- <a>:B\" [eag] (lst b) (lst ea) [solvedA b]\n      , assertSubtypeGamma\n          \"<a>, <b> -| (A, B) <: (<a>, <b>) |- <a>:A, <b>:B\"\n          [eag, ebg]\n          (tuple [a, b])\n          (tuple [ea, eb])\n          [solvedA a, solvedB b]\n      , assertSubtypeGamma\n          \"<a>, <b> -| (<a>, <b>) <: (A, B) |- <a>:A, <b>:B\"\n          [eag, ebg]\n          (tuple [ea, eb])\n          (tuple [a, b])\n          [solvedA a, solvedB b]\n      , assertSubtypeGamma\n          \"<a>, <b>, <c>, <d> -| (<a>, <b>) <: (<c>, <d>) -| <a>:<c>, <b>:<d>, <c>, <d>\"\n          [eag, ebg, ecg, edg]\n          (tuple [ea, eb])\n          (tuple [ec, ed])\n          [solvedA ec, solvedB ed, ecg, edg]\n      ]\n  where\n    a = var \"A\"\n    b = var \"B\"\n    c = var \"C\"\n    ea = exist \"x1\"\n    eb = exist \"x2\"\n    ec = exist \"x3\"\n    ed = exist \"x4\"\n    eag = ExistG (TV \"x1\") ([], Open) ([], Open)\n    ebg = ExistG (TV \"x2\") ([], Open) ([], Open)\n    ecg = ExistG (TV \"x3\") ([], Open) ([], Open)\n    edg = ExistG (TV \"x4\") ([], Open) ([], Open)\n    solvedA t = SolvedG (TV \"x1\") t\n    solvedB t = SolvedG (TV \"x2\") t\n\nsubstituteTVarTests :: TestTree\nsubstituteTVarTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"test variable substitution\"\n      [ testEqual \"[x/y]Int\" (substituteTVar (TV \"x\") (var \"y\") int) int\n      , testEqual\n          \"[y/x]([x] -> x)\"\n          (substituteTVar (TV \"x\") (var \"y\") (fun [lst (var \"x\"), var \"x\"]))\n          (fun [lst (var \"y\"), var \"y\"])\n      ]\n\nwhitespaceTests :: TestTree\nwhitespaceTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Tests whitespace handling for modules\"\n      [ assertGeneralType\n          \"module indent == 1 and top indent == module indent\"\n          \"module foo (y)\\nx = 1\\ny = 2\"\n          int\n      , assertGeneralType\n          \"module indent == 1 and top indent > module indent\"\n          \"module foo (y)\\n  x = 1\\n  y = 2\"\n          int\n      , assertGeneralType\n          \"module indent > 1 and top indent > module indent\"\n          \" module foo (y)\\n   x = 1\\n   y = 2\"\n          int\n      , assertGeneralType\n          \"module indent > 1 and top indent = module indent\"\n          \"  module foo (y)\\n  x = 1\\n  y = 2\"\n          int\n      , -- indenting main\n        assertGeneralType\n          \"main indent == 1\"\n          \"module main (y)\\nx = 1\\ny = 2\"\n          int\n      , assertGeneralType\n          \"main indent > 1\"\n          \"module main (y)\\n  x = 1\\n  y = 2\"\n          int\n      , -- multiple modules\n        assertGeneralType\n          \"multiple modules at pos 1 with pos > 1 exprs\"\n          [r|\nmodule foo (x)\n  x = True\nmodule bar (y)\n  import foo\n  y = True\nmodule main (z)\n  import bar\n  z = 1\n      |]\n          int\n      ]\n\npackerTests :: TestTree\npackerTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Test building of packer maps\"\n      [testEqual \"packer test\" (1 :: Int) 1]\n\ntypeAliasTests :: TestTree\ntypeAliasTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Test type alias substitutions\"\n      [ assertGeneralType\n          \"general type alias\"\n          [r|\n        module main (f)\n        type Foo = A\n        f :: Foo\n        |]\n          (var \"A\")\n      , assertGeneralType\n          \"parameterized generic\"\n          [r|\n        module main (f)\n        f :: m (a -> b)\n        |]\n          (forallu [\"m___q0\", \"a___q1\", \"b___q2\"] (arr \"m___q0\" [fun [var \"a___q1\", var \"b___q2\"]]))\n      , assertGeneralType\n          \"non-parametric, general type alias\"\n          [r|\n        module main (f)\n        type Foo = A\n        f :: Foo -> B\n        |]\n          (fun [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"deep type substitution: `[Foo] -> B`\"\n          [r|\n        module main (f)\n        type Foo = A\n        f :: [Foo] -> B\n        |]\n          (fun [lst (var \"A\"), var \"B\"])\n      , assertGeneralType\n          \"deep type substitution: `[Foo] -> Foo`\"\n          [r|\n        module main (f)\n        type Foo = A\n        f :: [Foo] -> Foo\n        |]\n          (fun [lst (var \"A\"), var \"A\"])\n      , assertGeneralType\n          \"parametric alias, general type alias\"\n          [r|\n        module main (f)\n        type (Foo a b) = (a,b)\n        f :: Foo X Y -> Z\n        |]\n          (fun [tuple [var \"X\", var \"Y\"], var \"Z\"])\n      , assertGeneralType\n          \"nested types\"\n          [r|\n           module main (foo)\n           type A = B\n           type B = C\n           foo :: A -> B -> C\n        |]\n          (fun [var \"C\", var \"C\", var \"C\"])\n      , assertGeneralType\n          \"state is preserved across binding\"\n          [r|\n           module main (f)\n           type Foo = A\n           g :: Foo -> Int\n           f = g\n        |]\n          (fun [var \"A\", var \"Int\"])\n      , assertGeneralType\n          \"state is inherited across binding\"\n          [r|\n           module main (f)\n           type Foo = A\n           g :: a -> b\n           f :: Foo -> Int\n           f = g  {- yes, g isn't defined -}\n        |]\n          (fun [var \"A\", var \"Int\"])\n      , expectError\n          \"fail on too many type aliases parameters\"\n          [r|\n           type A = B\n           foo :: A Int -> C\n           foo\n        |]\n      , expectError\n          \"fail on too few type aliases parameters\"\n          [r|\n           type (A a) = (a,a)\n           foo :: A -> C\n           foo\n        |]\n      , expectError\n          \"fail on conflicting types (Int vs Str)\"\n          [r|\n           type A = Int\n         \n           module b (A)\n           type A = Str\n         \n           module main (foo)\n           import a (A)\n           import b (A)\n         \n           foo :: A -> A -> A\n        |]\n      , expectError\n          \"fail on conflicting types (Map vs List)\"\n          [r|\n           module a (A)\n           type A a b = Map a b\n           \n           module b (A)\n           type A a b = List (Tuple2 a b)\n           \n           module main (foo)\n           import a (A)\n           import b (A)\n           \n           foo :: A a b -> A a b -> A a b\n        |]\n      , -- import tests ---------------------------------------\n        assertGeneralType\n          \"non-parametric, general type alias, imported\"\n          [r|\n           module m1 (Foo)\n             type Foo = A\n           module main (f)\n             import m1 (Foo)\n             f :: Foo -> B\n        |]\n          (fun [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"non-parametric, general type alias, reimported\"\n          [r|\n           module m3 (Foo)\n             type Foo = A\n           module m2 (Foo)\n             import m3 (Foo)\n           module m1 (Foo)\n             import m2 (Foo)\n           module main (f)\n             import m1 (Foo)\n             f :: Foo -> B\n        |]\n          (fun [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"non-parametric, general type alias, imported aliased\"\n          [r|\n           module m1 (Foo)\n             type Foo = A\n           module main (f)\n             import m1 (Foo as Bar)\n             f :: Bar -> B\n        |]\n          (fun [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"non-parametric, general type alias, reimported aliased\"\n          [r|\n           module m3 (Foo1)\n             type Foo1 = A\n\n           module m2 (Foo2)\n             import m3 (Foo1 as Foo2)\n\n           module m1 (Foo3)\n             import m2 (Foo2 as Foo3)\n\n           module main (f)\n             import m1 (Foo3 as Foo4)\n             f :: Foo4 -> B\n        |]\n          (fun [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"non-parametric, general type alias, duplicate import\"\n          [r|\n           module m2 (Foo)\n             type Foo = A\n\n           module m1 (Foo)\n             type Foo = A\n\n           module main (f)\n             import m1 (Foo)\n             import m2 (Foo)\n             f :: Foo -> B\n        |]\n          (fun [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"parametric alias, general type alias, duplicate import\"\n          [r|\n           module m2 (Foo)\n             type (Foo a b) = (a,b)\n\n           module m1 (Foo)\n             type (Foo c d) = (c,d)\n\n           module main (f)\n             import m1 (Foo)\n             import m2 (Foo)\n             f :: Foo X Y -> Z\n        |]\n          (fun [tuple [var \"X\", var \"Y\"], var \"Z\"])\n      ]\n\nwhereTests :: TestTree\nwhereTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Test of where statements\"\n      [ assertGeneralType\n          \"simple where\"\n          [r|\n            f :: Int\n            f = z where\n                z = 42\n            f\n        |]\n          int\n      , assertGeneralType\n          \"calling simple where\"\n          [r|\n            inc :: Int -> Int\n            f = inc z where\n                z = 42\n            f\n        |]\n          int\n      , assertGeneralType\n          \"calling deeper where\"\n          [r|\n            id :: a -> a\n            inc :: Int -> Int\n            f = id z where\n                z = inc y where\n                  y = 42\n            f\n        |]\n          int\n      ]\n\norderInvarianceTests :: TestTree\norderInvarianceTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Test order invariance\"\n      [ assertGeneralType\n          \"definitions work\"\n          \"x = 42\\nx\"\n          int\n      , assertGeneralType\n          \"terms may be defined before they are used\"\n          \"y = 42\\nx = y\\nx\"\n          int\n      , assertGeneralType\n          \"long chains of substitution are OK too\"\n          \"z = 42\\ny = z\\nx = y\\nx\"\n          int\n      ]\n\ntypeOrderTests :: TestTree\ntypeOrderTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Tests of type partial ordering (subtype)\"\n      [ testFalse\n          \"Str !< Real\"\n          (isSubtypeOf str real)\n      , testFalse\n          \"Real !< Str\"\n          (isSubtypeOf real str)\n      , testFalse\n          \"[Real] !< [Str]\"\n          (isSubtypeOf (lst real) (lst str))\n      , testFalse\n          \"[Str] !< [Real]\"\n          (isSubtypeOf (lst str) (lst real))\n      , testFalse\n          \"Str -> Str -> Str !< Real -> Real -> Real\"\n          (isSubtypeOf (fun [str, str, str]) (fun [real, real, real]))\n      , testFalse\n          \"Real -> Real -> Real !< Str -> Str -> Str\"\n          (isSubtypeOf (fun [real, real, real]) (fun [str, str, str]))\n      , testFalse\n          \"Str -> Str !< Int -> Int -> Int\"\n          (isSubtypeOf (fun [str, str]) (fun [int, int, int]))\n      , testTrue\n          \"a <: Int\"\n          (isSubtypeOf (forallu [\"a\"] (var \"a\")) int)\n      , testFalse\n          \"Int !< forall a . a\"\n          (isSubtypeOf int (forallu [\"a\"] (var \"a\")))\n      , testTrue\n          \"forall a . (Int, a) <: (Int, Str)\"\n          (isSubtypeOf (forallu [\"a\"] (tuple [int, var \"a\"])) (tuple [int, str]))\n      , testTrue\n          \"forall a b . (a, b) <: (Int, Str)\"\n          (isSubtypeOf (forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])) (tuple [int, str]))\n      , testTrue\n          \"forall a . (Int, a) <: forall b . (Int, b)\"\n          ( isSubtypeOf\n              (forallu [\"a\"] (tuple [int, var \"a\"]))\n              (forallu [\"b\"] (tuple [int, var \"b\"]))\n          )\n      , testTrue\n          \"forall a . a <: (Int, Str)\"\n          (isSubtypeOf (forallu [\"a\"] (var \"a\")) (tuple [int, str]))\n      , testTrue\n          \"forall a . a <: forall a b . (a, b)\"\n          (isSubtypeOf (forallu [\"a\"] (var \"a\")) (forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])))\n      , -- cannot compare\n        testFalse\n          \"[Int] !< Int\"\n          (isSubtypeOf (lst int) int)\n      , testFalse\n          \"Int !< [Int]\"\n          (isSubtypeOf int (lst int))\n      , -- partial order of types\n        testTrue\n          \"forall a . [a] <= [Int]\"\n          ((forallu [\"a\"] (lst (var \"a\"))) <= (lst (var \"a\")))\n      , testFalse\n          \"[Int] !< forall a . [a]\"\n          ((lst (var \"a\")) <= (forallu [\"a\"] (lst (var \"a\"))))\n      , testTrue\n          \"forall a . (Int, a) <= (Int, Bool)\"\n          ((forallu [\"a\"] (tuple [int, var \"a\"])) <= (tuple [int, bool]))\n      , testFalse\n          \"(Int, Bool) !<= forall a . (Int, a)\"\n          ((tuple [int, bool]) <= (forallu [\"a\"] (tuple [int, var \"a\"])))\n      , testTrue\n          \"forall a b . (a, b) <= forall c . (Int, c)\"\n          ((forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])) <= (forallu [\"c\"] (tuple [int, var \"c\"])))\n      , testFalse\n          \"forall c . (Int, c) !<= forall a b . (a, b)\"\n          ((forallu [\"c\"] (tuple [int, var \"c\"])) <= (forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])))\n      , testTrue\n          \"forall a . a <= forall a b . (a, b)\"\n          ((forallu [\"a\"] (var \"a\")) <= (forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])))\n      , -- test \"mostSpecific\"\n        testEqual\n          \"mostSpecific [Int, Str, forall a . a] = [Int, Str]\"\n          (mostSpecific [int, str, forallu [\"a\"] (var \"a\")])\n          [int, str]\n      , -- test \"mostGeneral\"\n        testEqual\n          \"mostGeneral [Int, Str, forall a . a] = forall a . a\"\n          (mostGeneral [int, str, forallu [\"a\"] (var \"a\")])\n          [forallu [\"a\"] (var \"a\")]\n      , -- test mostSpecificSubtypes\n        testEqual\n          \"mostSpecificSubtypes: Int against [forall a . a]\"\n          (mostSpecificSubtypes int [forallu [\"a\"] (var \"a\")])\n          [forallu [\"a\"] (var \"a\")]\n      , testEqual\n          \"mostSpecificSubtypes: (Int -> Int)\"\n          ( mostSpecificSubtypes\n              (fun [int, int])\n              [fun [str, str], fun [int, int], forallu [\"a\"] (fun [var \"a\", var \"a\"])]\n          )\n          [fun [int, int]]\n      , testEqual\n          \"mostSpecificSubtypes: empty\"\n          (mostSpecificSubtypes (fun [str, str, str]) [fun [real, real, real]])\n          []\n      , -- test mostSpecificSubtypes for tuples\n        testEqual\n          \"mostSpecificSubtypes: tuples\"\n          ( mostSpecificSubtypes\n              (tuple [int, int])\n              [ forallu [\"a\"] (var \"a\")\n              , forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])\n              , forallu [\"a\", \"b\", \"c\"] (tuple [var \"a\", var \"b\", var \"c\"])\n              ]\n          )\n          [forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])]\n      , -- test mostSpecificSubtypes for tuples\n        testEqual\n          \"mostSpecificSubtypes: with partially generic tuples\"\n          ( mostSpecificSubtypes\n              (forallu [\"a\"] (tuple [int, var \"a\"]))\n              [ forallu [\"a\"] (var \"a\")\n              , forallu [\"a\", \"b\"] (tuple [var \"a\", var \"b\"])\n              , forallu [\"a\"] (tuple [int, var \"a\"])\n              , forallu [\"a\"] (tuple [int, bool])\n              , forallu [\"a\", \"b\", \"c\"] (tuple [var \"a\", var \"b\", var \"c\"])\n              ]\n          )\n          [forallu [\"a\"] (tuple [int, var \"a\"])]\n      ]\n\nunitTypeTests :: TestTree\nunitTypeTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Typechecker unit tests\"\n      -- comments\n      [ assertGeneralType \"block comments (1)\" \"{- -} 42\" int\n      , assertGeneralType \"block comments (2)\" \" {--} 42{-   foo -} \" int\n      , assertGeneralType \"line comments (3)\" \"-- foo\\n 42\" int\n      , -- reals versus integers\n        assertGeneralType \"0 is an int\" \"0\" int\n      , assertGeneralType \"42 is an int\" \"42\" int\n      , assertGeneralType \"-42 is an int\" \"-42\" int\n      , assertGeneralType \"big integers are OK\" \"123456789123456789123456789123456789123456789123456789\" int\n      , assertGeneralType\n          \"big negative integers are OK\"\n          \"-123456789123456789123456789123456789123456789123456789\"\n          int\n      , assertGeneralType \"0.0 is a real\" \"0.0\" real\n      , assertGeneralType \"4.2 is a real\" \"4.2\" real\n      , assertGeneralType \"-4.2 is a real\" \"-4.2\" real\n      , assertGeneralType \"4e1 is a real (scientific notation is real)\" \"4e1\" real\n      , assertGeneralType \"-4e1 is a real\" \"-4e1\" real\n      , assertGeneralType \"-4e-1 is a real\" \"-4e-1\" real\n      , assertGeneralType \"4.2e3000 is a real\" \"4.2e3000\" real\n      , assertGeneralType \"irregular scientific notation is OK\" \"123456789123456789123456789e-3000\" real\n      , assertGeneralType \"reals may be big\" \"123456789123456789123456789.123456789123456789123456789\" real\n      , -- other primitives\n        assertGeneralType \"primitive boolean\" \"True\" bool\n      , assertGeneralType \"primitive string\" \"\\\"this is a string literal\\\"\" str\n      , assertGeneralType \"primitive integer annotation\" \"42 :: Int\" int\n      , assertGeneralType \"primitive boolean annotation\" \"True :: Bool\" bool\n      , assertGeneralType \"primitive double annotation\" \"4.2 :: Real\" real\n      , assertGeneralType\n          \"primitive string annotation\"\n          \"\\\"this is a string literal\\\" :: Str\"\n          str\n      , assertGeneralType \"primitive declaration\" \"x = True\\n4.2\" real\n      , -- containers\n        -- - lists\n        assertGeneralType \"list of one primitive\" \"[1]\" (lst int)\n      , assertGeneralType \"list of many primitives\" \"[1,2,3]\" (lst int)\n      , assertGeneralType \"list of many containers\" \"[(True,1),(False,2)]\" (lst (tuple [bool, int]))\n      , -- - tuples\n        assertGeneralType \"tuple of primitives\" \"(1,2,True)\" (tuple [int, int, bool])\n      , assertGeneralType \"tuple with containers\" \"(1,(2,True))\" (tuple [int, tuple [int, bool]])\n      , -- - records\n        assertGeneralType\n          \"primitive record statement\"\n          [r|\n        {x=42, y=\"yolo\"}\n        |]\n          (existP \"e0\" [] [(Key \"x\", int), (Key \"y\", str)])\n      , assertGeneralType\n          \"primitive record signature\"\n          [r|\n        record Foo = Foo {x :: Int, y :: Str}\n        f :: Int -> Foo\n        f 42\n        |]\n          (record' \"Foo\" [(Key \"x\", int), (Key \"y\", str)])\n      , assertGeneralType\n          \"primitive record declaration\"\n          [r|\n        foo = {x = 42, y = \"yolo\"}\n        foo\n        |]\n          (existP \"e0\" [] [(Key \"x\", int), (Key \"y\", str)])\n      , assertGeneralType\n          \"nested records\"\n          [r|\n        {x = 42, y = {bob = 24601, tod = \"listen now closely and hear how I've planned it\"}}\n        |]\n          (existP \"e0\" [] [(Key \"x\", int), (Key \"y\", existP \"e1\" [] [(Key \"bob\", int), (Key \"tod\", str)])])\n      , assertGeneralType\n          \"records with bound variables\"\n          [r|\n        foo a = {x=a, y=\"yolo\"}\n        foo 42\n        |]\n          (existP \"e0\" [] [(Key \"x\", int), (Key \"y\", str)])\n      , -- functions\n        assertGeneralType\n          \"1-arg function declaration without signature\"\n          [r|\n        f x = True\n        f 42\n        |]\n          bool\n      , assertGeneralType\n          \"2-arg function declaration without signature\"\n          [r|\n        f x y = True\n        f 42 True\n        |]\n          bool\n      , assertGeneralType\n          \"1-arg function signature without declaration\"\n          [r|\n        f :: Int -> Bool\n        f 42\n        |]\n          bool\n      , assertGeneralType\n          \"2-arg function signature without declaration\"\n          [r|\n        f :: Int -> Bool -> Str\n        f 42 True\n        |]\n          str\n      , assertGeneralType\n          \"partial 1-2 function signature without declaration\"\n          [r|\n        f :: Int -> Bool -> Str\n        f 42\n        |]\n          (fun [bool, str])\n      , assertGeneralType\n          \"identity function declaration and application\"\n          [r|\n        f x = x\n        f 42\n        |]\n          int\n      , assertGeneralType\n          \"const declared function\"\n          [r|\n        const x y = x\n        const 42 True\n        |]\n          int\n      , assertGeneralType\n          \"identity signature function\"\n          [r|\n        id :: a -> a\n        id 42\n        |]\n          int\n      , assertGeneralType\n          \"const signature function\"\n          [r|\n        const :: a -> b -> a\n        const 42 True\n        |]\n          int\n      , assertGeneralType\n          \"fst signature function\"\n          [r|\n        fst :: (a,b) -> a\n        fst (42,True)\n        |]\n          int\n      , assertGeneralType\n          \"value to list function\"\n          [r|\n        single :: a -> [a]\n        single 42\n        |]\n          (lst int)\n      , assertGeneralType\n          \"head function\"\n          [r|\n        head :: [a] -> a\n        head [1,2,3]\n        |]\n          int\n      , assertGeneralType\n          \"make list function\"\n          [r|\n        f :: a -> [a]\n        f 1\n        |]\n          (lst int)\n      , assertGeneralType\n          \"make list function\"\n          [r|\n        single :: a -> [a]\n        single 1\n        |]\n          (lst int)\n      , assertGeneralType\n          \"existential function passing\"\n          [r|\n        module main (g)\n        g f = f True\n        |]\n          (fun [fun [bool, exist \"e0\"], exist \"e0\"])\n      , assertGeneralType\n          \"app single function\"\n          [r|\n        app :: (a -> b) -> a -> b\n        f :: a -> [a]\n        app f 42\n        |]\n          (lst int)\n      , assertGeneralType\n          \"app head function\"\n          [r|\n        app :: (a -> b) -> a -> b\n        f :: [a] -> a\n        app f [42]\n        |]\n          int\n      , assertGeneralType\n          \"simple nested call\"\n          [r|\n      f x = x\n      g x = f x\n      g 1\n      |]\n          int\n      , assertGeneralType\n          \"nested calls\"\n          [r|\n      f x y = (x, y)\n      g x y = (x, f 1 y)\n      g True \"hi\"\n      |]\n          (tuple [bool, tuple [int, str]])\n      , assertGeneralType\n          \"zip pair\"\n          [r|\n      pair x y = (x, y)\n      zip :: (x -> y -> z) -> [x] -> [y] -> [z]\n      zip pair [1,2] [True, False]\n      |]\n          (lst (tuple [int, bool]))\n      , assertGeneralType\n          \"nested identity\"\n          [r|\n      id :: a -> a\n      id (id (id 1))\n      |]\n          int\n      , assertGeneralType\n          \"head (head [[1]])\"\n          [r|\n      head :: [a] -> a\n      head (head [[42]])\n      |]\n          int\n      , assertGeneralType\n          \"snd (snd (1,(1,True)))\"\n          [r|\n      snd :: (a, b) -> b\n      snd (snd (1, (1, True)))\n      |]\n          bool\n      , assertGeneralType\n          \"f x y = [x, y]\"\n          [r|\n        f x y = [x, y]\n        f 1\n        |]\n          (fun [int, lst int])\n      , assertGeneralType\n          \"map head function\"\n          [r|\n        map :: (a -> b) -> [a] -> [b]\n        head :: [a] -> a\n        map head [[1],[1,2,3]]\n        |]\n          (lst int)\n      , assertGeneralType\n          \"t a -> a\"\n          [r|\n        gify :: a -> G a\n        out :: f a -> a\n        out (gify 1)\n        |]\n          int\n      , assertGeneralType\n          \"f a b -> b\"\n          [r|\n        gify :: a -> b -> G a b\n        snd :: f a b -> b\n        snd (gify 1 True)\n        |]\n          bool\n      , assertGeneralType\n          \"map id over number list\"\n          [r|\n        map :: (a -> b) -> [a] -> [b]\n        id :: a -> a\n        map id [1,2,3]\n        |]\n          (lst int)\n      , assertGeneralType\n          \"map fst over tuple list\"\n          [r|\n        map :: (a -> b) -> [a] -> [b]\n        fst :: (a,b) -> a\n        map fst [(1,True),(2,False)]\n        |]\n          (lst int)\n      , assertGeneralType\n          \"map fstG over (G a b) list\"\n          [r|\n        gify :: a -> b -> G a b\n        map :: (a -> b) -> [a] -> [b]\n        fstF :: f a b -> a\n        map fstF [gify 1 True, gify 2 False]\n        |]\n          (lst int)\n      , assertGeneralType\n          \"fmap generic fst over functor\"\n          [r|\n        gify :: a -> G a\n        fmap :: (a -> b) -> f a -> f b\n        out :: f a -> a\n        fmap out (gify [1])\n        |]\n          (arr \"G\" [int])\n      , assertGeneralType\n          \"generic parameter reordering\"\n          [r|\n        module m (biz)\n        type M a b c = R b a c\n        foo :: M a b c -> N b c\n        bar :: a -> b -> c -> R a b c\n        da :: Int -> X\n        db :: Int -> Y\n        dc :: Int -> Z\n        baz a b c = foo (bar a b c)\n        -- biz :: N X Z\n        biz = baz (da 1) (db 2) (dc 3)\n        |]\n          (arr \"N\" [var \"X\", var \"Z\"])\n      , assertGeneralType\n          \"variable annotation\"\n          [r|\n        module main (f)\n        f :: Foo\n        |]\n          (var \"Foo\")\n      , -- lambdas\n        assertGeneralType\n          \"function with parameterized types\"\n          [r|\n        module main (f)\n        f :: A B -> C\n        |]\n          (fun [arr \"A\" [var \"B\"], var \"C\"])\n      , assertGeneralType \"fully applied lambda (1)\" \"(\\\\x y -> x) 1 True\" int\n      , assertGeneralType \"fully applied lambda (2)\" \"(\\\\x -> True) 42\" bool\n      , assertGeneralType \"fully applied lambda (3)\" \"(\\\\x -> (\\\\y -> True) x) 42\" bool\n      , assertGeneralType \"fully applied lambda (4)\" \"(\\\\x -> (\\\\y -> x) True) 42\" int\n      , assertGeneralType\n          \"unapplied lambda, polymorphic (1)\"\n          [r|\\x -> True|]\n          (fun [exist \"e0\", bool])\n      , assertGeneralType\n          \"unapplied lambda, polymorphic (2)\"\n          \"(\\\\x y -> x) :: a -> b -> a\"\n          (fun [exist \"e0\", exist \"e1\", exist \"e0\"])\n      , assertGeneralType\n          \"annotated, fully applied lambda\"\n          \"((\\\\x -> x) :: a -> a) True\"\n          bool\n      , assertGeneralType\n          \"annotated, partially applied lambda\"\n          \"((\\\\x y -> x) :: a -> b -> a) True\"\n          (fun [exist \"e0\", bool])\n      , assertGeneralType\n          \"recursive functions are A-OK\"\n          \"\\\\f -> f 5\"\n          (fun [fun [int, exist \"e0\"], exist \"e0\"])\n      , -- applications\n        assertGeneralType\n          \"primitive variable in application\"\n          [r|\n        x = True\n        (\\y -> y) x\n        |]\n          bool\n      , assertGeneralType\n          \"function variable in application\"\n          [r|\n        f x y = x\n        f 42 True\n        |]\n          int\n      , assertGeneralType\n          \"partially applied function variable in application\"\n          [r|\n        f x y = x\n        x = f 42\n        x\n        |]\n          (fun [exist \"e0\", int])\n      , exprTestBad\n          \"applications with too many arguments fail\"\n          [r|\n        f :: a -> a\n        f True 12\n        |]\n      , exprTestBad\n          \"applications with mismatched types fail (1)\"\n          [r|\n        abs :: Int -> Int\n        abs True\n        |]\n      , exprTestBad\n          \"applications with mismatched types fail (2)\"\n          [r|\n        f = 14\n        g = \\x h -> h x\n        (g True) f\n        |]\n      , expectError\n          \"applications of non-functions should fail (1)\"\n          [r|\n        f = 5\n        g = \\x -> f x\n        g 12\n        |]\n      , expectError\n          \"applications of non-functions should fail (2)\"\n          [r|\n        f = 5\n        g = \\h -> h 5\n        g f\n        |]\n      , -- evaluation within containers\n        expectError\n          \"arguments to a function are monotypes\"\n          [r|\n        f :: a -> a\n        g = \\h -> (h 42, h True)\n        g f\n        |]\n      , assertGeneralType\n          \"polymorphism under lambdas (203f8c) (1)\"\n          [r|\n        f :: a -> a\n        g = \\h -> (h 42, h 1234)\n        g f\n        |]\n          (tuple [int, int])\n      , assertGeneralType\n          \"polymorphism under lambdas (203f8c) (2)\"\n          [r|\n        f :: a -> a\n        g = \\h -> [h 42, h 1234]\n        g f\n        |]\n          (lst int)\n      , -- binding\n        assertGeneralType\n          \"annotated variables without definition are legal\"\n          [r|\n        module main (x)\n        x :: Int\n        |]\n          int\n      , assertGeneralType\n          \"unannotated variables with definition are legal\"\n          [r|\n        x = 42\n        x\n        |]\n          int\n      , -- , exprTestBad\n        --     \"unannotated variables without definitions are illegal ('x')\"\n        --     \"x\"\n\n        -- parameterized types\n        assertGeneralType\n          \"parameterized type (n=1)\"\n          [r|\n        module main (xs)\n        xs :: Foo A\n        |]\n          (arr \"Foo\" [var \"A\"])\n      , assertGeneralType\n          \"parameterized type (n=2)\"\n          [r|\n        module main (xs)\n        xs :: Foo A B\n        |]\n          (arr \"Foo\" [var \"A\", var \"B\"])\n      , assertGeneralType\n          \"nested parameterized type\"\n          [r|\n        module main (xs)\n        xs :: Foo (Bar A) [B]\n        |]\n          (arr \"Foo\" [arr \"Bar\" [var \"A\"], arr \"List\" [var \"B\"]])\n      , -- type signatures and higher-order functions\n        assertGeneralType\n          \"type signature: identity function\"\n          [r|\n        f :: a -> a\n        f 42\n        |]\n          int\n      , assertGeneralType\n          \"type signature: apply function with primitives\"\n          [r|\n        apply :: (Int -> Bool) -> Int -> Bool\n        f :: Int -> Bool\n        apply f 42\n        |]\n          bool\n      , assertGeneralType\n          \"type signature: generic apply function\"\n          [r|\n        apply :: (a->b) -> a -> b\n        f :: Int -> Bool\n        apply f 42\n        |]\n          bool\n      , assertGeneralType\n          \"type signature: map\"\n          [r|\n        map :: (a->b) -> [a] -> [b]\n        f :: Int -> Bool\n        map f [5,2]\n        |]\n          (lst bool)\n      , -- shadowing\n        assertGeneralType\n          \"name shadowing in lambda expressions\"\n          [r|\n        f x = (14, x)\n        g x f = f x\n        g True f\n        |]\n          (tuple [int, bool])\n      , assertGeneralType\n          \"function passing without shadowing\"\n          [r|\n        f x = (14, x)\n        g foo = foo True\n        g f\n        |]\n          (tuple [int, bool])\n      , assertGeneralType\n          \"shadowed qualified type variables (7ffd52a)\"\n          [r|\n        f :: a -> a\n        g :: a -> Int\n        g f\n        |]\n          int\n      , assertGeneralType\n          \"non-shadowed qualified type variables (7ffd52a)\"\n          [r|\n        f :: a -> a\n        g :: b -> Int\n        g f\n        |]\n          int\n      , -- lists\n        assertGeneralType \"list of primitives\" \"[1,2,3]\" (lst int)\n      , assertGeneralType\n          \"list containing an applied variable\"\n          [r|\n        f :: a -> a\n        [53, f 34]\n        |]\n          (lst int)\n      , -- NOTE: this test relies on internal renaming implementation\n        assertGeneralType \"empty list\" \"[]\" (lst (exist \"e0\"))\n      , assertGeneralType\n          \"list in function signature and application\"\n          [r|\n        f :: [Int] -> Bool\n        f [1]\n        |]\n          bool\n      , -- , assertGeneralType\n        --     \"list in generic function signature and application\"\n        --     \"f :: [a] -> Bool\\nf [1]\"\n        --     [bool]\n        -- , exprTestBad \"failure on heterogenous list\" \"[1,2,True]\"\n\n        -- tuples\n        assertGeneralType\n          \"tuple of primitives\"\n          [r|\n        (4.2, True)\n        |]\n          (tuple [real, bool])\n      , assertGeneralType\n          \"tuple containing an applied variable\"\n          [r|\n        f :: a -> a\n        (f 53, True)\n        |]\n          (tuple [int, bool])\n      , assertGeneralType\n          \"check 2-tuples type signature\"\n          [r|\n        module main (f)\n        f :: (Int, Str)\n        |]\n          (tuple [int, str])\n      , assertGeneralType \"1-tuples are just for grouping\" \"module main (f)\\nf :: (Int)\" int\n      , -- unit type\n        assertGeneralType\n          \"unit as input\"\n          [r|\n        module main (f)\n        f :: () -> Bool\n        |]\n          (fun [VarU (TV \"Unit\"), bool])\n      , assertGeneralType\n          \"unit as 2rd input\"\n          [r|\n        module main (f)\n        f :: Int -> () -> Bool\n        |]\n          (fun [int, VarU (TV \"Unit\"), bool])\n      , assertGeneralType\n          \"unit as output\"\n          [r|\n        module main (f)\n        f :: Bool -> ()\n        |]\n          (fun [bool, VarU (TV \"Unit\")])\n      , -- FIXME - I really don't like \"Unit\" being a normal var ...\n        -- I am inclined to cast it as the unit type\n        assertGeneralType \"empty tuples are of unit type\" \"module main (f)\\nf :: ()\" (var \"Unit\")\n      , -- extra space\n        assertGeneralType \"leading space\" \" 42\" int\n      , assertGeneralType \"trailing space\" \"42 \" int\n      , -- adding signatures to declarations\n        assertGeneralType\n          \"declaration with a signature (1)\"\n          [r|\n        f :: a -> a\n        f x = x\n        f 42\n        |]\n          int\n      , assertGeneralType\n          \"declaration with a signature (2)\"\n          [r|\n        f :: Int -> Bool\n        f x = True\n        f 42\n        |]\n          bool\n      , assertGeneralType\n          \"declaration with a signature (3)\"\n          [r|\n        f :: Int -> Bool\n        f x = True\n        f\n        |]\n          (fun [int, bool])\n      , expectError\n          \"primitive type mismatch should raise error\"\n          [r|\n        module main (f)\n        f :: Int -> Bool\n        f x = 9999\n        |]\n      , expectError\n          \"catch infinite recursion of list\"\n          [r|\n        module main (f)\n        g :: [a] -> a\n        f :: a -> a\n        f x = g x\n        |]\n      , expectError\n          \"catch infinite recursion of tuple\"\n          [r|\n        module main (f)\n        g :: (a, b) -> a\n        f :: a -> a\n        f x = g x\n        |]\n      , expectError\n          \"check signatures under supposed identity\"\n          [r|\n        module main (f)\n        g :: (a -> b) -> a\n        f :: a -> a\n        f x = g x\n        |]\n      ,\n\n        -- constraint syntax (implicit quantification wraps free vars in ForallU)\n        assertGeneralType\n          \"constraint syntax (1)\"\n          \"module main (f)\\nf :: (Ord a) => a -> a -> a\"\n          (forallu [\"a\"] (fun [var \"a\", var \"a\", var \"a\"]))\n      , assertGeneralType\n          \"constraint syntax (2)\"\n          \"module main (f)\\nf :: Ord a => a -> a -> a\"\n          (forallu [\"a\"] (fun [var \"a\", var \"a\", var \"a\"]))\n      , assertGeneralType\n          \"constraint syntax (3)\"\n          \"module main (f)\\nf :: (Ord a, Eq b) => a -> b -> Bool\"\n          (forallu [\"a\", \"b\"] (fun [var \"a\", var \"b\", VarU (TV \"Bool\")]))\n      , -- tests modules\n        assertGeneralType\n          \"basic main module\"\n          [r|\n          module main(x)\n          x = [1,2,3]\n        |]\n          (lst int)\n      , (flip $ assertGeneralType \"import/export\") (lst int) $\n          [r|\n          module foo (x)\n            x = 42\n          module bar (f)\n            f :: a -> [a]\n          module main (z)\n            import foo (x)\n            import bar (f)\n            z = f x\n        |]\n      , (flip $ assertGeneralType \"complex parse (1)\") int $\n          [r|\n         module foo (x)\n           add :: Int -> Int -> Int\n           x = add a y where\n             a = 1\n             y = add b z where\n               b = 42\n           z = 19\n      |]\n      ]\n\nunitValuecheckTests :: TestTree\nunitValuecheckTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Valuechecker unit tests\"\n      [ valuecheckFail\n          \"unequal primitives fail\"\n          -- primitives\n          [r|\n         module foo (x)\n           x = 1\n           x = 2\n      |]\n      , valuecheckPass\n          \"equal primitives pass\"\n          [r|\n         module foo (x)\n           x = 1\n           x = 1\n      |]\n      , -- containers\n        valuecheckFail\n          \"lists with unequal values fail\"\n          [r|\n         module foo (x)\n           x = [1,3]\n           x = [1,2]\n      |]\n      , valuecheckFail\n          \"lists of unequal length fail\"\n          [r|\n         module foo (x)\n           x = [1]\n           x = [1,2]\n      |]\n      , valuecheckPass\n          \"identical lists pass\"\n          [r|\n         module foo (x)\n           x = [1,2]\n           x = [1,2]\n      |]\n      , -- bound terms in simple expressions\n        valuecheckFail\n          \"argument constraints\"\n          [r|\n         module foo (f)\n           f x y = x\n           f a b = b\n      |]\n      , valuecheckFail\n          \"lambda var mismatches\"\n          [r|\n         module foo (f)\n           f x y = [x,y]\n           f a b = [b,a]\n      |]\n      , valuecheckPass\n          \"identical lambda passes\"\n          [r|\n         module foo (f)\n           f x y = [x,y]\n           f a b = [a,b]\n      |]\n      , -- comparisons of simple and non-simple\n        valuecheckFail\n          \"constrained values fail\"\n          [r|\n         module foo (x)\n           source Py (\"sum\")\n           sum :: [Int] -> Int\n           x = sum [1, 2]\n           x = 3\n      |]\n      , valuecheckFail\n          \"unequal types\"\n          [r|\n         module foo (f)\n           source Py (\"sum\")\n           sum :: [Int] -> Int\n           f xs = [1, sum xs]\n           f xs = [2, sum xs]\n      |]\n      ]\n\n{- | Tests for infix operator functionality\nAll tests have a 1-second timeout to prevent infinite loops\n-}\ninfixOperatorTests :: TestTree\ninfixOperatorTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout in microseconds\n    testGroup\n      \"Infix operator tests\"\n      [ -- Basic precedence tests\n        assertGeneralType\n          \"default precedence: multiplication before addition\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          (*) :: Int -> Int -> Int\n          (*) x y = y\n          z = 1 + 2 * 3\n          z\n        |]\n          int\n      , assertGeneralType\n          \"custom precedence: higher binds tighter\"\n          [r|\n          infixl 3 #\n          infixl 8 @\n          (#) :: Int -> Int -> Int\n          (#) x y = x\n          (@) :: Int -> Int -> Int\n          (@) x y = y\n          x = 1 # 2 @ 3\n          x\n        |]\n          int\n      , -- Associativity tests\n        assertGeneralType\n          \"left associative operators\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = 1 + 2 + 3\n          x\n        |]\n          int\n      , assertGeneralType\n          \"right associative operators\"\n          [r|\n          infixr 5 ++\n          (++) :: [Int] -> [Int] -> [Int]\n          (++) xs ys = xs\n          x = [1] ++ [2] ++ [3]\n          x\n        |]\n          (lst int)\n      , -- Operators in prefix position\n        assertGeneralType\n          \"operator used prefix\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = (+) 1 2\n          x\n        |]\n          int\n      , assertGeneralType\n          \"operator in lambda\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          f :: Int -> Int -> Int\n          f = \\x y -> x + y\n          f\n        |]\n          (fun [int, int, int])\n      , -- Default precedence tests\n        assertGeneralType\n          \"default * has precedence 7\"\n          [r|\n          infixl 6 +\n          (*) :: Int -> Int -> Int\n          (*) x y = y\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = 1 + 2 * 3\n          x\n        |]\n          int\n      , assertGeneralType\n          \"default + has precedence 6\"\n          [r|\n          infixl 7 *\n          (*) :: Int -> Int -> Int\n          (*) x y = y\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = 1 + 2 * 3\n          x\n        |]\n          int\n      , -- Multiple operators in one declaration\n        assertGeneralType\n          \"multiple operators same fixity\"\n          [r|\n          infixl 6 +, -\n          (+) :: Int -> Int -> Int\n          (-) :: Int -> Int -> Int\n          x = 1 + 2 - 3\n          x\n        |]\n          int\n      , -- Polymorphic operators\n        assertGeneralType\n          \"polymorphic operator\"\n          [r|\n          infixl 9 .\n          infixl 6 +\n          infixr 0 $\n          (.) :: (b -> c) -> (a -> b) -> a -> c\n          ($) :: (a -> b) -> a -> b\n          (+) :: Int -> Int -> Int\n          show :: a -> Str\n          x = show . (+) 9 $ 5\n          x\n        |]\n          str\n      , assertGeneralType\n          \"polymorphic list append\"\n          [r|\n          infixl 6 ++\n          (++) :: [a] -> [a] -> [a]\n          (++) xs ys = xs\n          x = [1] ++ [2]\n          x\n        |]\n          (lst int)\n      , -- Complex expressions\n        assertGeneralType\n          \"nested operations with parens\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          (*) :: Int -> Int -> Int\n          (*) x y = y\n          x = (1 + 2) * (3 + 4)\n          x\n        |]\n          int\n      , -- Operators in different contexts\n        assertGeneralType\n          \"operator in where clause\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = y + z where\n            y = 1\n            z = 2\n          x\n        |]\n          int\n      , assertGeneralType\n          \"operator in list\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          xs = [1 + 2, 3 + 4]\n          xs\n        |]\n          (lst int)\n      , assertGeneralType\n          \"operator in tuple\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = (1 + 2, \"hi\")\n          x\n        |]\n          (AppU (VarU (TV \"Tuple2\")) [int, str])\n      , -- Edge cases\n        assertGeneralType\n          \"operator precedence 0 (lowest)\"\n          [r|\n          infixr 0 $\n          ($) :: (Int -> Int) -> Int -> Int\n          ($) f x = f x\n          g :: Int -> Int\n          x = g $ 5\n          x\n        |]\n          int\n      , assertGeneralType\n          \"operator precedence 9 (highest)\"\n          [r|\n          infixl 9 !!!\n          (!!!) :: Int -> Int -> Int\n          (!!!) x y = x\n          x = 1 !!! 2\n          x\n        |]\n          int\n      , -- Operators with both parens and bare syntax in fixity decls\n        assertGeneralType\n          \"fixity with parentheses\"\n          [r|\n          infixl 6 (+)\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = 1 + 2\n          x\n        |]\n          int\n      , assertGeneralType\n          \"fixity without parentheses\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          (+) x y = x\n          x = 1 + 2\n          x\n        |]\n          int\n      , -- Type-verified precedence: asymmetric operator types ensure\n        -- only the correct parse tree typechecks\n        assertGeneralType\n          \"type-verified: * at 7 binds tighter than + at 6\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Str -> Int -> Str\n          (*) :: Int -> Int -> Int\n          x = \"a\" + 1 * 2\n          x\n        |]\n          str\n      , assertGeneralType\n          \"type-verified: @ at 8 binds tighter than # at 3\"\n          [r|\n          infixl 3 #\n          infixl 8 @\n          (#) :: Str -> Int -> Str\n          (@) :: Int -> Int -> Int\n          x = \"a\" # 1 @ 2\n          x\n        |]\n          str\n      , assertGeneralType\n          \"type-verified: three-operator precedence chain\"\n          [r|\n          infixl 3 <$>\n          infixl 6 +\n          infixl 9 *\n          (<$>) :: Str -> Str -> Int\n          (+) :: Str -> Int -> Str\n          (*) :: Int -> Int -> Int\n          x = \"a\" <$> \"b\" + 1 * 2\n          x\n        |]\n          int\n      , -- Type-verified associativity: asymmetric operator types ensure\n        -- only the correct associativity typechecks\n        assertGeneralType\n          \"type-verified: left-assoc chain\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Str -> Int\n          x = 1 + \"a\" + \"b\"\n          x\n        |]\n          int\n      , assertGeneralType\n          \"type-verified: right-assoc chain\"\n          [r|\n          infixr 5 ++\n          (++) :: Str -> Int -> Int\n          x = \"a\" ++ \"b\" ++ 1\n          x\n        |]\n          int\n      , -- Application operator ($)\n        assertGeneralType\n          \"$ applies function to argument\"\n          [r|\n          infixr 0 $\n          ($) :: (a -> b) -> a -> b\n          f :: Int -> Str\n          x = f $ 1\n          x\n        |]\n          str\n      , assertGeneralType\n          \"nested $ is right-associative (type-verified)\"\n          [r|\n          infixr 0 $\n          ($) :: (a -> b) -> a -> b\n          f :: Int -> Str\n          g :: Str -> Int\n          x = g $ f $ 1\n          x\n        |]\n          int\n      , assertGeneralType\n          \"$ binds looser than + (type-verified)\"\n          [r|\n          infixr 0 $\n          infixl 6 +\n          ($) :: (a -> b) -> a -> b\n          (+) :: Int -> Int -> Int\n          f :: Int -> Str\n          x = f $ 1 + 2\n          x\n        |]\n          str\n      , -- Composition operator (.)\n        assertGeneralType\n          \"composition of two functions\"\n          [r|\n          infixr 9 .\n          (.) :: (b -> c) -> (a -> b) -> a -> c\n          g :: Str -> Int\n          f :: Int -> Str\n          x = g . f\n          x\n        |]\n          (fun [int, int])\n      , assertGeneralType\n          \"composition chain of three functions\"\n          [r|\n          infixr 9 .\n          (.) :: (b -> c) -> (a -> b) -> a -> c\n          h :: Str -> Int\n          g :: Int -> Str\n          f :: Bool -> Int\n          x = h . g . f\n          x\n        |]\n          (fun [bool, int])\n      , assertGeneralType\n          \"composition binds tighter than $ (type-verified)\"\n          [r|\n          infixr 9 .\n          infixr 0 $\n          (.) :: (b -> c) -> (a -> b) -> a -> c\n          ($) :: (a -> b) -> a -> b\n          f :: Int -> Int\n          g :: Int -> Str\n          x = g . f $ 5\n          x\n        |]\n          str\n      , -- Position independence of fixity declarations\n        assertGeneralType\n          \"fixity declared after usage\"\n          [r|\n          (+) :: Int -> Str -> Int\n          x = 1 + \"a\"\n          infixl 6 +\n          x\n        |]\n          int\n      , assertGeneralType\n          \"fixity and type sig both declared after usage\"\n          [r|\n          x = 1 + \"a\"\n          infixl 6 +\n          (+) :: Int -> Str -> Int\n          x\n        |]\n          int\n      , assertGeneralType\n          \"both fixities at end, precedence still works\"\n          [r|\n          (+) :: Str -> Int -> Str\n          (*) :: Int -> Int -> Int\n          x = \"a\" + 1 * 2\n          infixl 6 +\n          infixl 7 *\n          x\n        |]\n          str\n      , -- Default fixity is infixl 9\n        assertGeneralType\n          \"undeclared operator defaults to prec 9 (type-verified)\"\n          [r|\n          infixl 6 +\n          (+) :: Str -> Int -> Str\n          (*) :: Int -> Int -> Int\n          x = \"a\" + 1 * 2\n          x\n        |]\n          str\n      , assertGeneralType\n          \"undeclared operator defaults to left-associative (type-verified)\"\n          [r|\n          (+) :: Int -> Str -> Int\n          x = 1 + \"a\" + \"b\"\n          x\n        |]\n          int\n      , -- Where-clause bindings with infix operators\n        assertGeneralType\n          \"infix operator in where binding (type-verified)\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Str -> Int\n          x = y where\n            y = 1 + \"a\"\n          x\n        |]\n          int\n      , assertGeneralType\n          \"multiple where bindings with different operators\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Str -> Int -> Str\n          (*) :: Int -> Int -> Int\n          x = (y, z) where\n            y = \"hello\" + 3\n            z = 2 * 4\n          x\n        |]\n          (tuple [str, int])\n      , -- Ambiguity and conflict errors\n        exprTestBad\n          \"non-associative operator chained\"\n          [r|\n          infix 6 ~~\n          (~~) :: Int -> Int -> Int\n          x = 1 ~~ 2 ~~ 3\n          x\n        |]\n      , exprTestBad\n          \"two non-associative operators at same precedence\"\n          [r|\n          infix 6 ~~\n          infix 6 @@\n          (~~) :: Int -> Int -> Int\n          (@@) :: Int -> Int -> Int\n          x = 1 ~~ 2 @@ 3\n          x\n        |]\n      , exprTestBad\n          \"left-assoc and right-assoc at same precedence\"\n          [r|\n          infixl 6 +\n          infixr 6 ++\n          (+) :: Int -> Int -> Int\n          (++) :: Int -> Int -> Int\n          x = 1 + 2 ++ 3\n          x\n        |]\n      , exprTestBad\n          \"conflicting fixity declarations for same operator\"\n          [r|\n          infixl 6 +\n          infixr 7 +\n          (+) :: Int -> Int -> Int\n          x = 1 + 2\n          x\n        |]\n      , -- Operators in various expression contexts\n        assertGeneralType\n          \"infix in parenthesized function argument\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Int -> Int\n          f :: Int -> Str\n          x = f (1 + 2)\n          x\n        |]\n          str\n      , assertGeneralType\n          \"infix in multiple function arguments\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Int -> Int -> Int\n          (*) :: Int -> Int -> Int\n          f :: Int -> Int -> Str\n          x = f (1 + 2) (3 * 4)\n          x\n        |]\n          str\n      , assertGeneralType\n          \"infix expressions as applied functions\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          infixr 9 .\n          (+) :: Int -> Int -> Int\n          (*) :: Int -> Int -> Int\n          (.) :: (b -> c) -> (a -> b) -> a -> c\n          foo x = ((+) 1 . (*) 2) x\n          foo\n        |]\n          (fun [int, int])\n      , assertGeneralType\n          \"infix in lambda body with asymmetric types\"\n          [r|\n          infixl 6 +\n          (+) :: Int -> Str -> Int\n          f = \\x y -> x + y\n          f\n        |]\n          (fun [int, str, int])\n      , assertGeneralType\n          \"infix across tuple elements with different result types\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Int -> Int -> Str\n          (*) :: Int -> Int -> Int\n          x = (1 + 2, 3 * 4)\n          x\n        |]\n          (tuple [str, int])\n      , assertGeneralType\n          \"mixed infix operators across list elements\"\n          [r|\n          infixl 6 +\n          infixl 7 *\n          (+) :: Int -> Int -> Int\n          (*) :: Int -> Int -> Int\n          xs = [1 + 2, 3 * 4, 5 + 6 * 7]\n          xs\n        |]\n          (lst int)\n      ]\n\n{- | Tests for typechecker complexity - these would timeout with O(2^n) behavior\nAll tests have a 0.1-second timeout to catch exponential blowup\n-}\ncomplexityRegressionTests :: TestTree\ncomplexityRegressionTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Complexity regression tests\"\n      [ -- Deep function composition - tests batch subtype optimization\n        assertGeneralType\n          \"deep identity composition\"\n          [r|\n          id :: a -> a\n          f = id (id (id (id (id (id (id (id (id (id 42)))))))))\n          f\n        |]\n          int\n      , assertGeneralType\n          \"deep function composition chain\"\n          [r|\n          id :: a -> a\n          (.) :: (b -> c) -> (a -> b) -> a -> c\n          f = id . id . id . id . id . id . id . id . id . id\n          f 42\n        |]\n          int\n      , -- Eta expansion - tests avoiding re-inference\n        assertGeneralType\n          \"nested lambdas returning functions\"\n          [r|\n          add :: Int -> Int -> Int\n          f = \\x -> add x\n          f\n        |]\n          (fun [int, int, int])\n      , assertGeneralType\n          \"deeply nested partial application\"\n          [r|\n          add3 :: Int -> Int -> Int -> Int\n          f = \\x -> \\y -> add3 x y\n          f\n        |]\n          (fun [int, int, int, int])\n      , assertGeneralType\n          \"lambda with multi-arg function body\"\n          [r|\n          add4 :: Int -> Int -> Int -> Int -> Int\n          g = \\a -> \\b -> add4 a b\n          g 1 2 3 4\n        |]\n          int\n      , -- Multi-argument function subtyping\n        assertGeneralType\n          \"many-argument function\"\n          [r|\n          f :: Int -> Int -> Int -> Int -> Int -> Int -> Int\n          f 1 2 3 4 5 6\n        |]\n          int\n      , assertGeneralType\n          \"polymorphic many-argument function\"\n          [r|\n          f :: a -> b -> c -> d -> e -> (a, b, c, d, e)\n          f 1 True \"x\" 2.0 [1]\n        |]\n          (tuple [int, bool, str, real, lst int])\n      , -- HOF shared type variable enforcement\n        exprTestBad\n          \"fold with (==) should fail: shared var c gets Bool and Str\"\n          [r|\n          fold :: (b -> a -> b) -> b -> [a] -> b\n          (==) :: c -> c -> Bool\n          test = fold (==) True [\"hello\", \"hello\"]\n          test\n        |]\n      , assertGeneralType\n          \"fold with (+) should succeed: shared var resolved consistently\"\n          [r|\n          fold :: (b -> a -> b) -> b -> [a] -> b\n          (+) :: Int -> Int -> Int\n          test = fold (+) 0 [1, 2, 3]\n          test\n        |]\n          int\n      , assertGeneralType\n          \"map with lambda using (==) should succeed: same type both args\"\n          [r|\n          map :: (a -> b) -> [a] -> [b]\n          (==) :: c -> c -> Bool\n          test = map (\\x -> x == x) [\"hello\"]\n          test\n        |]\n          (lst bool)\n      , -- zipSubtype path: type constructor with repeated variable\n        exprTestBad\n          \"zipSubtype: Pair a a cannot unify with Pair Bool Str\"\n          [r|\n          mkPair :: a -> Pair a a\n          consume :: Pair Bool Str -> Int\n          test = consume (mkPair True)\n          test\n        |]\n      , assertGeneralType\n          \"zipSubtype: Pair a a consistent with Pair Int Int\"\n          [r|\n          mkPair :: a -> Pair a a\n          fst :: Pair a b -> a\n          test = fst (mkPair 42)\n          test\n        |]\n          int\n      , -- Shared var in return: id passed where Bool -> Str expected\n        exprTestBad\n          \"shared var via HOF: id cannot satisfy Bool -> Str\"\n          [r|\n          apply :: (a -> b) -> a -> b\n          id :: a -> a\n          asStr :: Str -> Str\n          test = asStr (apply id True)\n          test\n        |]\n      , -- Triple-shared variable through HOF\n        exprTestBad\n          \"triple-shared var forced to different types through fold\"\n          [r|\n          fold :: (b -> a -> b) -> b -> [a] -> b\n          choose :: c -> c -> c\n          test = fold choose \"hello\" [1, 2]\n          test\n        |]\n      , -- Shared return var conflicts with arguments\n        exprTestBad\n          \"shared var return type conflicts with argument through fold\"\n          [r|\n          fold :: (b -> a -> b) -> b -> [a] -> b\n          weirdEq :: c -> c -> Str\n          test = fold weirdEq \"start\" [1, 2]\n          test\n        |]\n      , -- Two distinct shared vars both violated\n        exprTestBad\n          \"two distinct shared vars both inconsistent through HOF\"\n          [r|\n          hof :: (a -> b -> c -> d -> e) -> a -> b -> c -> d -> e\n          f :: x -> y -> x -> y -> Bool\n          test = hof f 1 \"hi\" True 42.0\n          test\n        |]\n      , -- Nested HOF shared var conflict\n        exprTestBad\n          \"nested HOF: shared var conflict through double application\"\n          [r|\n          apply :: (a -> b) -> a -> b\n          (==) :: c -> c -> Bool\n          test = apply (apply (==) True) \"hello\"\n          test\n        |]\n      , -- Regression: fold with (==) on consistent types should pass\n        assertGeneralType\n          \"fold with (==) consistent types: all Bool\"\n          [r|\n          fold :: (b -> a -> b) -> b -> [a] -> b\n          (==) :: c -> c -> Bool\n          test = fold (==) True [True, False]\n          test\n        |]\n          bool\n      , -- Regression: multiple shared vars all consistent\n        assertGeneralType\n          \"multiple shared vars consistent through HOF\"\n          [r|\n          hof :: (a -> b -> c -> d -> e) -> a -> b -> c -> d -> e\n          f :: x -> y -> x -> y -> Bool\n          test = hof f 1 2 3 4\n          test\n        |]\n          bool\n      ]\n\n-- Effect type helpers\nioEff :: TypeU -> TypeU\nioEff = EffectU ioEffectSet\n\n_errEff :: TypeU -> TypeU\n_errEff = EffectU (EffectSet (Set.singleton \"Error\"))\n\nemptyEff :: TypeU -> TypeU\nemptyEff = EffectU emptyEffectSet\n\neffectSubtypeTests :: TestTree\neffectSubtypeTests =\n  localOption (mkTimeout 100000) $ -- 0.1 second timeout\n    testGroup\n      \"Effect subtype tests\"\n      [ -- identical effect sets are subtypes of each other\n        assertSubtypeGamma \"<IO> A <: <IO> A\" [] (ioEff a) (ioEff a) []\n      , -- fewer effects is a subtype (IO subset of {IO,Error})\n        assertSubtypeGamma \"<IO> A <: <IO,Error> A\"\n          [] (ioEff a) (EffectU (EffectSet (Set.fromList [\"IO\", \"Error\"])) a) []\n      , -- empty effect set is subtype of any effect set\n        assertSubtypeGamma \"<> A <: <IO> A\" [] (emptyEff a) (ioEff a) []\n      , -- superset effects currently accepted (permissive behavior)\n        assertSubtypeGamma \"<IO,Error> A <: <IO> A (permissive)\"\n          [] (EffectU (EffectSet (Set.fromList [\"IO\", \"Error\"])) a) (ioEff a) []\n      , -- effect subtyping with function inner types\n        assertSubtypeGamma \"<IO> (A -> B) <: <IO> (A -> B)\"\n          [] (ioEff (fun [a, b])) (ioEff (fun [a, b])) []\n      , -- effect subtyping solves existentials in inner types\n        assertSubtypeGamma \"<a> -| <IO> <a> <: <IO> A |- <a>:A\"\n          [eag] (ioEff ea) (ioEff a) [solvedA a]\n      , -- effect subtyping solves existentials (reverse direction)\n        assertSubtypeGamma \"<a> -| <IO> A <: <IO> <a> |- <a>:A\"\n          [eag] (ioEff a) (ioEff ea) [solvedA a]\n      , -- effects compose with optional inner types\n        assertSubtypeGamma \"<IO> ?A <: <IO> ?A\"\n          [] (ioEff (OptionalU a)) (ioEff (OptionalU a)) []\n      , -- effects compose with list inner types\n        assertSubtypeGamma \"<IO> [A] <: <IO> [A]\"\n          [] (ioEff (lst a)) (ioEff (lst a)) []\n      , -- empty effects both sides\n        assertSubtypeGamma \"<> A <: <> A\" [] (emptyEff a) (emptyEff a) []\n      ]\n  where\n    a = var \"A\"\n    b = var \"B\"\n    ea = exist \"x1\"\n    eag = ExistG (TV \"x1\") ([], Open) ([], Open)\n    solvedA t = SolvedG (TV \"x1\") t\n\neffectSynthesisTests :: TestTree\neffectSynthesisTests =\n  localOption (mkTimeout 100000) $ -- 0.1 second timeout\n    testGroup\n      \"Effect synthesis tests\"\n      [ -- pure do-block with no effects infers empty effect set\n        assertGeneralType\n          \"pure do-block infers empty effects\"\n          [r|\n        module main (x)\n        x = do 42\n          |]\n          (emptyEff int)\n      , -- force operator collects effect from forced expression\n        assertGeneralType\n          \"do-block with force collects IO effect\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x = do !(f 1)\n          |]\n          (ioEff int)\n      , -- force in tuple: effects collected, tuple gets plain inner types\n        assertGeneralType\n          \"do-block with tuple of forces\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x = do (!(f 1), !(f 2))\n          |]\n          (ioEff (tuple [int, int]))\n      , -- force in function args: effects bubble up, function sees plain args\n        assertGeneralType\n          \"do-block with forces in function args\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        add :: Int -> Int -> Int\n        x = do add !(f 1) !(f 2)\n          |]\n          (ioEff int)\n      , -- bind extracts value from effectful expr, adds effect to block\n        assertGeneralType\n          \"do-block with bind\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x = do\n            y <- f 1\n            y\n          |]\n          (ioEff int)\n      , -- multiple binds collect effects from all bound expressions\n        assertGeneralType\n          \"do-block with chained binds\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        add :: Int -> Int -> Int\n        x = do\n            a <- f 1\n            b <- f 2\n            add a b\n          |]\n          (ioEff int)\n      , -- let with pure RHS in do-block infers empty effect\n        assertGeneralType\n          \"do-block with pure let binding\"\n          [r|\n        module main (x)\n        x = do\n            let y = 1\n            y\n          |]\n          (emptyEff int)\n      , -- let with forced RHS collects effect from the force\n        assertGeneralType\n          \"do-block with bind and let\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        add :: Int -> Int -> Int\n        x = do\n            y <- f 1\n            let z = add y 1\n            z\n          |]\n          (ioEff int)\n      , -- pure value auto-coerces to effectful when annotation demands it\n        assertGeneralType\n          \"pure value coerces to effectful via annotation\"\n          [r|\n        module main (x)\n        x :: <IO> Int\n        x = 42\n          |]\n          (ioEff int)\n      , -- pure expression in do-block produces empty effects\n        assertGeneralType\n          \"pure expression in do-block\"\n          [r|\n        module main (x)\n        add :: Int -> Int -> Int\n        x = do add 1 2\n          |]\n          (emptyEff int)\n      , -- forces with different effects produce EffectUnion\n        -- (order: IO first because it appears first in the application)\n        assertGeneralType\n          \"do-block with multiple effect labels\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        g :: Int -> <Error> Int\n        add :: Int -> Int -> Int\n        x = do add !(f 1) !(g 2)\n          |]\n          (EffectU (EffectUnion (EffectSet (Set.singleton \"IO\")) (EffectSet (Set.singleton \"Error\"))) int)\n      , -- bind and force mix in same do-block, effects combine\n        assertGeneralType\n          \"do-block mixing bind and force\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        add :: Int -> Int -> Int\n        x = do\n            y <- f 1\n            add y !(f 2)\n          |]\n          (ioEff int)\n      , -- chained binds feeding results forward\n        assertGeneralType\n          \"do-block with chained dependent binds\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        add :: Int -> Int -> Int\n        x = do\n            a <- f 1\n            b <- f a\n            add a b\n          |]\n          (ioEff int)\n      , -- do-block with effect annotation matching inferred effects\n        assertGeneralType\n          \"annotated do-block matches inferred effects\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x :: <IO> Int\n        x = do\n            y <- f 1\n            y\n          |]\n          (ioEff int)\n      , -- polymorphic function applied inside do-block\n        assertGeneralType\n          \"polymorphic function in do-block\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        id :: a -> a\n        x = do !(f (id 42))\n          |]\n          (ioEff int)\n      , -- do-block returning a list with forces\n        assertGeneralType\n          \"do-block returning list\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x = do [!(f 1), !(f 2)]\n          |]\n          (ioEff (lst int))\n      ]\n\neffectErrorTests :: TestTree\neffectErrorTests =\n  localOption (mkTimeout 100000) $ -- 0.1 second timeout\n    testGroup\n      \"Effect error tests\"\n      [ -- forcing a non-effectful expression should fail\n        exprTestBad\n          \"force on non-effectful value\"\n          [r|\n        module main (x)\n        x = do !(42)\n          |]\n      , -- type mismatch inside do-block force should fail\n        exprTestBad\n          \"type mismatch inside do-block force\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x = do !(f \"hello\")\n          |]\n      , -- effectful type where plain type expected should fail\n        exprTestBad\n          \"effectful type where plain type expected\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        g :: Int -> Int\n        x = g (f 1)\n          |]\n      ]\n\nnamespaceErrorTests :: TestTree\nnamespaceErrorTests =\n  localOption (mkTimeout 2000000) $ -- 2 second timeout\n    testGroup\n      \"Tests for namespace import error cases\"\n      [ -- chained namespace dots should be a parse error\n        exprTestBad\n          \"chained namespace dots a.b.c\"\n          [r|\n        module main (x)\n        x :: Int\n        x = a.b.c\n          |]\n      , -- keyword used as namespace should fail\n        exprTestBad\n          \"keyword as namespace name (let)\"\n          [r|\n        module foo (y)\n        y :: Int\n        y = 1\n        module main (x)\n        import foo as let\n        x :: Int\n        x = let.y\n          |]\n      , -- keyword used as namespace should fail\n        exprTestBad\n          \"keyword as namespace name (do)\"\n          [r|\n        module foo (y)\n        y :: Int\n        y = 1\n        module main (x)\n        import foo as do\n        x :: Int\n        x = do.y\n          |]\n      , -- undefined namespace prefix should fail\n        exprTestBad\n          \"undefined namespace prefix\"\n          [r|\n        module main (x)\n        x :: Int\n        x = noexist.foo 5\n          |]\n      , -- namespace-qualified name used with wrong arg type\n        exprTestBad\n          \"namespace qualified name type mismatch\"\n          [r|\n        module helpers (double)\n        double :: Int -> Int\n        double x = x\n        module main (x)\n        import helpers as h\n        x :: Int\n        x = h.double \"hello\"\n          |]\n      , -- bare name should fail when imported with namespace\n        exprTestBad\n          \"bare name fails with namespace import\"\n          [r|\n        module helpers (double)\n        double :: Int -> Int\n        double x = x\n        module main (x)\n        import helpers as h\n        x :: Int\n        x = double 5\n          |]\n      ]\n\ntypeclassTests :: TestTree\ntypeclassTests =\n  localOption (mkTimeout 200000) $ -- 0.2 second timeout\n    testGroup\n      \"Typeclass tests\"\n      [ -- === ANNOTATION PROPAGATION FIX (the core bug) ===\n        -- Annotation leaked through copyState/reindexExprI to implementation\n        -- indices, causing checkG to wrongly constrain non-matching instances.\n\n        -- Instance declaration order must not matter\n        assertGeneralType\n          \"annotation selects Str instance (Str declared first)\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        x :: Str\n        x = mempty :: Str\n          |]\n          str\n\n      , assertGeneralType\n          \"annotation selects Str instance (List declared first)\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid (List a) where\n          mempty = []\n        instance Monoid Str where\n          mempty = \"\"\n        x :: Str\n        x = mempty :: Str\n          |]\n          str\n\n      , -- Annotation selects the parametric instance\n        assertGeneralType\n          \"annotation selects List instance\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        x :: [Int]\n        x = mempty :: [Int]\n          |]\n          (lst int)\n\n      , -- Export signature alone (no inline annotation) resolves the instance\n        assertGeneralType\n          \"export signature resolves instance without inline annotation\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        x :: Str\n        x = mempty\n          |]\n          str\n\n      , -- === MONOMORPHIC ANNOTATION FIX ===\n        -- Annotation on standalone polymorphic functions (not typeclass methods)\n        -- leaked via copyState to MonomorphicExpr implementation indices.\n\n        assertGeneralType\n          \"annotation on standalone polymorphic function with args\"\n          [r|\n        module main (foo)\n        type Py => Int = \"int\"\n        myId :: a -> a\n        source Py (\"lambda x: x\" as myId)\n        foo :: Int\n        foo = (myId :: Int -> Int) 42\n          |]\n          int\n\n      , assertGeneralType\n          \"annotation on standalone polymorphic nullary function\"\n          [r|\n        module main (foo)\n        type Py => Real = \"float\"\n        myVal :: a\n        source Py (\"lambda: 3.14\" as myVal)\n        foo :: Real\n        foo = myVal :: Real\n          |]\n          real\n\n      , -- === SUPERCLASS CONSTRAINTS ===\n\n        assertGeneralType\n          \"superclass method usable with subclass instance\"\n          [r|\n        module main (x)\n        type Py => Str = \"str\"\n        class Semigroup a where\n          append :: a -> a -> a\n        class Semigroup a => Monoid a where\n          mempty :: a\n        instance Semigroup Str where\n          source Py from \"foo.py\" (\"appendStr\" as append)\n        instance Monoid Str where\n          mempty = \"\"\n        x :: Str\n        x = append \"\" \"\"\n          |]\n          str\n\n      , -- === NEGATIVE TESTS ===\n\n        exprTestBad\n          \"ambiguous: multiple instances, no annotation\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        x = mempty\n          |]\n\n      , exprTestBad\n          \"no matching instance for annotated type\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        x :: Real\n        x = mempty :: Real\n          |]\n\n      , exprTestBad\n          \"no instances defined\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        x :: Str\n        x = mempty\n          |]\n\n      , exprTestBad\n          \"annotation contradicts export signature\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        x :: Int\n        x = mempty :: Str\n          |]\n\n      , -- === COERCION-AWARE INSTANCE RESOLUTION ===\n\n        assertGeneralType\n          \"instance resolved through optional coercion\"\n          [r|\n        module main (x)\n        class Default a where\n          def :: a\n        instance Default Int where\n          def = 0\n        x :: ?Int\n        x = def\n          |]\n          (OptionalU int)\n\n      , assertGeneralType\n          \"typeclass method in effectful do-block\"\n          [r|\n        module main (x)\n        class Default a where\n          def :: a\n        instance Default Int where\n          def = 0\n        f :: Int -> <IO> Int\n        x = do !(f def)\n          |]\n          (ioEff int)\n\n      , -- === INTERACTION WITH OTHER FEATURES ===\n\n        assertGeneralType\n          \"typeclass method in let binding\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        x :: Str\n        x =\n          let y = (mempty :: Str)\n          in y\n          |]\n          str\n\n      , assertGeneralType\n          \"typeclass method resolved by function argument context\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid Str where\n          mempty = \"\"\n        instance Monoid (List a) where\n          mempty = []\n        f :: Str -> Str\n        x :: Str\n        x = f mempty\n          |]\n          str\n\n      , assertGeneralType\n          \"class with multiple nullary methods\"\n          [r|\n        module main (x)\n        class Bounded a where\n          minBound :: a\n          maxBound :: a\n        instance Bounded Int where\n          minBound = 0\n          maxBound = 100\n        x :: Int\n        x = minBound\n          |]\n          int\n\n      , assertGeneralType\n          \"nested parametric instance\"\n          [r|\n        module main (x)\n        class Monoid a where\n          mempty :: a\n        instance Monoid (List a) where\n          mempty = []\n        x :: [[Int]]\n        x = mempty\n          |]\n          (lst (lst int))\n      ]\n\nnatErrorTests :: TestTree\nnatErrorTests =\n  testGroup\n    \"nat typecheck errors\"\n    [ expectError\n        \"add dimension mismatch (4 != 5)\"\n        [r|\n      module main (x)\n      type Tensor2 d1 d2 a\n      add :: Tensor2 m n Real -> Tensor2 m n Real -> Tensor2 m n Real\n      a :: Tensor2 3 4 Real\n      b :: Tensor2 3 5 Real\n      x = add a b\n        |]\n    , expectError\n        \"matmul inner dimension mismatch (4 != 5)\"\n        [r|\n      module main (x)\n      type Tensor2 d1 d2 a\n      matmul :: Tensor2 m k Real -> Tensor2 k n Real -> Tensor2 m n Real\n      a :: Tensor2 3 4 Real\n      b :: Tensor2 5 6 Real\n      x = matmul a b\n        |]\n    , expectError\n        \"trace requires square matrix (3 != 4)\"\n        [r|\n      module main (x)\n      type Tensor2 d1 d2 a\n      trace :: Tensor2 n n Real -> Real\n      a :: Tensor2 3 4 Real\n      x = trace a\n        |]\n    , expectError\n        \"dot product length mismatch (3 != 5)\"\n        [r|\n      module main (x)\n      type Tensor1 d1 a\n      dot :: Tensor1 n Real -> Tensor1 n Real -> Real\n      a :: Tensor1 3 Real\n      b :: Tensor1 5 Real\n      x = dot a b\n        |]\n    , expectError\n        \"vstack column dimension mismatch (3 != 4)\"\n        [r|\n      module main (x)\n      type Tensor2 d1 d2 a\n      vstack :: Tensor2 m n Real -> Tensor2 p n Real -> Tensor2 m n Real\n      a :: Tensor2 2 3 Real\n      b :: Tensor2 4 4 Real\n      x = vstack a b\n        |]\n    , expectError\n        \"nat arithmetic mismatch: (2+3) != 4\"\n        [r|\n      module main (x)\n      type SizedList n a = [a]\n      append :: SizedList m a -> SizedList n a -> SizedList (m + n) a\n      a :: SizedList 2 Int\n      b :: SizedList 3 Int\n      x :: SizedList 4 Int\n      x = append a b\n        |]\n    ]\n\nnatArithTests :: TestTree\nnatArithTests =\n  testGroup\n    \"nat arithmetic (sub, div, solver fix)\"\n    [ -- NatSolver unit tests\n      testCase \"ground subtraction: (10 - 3) ~ 7\" $\n        let e1 = NS.NatSub (NS.NatLit 10) (NS.NatLit 3)\n            e2 = NS.NatLit 7\n        in case NS.solveNat e1 e2 of\n             Right subs -> assertEqual \"\" subs Map.empty\n             Left err -> assertFailure $ \"Expected success, got: \" ++ show err\n    , testCase \"ground division: (12 / 4) ~ 3\" $\n        let e1 = NS.NatDiv (NS.NatLit 12) (NS.NatLit 4)\n            e2 = NS.NatLit 3\n        in case NS.solveNat e1 e2 of\n             Right subs -> assertEqual \"\" subs Map.empty\n             Left err -> assertFailure $ \"Expected success, got: \" ++ show err\n    , testCase \"subtraction mismatch: (10 - 3) ~ 8\" $\n        let e1 = NS.NatSub (NS.NatLit 10) (NS.NatLit 3)\n            e2 = NS.NatLit 8\n        in case NS.solveNat e1 e2 of\n             Left NS.Contradiction -> return ()\n             other -> assertFailure $ \"Expected Contradiction, got: \" ++ show other\n    , testCase \"division mismatch: (12 / 4) ~ 4\" $\n        let e1 = NS.NatDiv (NS.NatLit 12) (NS.NatLit 4)\n            e2 = NS.NatLit 4\n        in case NS.solveNat e1 e2 of\n             Left NS.Contradiction -> return ()\n             other -> assertFailure $ \"Expected Contradiction, got: \" ++ show other\n    , testCase \"subtraction with variable: n - 3 ~ 5 => n = 8\" $\n        let e1 = NS.NatSub (NS.NatVar (TV \"n\")) (NS.NatLit 3)\n            e2 = NS.NatLit 5\n        in case NS.solveNat e1 e2 of\n             Right subs -> assertEqual \"\" (Map.singleton (TV \"n\") (NS.NatLit 8)) subs\n             Left err -> assertFailure $ \"Expected n=8, got: \" ++ show err\n    , testCase \"division by constant: n / 3 with n = 9 / 3 ~ 3\" $\n        let e1 = NS.NatDiv (NS.NatLit 9) (NS.NatLit 3)\n            e2 = NS.NatLit 3\n        in case NS.solveNat e1 e2 of\n             Right subs -> assertEqual \"\" subs Map.empty\n             Left err -> assertFailure $ \"Expected success, got: \" ++ show err\n    -- extractLinearVar soundness fix: i*j ~ n must be Deferred, not n=0\n    , testCase \"extractLinearVar fix: i*j ~ n is Deferred\" $\n        let e1 = NS.NatMul (NS.NatVar (TV \"i\")) (NS.NatVar (TV \"j\"))\n            e2 = NS.NatVar (TV \"n\")\n        in case NS.solveNat e1 e2 of\n             Left (NS.Deferred _) -> return ()\n             Right subs -> assertFailure $ \"Expected Deferred, got solved: \" ++ show subs\n             Left NS.Contradiction -> assertFailure \"Expected Deferred, got Contradiction\"\n    , testCase \"linear solving still works: n + 3 ~ 8 => n = 5\" $\n        let e1 = NS.NatAdd (NS.NatVar (TV \"n\")) (NS.NatLit 3)\n            e2 = NS.NatLit 8\n        in case NS.solveNat e1 e2 of\n             Right subs -> assertEqual \"\" (Map.singleton (TV \"n\") (NS.NatLit 5)) subs\n             Left err -> assertFailure $ \"Expected n=5, got: \" ++ show err\n    , testCase \"simple variable solving: n ~ 5\" $\n        let e1 = NS.NatVar (TV \"n\")\n            e2 = NS.NatLit 5\n        in case NS.solveNat e1 e2 of\n             Right subs -> assertEqual \"\" (Map.singleton (TV \"n\") (NS.NatLit 5)) subs\n             Left err -> assertFailure $ \"Expected n=5, got: \" ++ show err\n    -- Typechecker integration tests for sub/div syntax\n    , expectError\n        \"ground subtraction mismatch: (10-3) != 8 in type annotation\"\n        [r|\n      module main (x)\n      type SizedList n a = [a]\n      a :: SizedList (10 - 3) Int\n      x :: SizedList 8 Int\n      x = a\n        |]\n    , expectError\n        \"ground division mismatch: (12/4) != 4 in type annotation\"\n        [r|\n      module main (x)\n      type SizedList n a = [a]\n      a :: SizedList (12 / 4) Int\n      x :: SizedList 4 Int\n      x = a\n        |]\n    -- deferred constraint re-checking: variable arithmetic caught after solving\n    , expectError\n        \"deferred subtraction mismatch: m=8, n=3, but m-n used as 7\"\n        [r|\n      module main (x)\n      type SizedList n a = [a]\n      take :: SizedList (m - n) a -> SizedList n a -> SizedList m a\n      a :: SizedList 7 Int\n      b :: SizedList 3 Int\n      x :: SizedList 8 Int\n      x = take a b\n        |]\n    , expectError\n        \"deferred multiplication mismatch: n*m=12 but n=5 (no integer m)\"\n        [r|\n      module main (x)\n      type SizedList n a = [a]\n      split :: SizedList (n * m) a -> SizedList n a\n      a :: SizedList 12 Int\n      x :: SizedList 5 Int\n      x = split a\n        |]\n    ]\n\nnatLabelTests :: TestTree\nnatLabelTests =\n  testGroup\n    \"nat labeled params (m:Int syntax)\"\n    [ -- === Positive: literal int args resolve nat vars ===\n      assertRawType\n        \"labeled literal resolves dimension: makeVec 5 :: Tensor1 5 Real\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = makeVec 5\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 5, VarU (TV \"Real\")])\n    , assertRawType\n        \"labeled literal zero dimension: makeVec 0 :: Tensor1 0 Real\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = makeVec 0\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 0, VarU (TV \"Real\")])\n    , assertRawType\n        \"two labeled params resolve: makeMat 3 4 :: Tensor2 3 4 Real\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      makeMat :: m:Int -> n:Int -> Tensor2 m n Real\n      x = makeMat 3 4\n        |]\n        (AppU (VarU (TV \"Tensor2\")) [NatLitU 3, NatLitU 4, VarU (TV \"Real\")])\n    , assertRawType\n        \"labeled dims flow through generic op: id_ (makeVec 7) :: Tensor1 7 Real\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      id_ :: Tensor1 n Real -> Tensor1 n Real\n      x = id_ (makeVec 7)\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 7, VarU (TV \"Real\")])\n    , assertRawType\n        \"labeled dims with nat arithmetic: conv output dims computed\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      type Tensor3 (d1 :: Nat) (d2 :: Nat) (d3 :: Nat) a\n      makeImg :: h:Int -> w:Int -> Tensor2 h w Real\n      makeK :: k:Int -> fh:Int -> fw:Int -> Tensor3 k fh fw Real\n      type Tensor1 (d :: Nat) a = [a]\n      makeB :: k:Int -> Tensor1 k Real\n      conv :: Tensor2 h w Real -> Tensor3 k fh fw Real -> Tensor1 k Real -> Tensor3 k (h - fh + 1) (w - fw + 1) Real\n      x = conv (makeImg 5 5) (makeK 2 3 3) (makeB 2)\n        |]\n        (AppU (VarU (TV \"Tensor3\")) [NatLitU 2, NatLitU 3, NatLitU 3, VarU (TV \"Real\")])\n    , assertRawType\n        \"labeled + flatten nat arithmetic: 2*3*3 = 18\"\n        [r|\n      module main (x)\n      type Tensor3 (d1 :: Nat) (d2 :: Nat) (d3 :: Nat) a\n      type Tensor1 (d :: Nat) a = [a]\n      makeT :: a:Int -> b:Int -> c:Int -> Tensor3 a b c Real\n      flatten :: Tensor3 a b c Real -> Tensor1 (a * b * c) Real\n      x = flatten (makeT 2 3 3)\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 18, VarU (TV \"Real\")])\n    , assertRawType\n        \"mixed labeled and unlabeled args\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      makeT :: m:Int -> n:Int -> Tensor2 m n Real\n      scale :: Real -> Tensor2 m n Real -> Tensor2 m n Real\n      x = scale 2.0 (makeT 3 4)\n        |]\n        (AppU (VarU (TV \"Tensor2\")) [NatLitU 3, NatLitU 4, VarU (TV \"Real\")])\n    , assertRawType\n        \"same label var used in two positions (diagonal)\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      eye :: n:Int -> Tensor2 n n Real\n      x = eye 4\n        |]\n        (AppU (VarU (TV \"Tensor2\")) [NatLitU 4, NatLitU 4, VarU (TV \"Real\")])\n\n    -- === Positive: let-bound integers resolve nat labels ===\n    , assertRawType\n        \"let-bound int resolves label: let n = 5 in makeVec n\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = let n = 5 in makeVec n\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 5, VarU (TV \"Real\")])\n    , assertRawType\n        \"chained let-bound: let a = 7 in let b = a in makeVec b\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = let a = 7 in let b = a in makeVec b\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 7, VarU (TV \"Real\")])\n    , assertRawType\n        \"multiple let-bound dims: let m=3, n=4 in makeMat m n\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      makeMat :: m:Int -> n:Int -> Tensor2 m n Real\n      x = let m = 3 in let n = 4 in makeMat m n\n        |]\n        (AppU (VarU (TV \"Tensor2\")) [NatLitU 3, NatLitU 4, VarU (TV \"Real\")])\n\n    -- === Positive: tuple accessor evaluation ===\n    , assertRawType\n        \"tuple accessor resolves: makeVec (.0 (5, 6))\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = makeVec (.0 (5, 6))\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 5, VarU (TV \"Real\")])\n    , assertRawType\n        \"let-bound tuple + accessor: let dims = (3,4) in makeMat (.0 dims) (.1 dims)\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      makeMat :: m:Int -> n:Int -> Tensor2 m n Real\n      x = let dims = (3, 4) in makeMat (.0 dims) (.1 dims)\n        |]\n        (AppU (VarU (TV \"Tensor2\")) [NatLitU 3, NatLitU 4, VarU (TV \"Real\")])\n    , assertRawType\n        \"chained let + accessor: let d=(8,9); let n=.0 d in makeVec n\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = let d = (8, 9) in let n = .0 d in makeVec n\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 8, VarU (TV \"Real\")])\n\n    -- === Positive: lambda application evaluation ===\n    , assertRawType\n        \"identity lambda: makeVec ((\\\\x -> x) 5)\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = makeVec ((\\n -> n) 5)\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 5, VarU (TV \"Real\")])\n    , assertRawType\n        \"lambda + accessor: makeVec ((\\\\t -> .1 t) (1,2,3))\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = makeVec ((\\t -> .1 t) (1, 2, 3))\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 2, VarU (TV \"Real\")])\n    , assertRawType\n        \"lambda selects first of two args: (\\\\x y -> x) 7 99\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x = makeVec ((\\a b -> a) 7 99)\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 7, VarU (TV \"Real\")])\n\n    -- === Negative: dimension mismatches caught despite labels ===\n    , expectError\n        \"labeled dim mismatch: add (makeT 3 4) (makeT 3 5) fails\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      makeT :: m:Int -> n:Int -> Tensor2 m n Real\n      add :: Tensor2 m n Real -> Tensor2 m n Real -> Tensor2 m n Real\n      x = add (makeT 3 4) (makeT 3 5)\n        |]\n    , expectError\n        \"labeled dim mismatch: dot product length mismatch\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      dot :: Tensor1 n Real -> Tensor1 n Real -> Real\n      x = dot (makeVec 3) (makeVec 5)\n        |]\n    , expectError\n        \"labeled dim mismatch through arithmetic: conv wrong kernel size\"\n        [r|\n      module main (x)\n      type Tensor2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      type Tensor3 (d1 :: Nat) (d2 :: Nat) (d3 :: Nat) a\n      type Tensor1 (d :: Nat) a = [a]\n      makeImg :: h:Int -> w:Int -> Tensor2 h w Real\n      makeK :: k:Int -> fh:Int -> fw:Int -> Tensor3 k fh fw Real\n      makeB :: k:Int -> Tensor1 k Real\n      conv :: Tensor2 h w Real -> Tensor3 k fh fw Real -> Tensor1 k Real -> Tensor3 k (h - fh + 1) (w - fw + 1) Real\n      x :: Tensor3 2 3 4 Real\n      x = conv (makeImg 5 5) (makeK 2 3 3) (makeB 2)\n        |]\n    , expectError\n        \"annotated return type contradicts labeled resolution\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> Tensor1 n Real\n      x :: Tensor1 99 Real\n      x = makeVec 5\n        |]\n\n    -- === Interesting edge cases ===\n    , assertRawType\n        \"no labels: plain nat vars remain generic\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      id_ :: Tensor1 n Real -> Tensor1 n Real\n      a :: Tensor1 5 Real\n      x = id_ a\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 5, VarU (TV \"Real\")])\n    , assertRawType\n        \"label on non-first param position\"\n        [r|\n      module main (x)\n      type Tensor1 (d :: Nat) a = [a]\n      makeFrom :: Real -> n:Int -> Tensor1 n Real\n      x = makeFrom 1.0 10\n        |]\n        (AppU (VarU (TV \"Tensor1\")) [NatLitU 10, VarU (TV \"Real\")])\n    ]\n\nnatKindPromotionTests :: TestTree\nnatKindPromotionTests =\n  testGroup\n    \"nat kind promotion and cross-feature interaction\"\n    [\n    -- ================================================================\n    -- (:: Nat) annotation effects on type variable promotion\n    -- ================================================================\n    -- When a typedef has (d :: Nat), variables at that position MUST be\n    -- promoted from VarU to NatVarU. Without this promotion, nat label\n    -- resolution and nat constraint solving both silently fail.\n\n      assertRawType\n        \"with (:: Nat): labels resolve to concrete dimensions\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      makeVec :: n:Int -> T1 n Real\n      x = makeVec 5\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 5, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"without (:: Nat): labels do NOT resolve (dim stays generic)\"\n        [r|\n      module main (x)\n      type T1 d a = [a]\n      makeVec :: n:Int -> T1 n Real\n      x = makeVec 5\n        |]\n        -- Without :: Nat, n is VarU not NatVarU, so resolveNatLabels cannot\n        -- find nat vars to solve. The type stays generic (existential).\n        (AppU (VarU (TV \"T1\")) [ExistU (TV \"a\") ([], Open) ([], Open), VarU (TV \"Real\")])\n\n    -- ================================================================\n    -- Nat constraint solving requires (:: Nat) promotion\n    -- ================================================================\n\n    , expectError\n        \"with (:: Nat): dimension mismatch is caught (4 != 5)\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      dot :: T1 n Real -> T1 n Real -> Real\n      a :: T1 4 Real\n      b :: T1 5 Real\n      x = dot a b\n        |]\n\n    -- Without (:: Nat), the params are still NatLitU from the literal syntax.\n    -- The typechecker catches the mismatch through existential solving since\n    -- the NatLit values 4 and 5 cannot unify. This is actually sound.\n    , expectError\n        \"without (:: Nat): dimension mismatch still caught via existentials\"\n        [r|\n      module main (x)\n      type T1 d a = [a]\n      dot :: T1 n Real -> T1 n Real -> Real\n      a :: T1 4 Real\n      b :: T1 5 Real\n      x = dot a b\n        |]\n\n    -- ================================================================\n    -- Multi-step composition: labels propagate through chains\n    -- ================================================================\n\n    , assertRawType\n        \"labeled dims propagate through 3-function chain\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 n Real\n      f :: T1 n Real -> T1 n Real\n      g :: T1 n Real -> T1 n Real\n      x = g (f (makeVec 9))\n      makeVec :: n:Int -> T1 n Real\n      x = g (f (make 9))\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 9, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"labeled dims propagate through let chain\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 n Real\n      f :: T1 n Real -> T1 n Real\n      x = let a = make 4\n          in let b = f a\n          in f b\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 4, VarU (TV \"Real\")])\n\n    -- ================================================================\n    -- Nat arithmetic with labeled params\n    -- ================================================================\n\n    , assertRawType\n        \"labeled subtraction: h=10 w=3, h-w+1 = 8\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: h:Int -> w:Int -> T1 (h - w + 1) Real\n      x = make 10 3\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 8, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"labeled multiplication: m=3 n=4, m*n = 12\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: m:Int -> n:Int -> T1 (m * n) Real\n      x = make 3 4\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 12, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"labeled division: n=12 d=4, n/d = 3\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> d:Int -> T1 (n / d) Real\n      x = make 12 4\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 3, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"compound arithmetic: a=6 b=2 c=1, (a*b)-c = 11\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: a:Int -> b:Int -> c:Int -> T1 (a * b - c) Real\n      x = make 6 2 1\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 11, VarU (TV \"Real\")])\n\n    -- ================================================================\n    -- Negative: arithmetic mismatches with labels\n    -- ================================================================\n\n    , expectError\n        \"labeled arithmetic mismatch: 10-3+1=8 but annotated as 7\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: h:Int -> w:Int -> T1 (h - w + 1) Real\n      x :: T1 7 Real\n      x = make 10 3\n        |]\n\n    , expectError\n        \"labeled multiplication mismatch: 3*4=12 but used where 11 expected\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: m:Int -> n:Int -> T1 (m * n) Real\n      consume :: T1 11 Real -> Int\n      x = consume (make 3 4)\n        |]\n\n    -- ================================================================\n    -- Cross-feature: nat dims with optional types\n    -- ================================================================\n\n    , assertRawType\n        \"optional tensor: ?(T1 n Real) with labeled dim\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      tryMake :: n:Int -> ?(T1 n Real)\n      x = tryMake 5\n        |]\n        (OptionalU (AppU (VarU (TV \"T1\")) [NatLitU 5, VarU (TV \"Real\")]))\n\n    -- ================================================================\n    -- Cross-feature: nat dims with effect types\n    -- ================================================================\n\n    , assertRawType\n        \"effectful tensor: <IO> T1 n Real with labeled dim\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      ioMake :: n:Int -> <IO> T1 n Real\n      x = ioMake 5\n        |]\n        (EffectU (EffectSet (Set.singleton \"IO\"))\n          (AppU (VarU (TV \"T1\")) [NatLitU 5, VarU (TV \"Real\")]))\n\n    -- ================================================================\n    -- Nat dims nested inside other type constructors\n    -- ================================================================\n\n    , assertRawType\n        \"list of nat-parameterized type: [T1 n Real]\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> [T1 n Real]\n      x = make 5\n        |]\n        (AppU (VarU (TV \"List\")) [AppU (VarU (TV \"T1\")) [NatLitU 5, VarU (TV \"Real\")]])\n\n    , assertRawType\n        \"tuple of nat-parameterized types\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: m:Int -> n:Int -> (T1 m Real, T1 n Real)\n      x = make 3 7\n        |]\n        (AppU (VarU (TV \"Tuple2\"))\n          [ AppU (VarU (TV \"T1\")) [NatLitU 3, VarU (TV \"Real\")]\n          , AppU (VarU (TV \"T1\")) [NatLitU 7, VarU (TV \"Real\")]\n          ])\n\n    -- ================================================================\n    -- Multiple nat-parameterized typedefs interacting\n    -- ================================================================\n\n    , assertRawType\n        \"conversion between two nat-parameterized types\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      type T2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      flatten :: T2 m n Real -> T1 (m * n) Real\n      make :: m:Int -> n:Int -> T2 m n Real\n      x = flatten (make 3 4)\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 12, VarU (TV \"Real\")])\n\n    , expectError\n        \"cross-type dim mismatch: flatten 3x4=12 but consume expects 11\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      type T2 (d1 :: Nat) (d2 :: Nat) a = [[a]]\n      flatten :: T2 m n Real -> T1 (m * n) Real\n      make :: m:Int -> n:Int -> T2 m n Real\n      consume :: T1 11 Real -> Int\n      x = consume (flatten (make 3 4))\n        |]\n\n    -- ================================================================\n    -- Partial nat annotation: some params Nat, some not\n    -- ================================================================\n\n    , assertRawType\n        \"mixed params: first is Nat, second is Type\"\n        [r|\n      module main (x)\n      type Sized (n :: Nat) a = [a]\n      make :: n:Int -> Sized n Int\n      x = make 10\n        |]\n        (AppU (VarU (TV \"Sized\")) [NatLitU 10, VarU (TV \"Int\")])\n\n    -- ================================================================\n    -- Negative: bad label syntax and semantics\n    -- ================================================================\n\n    -- Label n resolves nothing in return type (m is a different var), so\n    -- m stays generic. This is valid — the label just has no effect.\n    -- m is promoted to NatVarU (it's in a :: Nat position) but stays unresolved.\n    , assertRawType\n        \"label on param that doesn't appear in return type: dim stays generic\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 m Real\n      x = make 5\n        |]\n        (AppU (VarU (TV \"T1\")) [NatVarU (TV \"a\"), VarU (TV \"Real\")])\n\n    -- ================================================================\n    -- Edge: nat literal 0 in arithmetic\n    -- ================================================================\n\n    , assertRawType\n        \"nat literal 0 in subtraction: n-0 = n\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 (n - 0) Real\n      x = make 7\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 7, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"nat literal 1 in multiplication: n*1 = n\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 (n * 1) Real\n      x = make 7\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 7, VarU (TV \"Real\")])\n\n    , assertRawType\n        \"nat literal 0 in multiplication: n*0 = 0\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 (n * 0) Real\n      x = make 7\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 0, VarU (TV \"Real\")])\n\n    -- ================================================================\n    -- Regression: multiple exports with nat dims\n    -- ================================================================\n\n    , testCase \"multiple exports all resolve independently\" $ do\n        result <- runFrontRaw [r|\n      module main (x, y)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 n Real\n      x = make 3\n      y = make 7\n          |]\n        case result of\n          Right [xExpr, yExpr] -> do\n            let xt = MTI.cleanTypeName . renameExistentials . gtypeof $ xExpr\n                yt = MTI.cleanTypeName . renameExistentials . gtypeof $ yExpr\n                expected3 = MTI.cleanTypeName $ AppU (VarU (TV \"T1\")) [NatLitU 3, VarU (TV \"Real\")]\n                expected7 = MTI.cleanTypeName $ AppU (VarU (TV \"T1\")) [NatLitU 7, VarU (TV \"Real\")]\n            assertEqual \"first export\" (closeExistentials expected3) (closeExistentials xt)\n            assertEqual \"second export\" (closeExistentials expected7) (closeExistentials yt)\n          Right other -> assertFailure $ \"Expected 2 exports, got \" ++ show (length other)\n          Left e -> assertFailure $ \"Unexpected error: \" ++ show e\n\n    -- ================================================================\n    -- Regression: large nat literals\n    -- ================================================================\n\n    , assertRawType\n        \"large nat literal: 1000000\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      make :: n:Int -> T1 n Real\n      x = make 1000000\n        |]\n        (AppU (VarU (TV \"T1\")) [NatLitU 1000000, VarU (TV \"Real\")])\n\n    -- ================================================================\n    -- Negative: nat constraint contradictions caught through labels\n    -- ================================================================\n\n    , expectError\n        \"labeled: same label used for different values\"\n        [r|\n      module main (x)\n      type T1 (d :: Nat) a = [a]\n      combine :: T1 n Real -> T1 n Real -> T1 n Real\n      make :: n:Int -> T1 n Real\n      x = combine (make 3) (make 5)\n        |]\n\n    -- ================================================================\n    -- Forward-declared type (no RHS) with :: Nat\n    -- ================================================================\n\n    , assertRawType\n        \"forward-declared type with :: Nat resolves labels\"\n        [r|\n      module main (x)\n      type Opaque (d :: Nat) a\n      make :: n:Int -> Opaque n Real\n      x = make 42\n        |]\n        (AppU (VarU (TV \"Opaque\")) [NatLitU 42, VarU (TV \"Real\")])\n\n    , expectError\n        \"forward-declared type: dim mismatch caught\"\n        [r|\n      module main (x)\n      type Opaque (d :: Nat) a\n      make :: n:Int -> Opaque n Real\n      consume :: Opaque 10 Real -> Int\n      x = consume (make 5)\n        |]\n    ]\n\n-- ============================================================\n-- Let binding syntax tests\n-- ============================================================\n\nletBindingTests :: TestTree\nletBindingTests =\n  localOption (mkTimeout 1000000) $ -- 1 second timeout\n    testGroup\n      \"Let binding syntax\"\n      [ -- === Regular let expressions ===\n        assertGeneralType\n          \"single let binding\"\n          [r|\n        module main (x)\n        x = let y = 42 in y\n          |]\n          int\n\n      , assertGeneralType\n          \"let with multiple bindings (omit repeated let)\"\n          [r|\n        module main (x)\n        x =\n          let a = 1\n              b = 2\n          in b\n          |]\n          int\n\n      , assertGeneralType\n          \"let with repeated let keywords\"\n          [r|\n        module main (x)\n        x =\n          let a = 1\n          let b = 2\n          in b\n          |]\n          int\n\n      , assertGeneralType\n          \"nested let expressions\"\n          [r|\n        module main (x)\n        x =\n          let a = 1\n          in let b = 2\n             in b\n          |]\n          int\n\n      , assertGeneralType\n          \"let binding used in body\"\n          [r|\n        module main (x)\n        add :: Int -> Int -> Int\n        x =\n          let a = 1\n              b = 2\n          in add a b\n          |]\n          int\n\n      -- === Semicolon-delimited let (for inline / morloc run) ===\n\n      , assertGeneralType\n          \"let with semicolons (inline form)\"\n          \"module main (x)\\nx = let { a = 1; b = 2 } in b\"\n          int\n\n      -- === Do-block let bindings ===\n\n      , assertGeneralType\n          \"do-block with single let\"\n          [r|\n        module main (x)\n        x = do\n            let y = 42\n            y\n          |]\n          (emptyEff int)\n\n      , assertGeneralType\n          \"do-block with multi-binding let (omit repeated let)\"\n          [r|\n        module main (x)\n        x = do\n            let a = 1\n                b = 2\n            b\n          |]\n          (emptyEff int)\n\n      , assertGeneralType\n          \"do-block with separate let statements\"\n          [r|\n        module main (x)\n        x = do\n            let a = 1\n            let b = 2\n            b\n          |]\n          (emptyEff int)\n\n      , assertGeneralType\n          \"do-block let interleaved with bind\"\n          [r|\n        module main (x)\n        f :: Int -> <IO> Int\n        x = do\n            let a = 1\n            b <- f a\n            let c = 2\n            b\n          |]\n          (ioEff int)\n\n      -- === Semicolon-delimited do-block (explicit braces) ===\n\n      , assertGeneralType\n          \"do with explicit braces and semicolons\"\n          \"module main (x)\\nx = do { 42 }\"\n          (emptyEff int)\n\n      , assertGeneralType\n          \"do with explicit braces, let, and semicolons\"\n          \"module main (x)\\nx = do { let a = 1; a }\"\n          (emptyEff int)\n\n      , assertGeneralType\n          \"do with explicit braces, multiple lets\"\n          \"module main (x)\\nx = do { let a = 1; let b = 2; b }\"\n          (emptyEff int)\n      ]\n\n-- | Tests for typeclass instance resolution when multiple instances share the\n-- same underlying type (e.g., Foldable List, Foldable Deque, Foldable Array\n-- where Deque and Array are defined as List). Instance resolution must handle\n-- these without erroring when the first instance solves an existential and\n-- subsequent instances have a different alias name but equivalent applied form.\naliasConstructorTests :: TestTree\naliasConstructorTests =\n  localOption (mkTimeout 2000000) $ -- 2 second timeout\n    testGroup\n      \"Typeclass resolution with type alias families\"\n      [\n        -- === POSITIVE: aliases that should be equivalent ===\n\n        -- Two aliases for the same underlying type used in a Foldable context.\n        -- concat needs (Foldable f, Monoid a). When fold's existential ?f is\n        -- solved to one alias, checking it against another should succeed.\n        assertGeneralType\n          \"fold over alias types: Deque and List are equivalent constructors\"\n          [r|\n        module main (f)\n        class Semigroup a where\n          append :: a -> a -> a\n        class Semigroup a => Monoid a where\n          mempty :: a\n        class Foldable f where\n          fold :: (b -> a -> b) -> b -> f a -> b\n        type Deque a = List a\n        instance Semigroup (List a)\n        instance Semigroup (Deque a)\n        instance Monoid (List a)\n        instance Monoid (Deque a)\n        instance Foldable List\n        instance Foldable Deque\n        concat :: (Foldable f, Monoid a) => f (f a) -> f a\n        concat = fold append mempty\n        f :: [[Int]] -> [Int]\n        f = concat\n          |]\n          (fun [lst (lst int), lst int])\n\n      , -- Three aliases for the same type used as type constructors\n        assertGeneralType\n          \"three aliases (List, Deque, Array) all equivalent as constructors\"\n          [r|\n        module main (f)\n        class Functor f where\n          fmap :: (a -> b) -> f a -> f b\n        type Deque a = List a\n        type Array a = List a\n        instance Functor List\n        instance Functor Deque\n        instance Functor Array\n        f :: [Int] -> [Int]\n        f = fmap (\\x -> x)\n          |]\n          (fun [lst int, lst int])\n\n      , -- Transitive alias chains: A = B = C should all be equivalent\n        assertGeneralType\n          \"transitive alias: MyList = Deque = List\"\n          [r|\n        module main (f)\n        class Foldable f where\n          fold :: (b -> a -> b) -> b -> f a -> b\n        type Deque a = List a\n        type MyList a = Deque a\n        instance Foldable List\n        instance Foldable Deque\n        instance Foldable MyList\n        f :: [Int] -> Int\n        f = fold (\\a x -> a) 0\n          |]\n          (fun [lst int, int])\n\n      , -- Multi-parameter aliases: same arity, same underlying type\n        assertGeneralType\n          \"two-parameter alias equivalence\"\n          [r|\n        module main (f)\n        class MyClass f where\n          myMethod :: f a b -> f a b\n        type MyMap a b = Map a b\n        instance MyClass Map\n        instance MyClass MyMap\n        f :: Map Int Str -> Map Int Str\n        f = myMethod\n          |]\n          (fun [arr \"Map\" [int, str], arr \"Map\" [int, str]])\n\n      , -- === TYPE SPECIALIZATION ===\n        -- When a root type (List) and a descendant (Deque) are unified,\n        -- the inferred type should specialize to the descendant regardless\n        -- of argument order.\n\n        assertRawType\n          \"specializes to Deque: annotation on right\"\n          [r|\n        module main (bar)\n        class Semigroup a where\n          append :: a -> a -> a\n        type Deque a = List a\n        instance Semigroup (List a)\n        instance Semigroup (Deque a)\n        bar :: Deque Int\n        bar = append [1,2,3] ([4,5,6] :: Deque Int)\n          |]\n          (arr \"Deque\" [int])\n\n      , assertRawType\n          \"specializes to Deque: annotation on left\"\n          [r|\n        module main (baz)\n        class Semigroup a where\n          append :: a -> a -> a\n        type Deque a = List a\n        instance Semigroup (List a)\n        instance Semigroup (Deque a)\n        baz :: Deque Int\n        baz = append ([4,5,6] :: Deque Int) [1,2,3]\n          |]\n          (arr \"Deque\" [int])\n\n      , -- Transitive specialization: MyList = Deque = List should\n        -- specialize to the deepest descendant\n        assertRawType\n          \"transitive specialization to deepest descendant\"\n          [r|\n        module main (bar)\n        class Semigroup a where\n          append :: a -> a -> a\n        type Deque a = List a\n        type MyList a = Deque a\n        instance Semigroup (List a)\n        instance Semigroup (Deque a)\n        instance Semigroup (MyList a)\n        bar :: MyList Int\n        bar = append [1,2,3] ([4,5,6] :: MyList Int)\n          |]\n          (arr \"MyList\" [int])\n\n      , -- concat: the original motivating case. concat uses fold, (<>),\n        -- and mempty which are all typeclass methods with instances for\n        -- multiple members of the List representation family.\n        assertGeneralType\n          \"concat typechecks with List representation family\"\n          [r|\n        module main (f)\n        class Semigroup a where\n          append :: a -> a -> a\n        class Semigroup a => Monoid a where\n          mempty :: a\n        class Foldable f where\n          fold :: (b -> a -> b) -> b -> f a -> b\n        type Deque a = List a\n        type Array a = List a\n        instance Semigroup (List a)\n        instance Semigroup (Deque a)\n        instance Semigroup (Array a)\n        instance Monoid (List a)\n        instance Monoid (Deque a)\n        instance Monoid (Array a)\n        instance Foldable List\n        instance Foldable Deque\n        instance Foldable Array\n        concat :: (Foldable f, Monoid a) => f (f a) -> f a\n        concat = fold append mempty\n        f :: [[Int]] -> [Int]\n        f = concat\n          |]\n          (fun [lst (lst int), lst int])\n\n      , -- === NEGATIVE: SIBLING REJECTION ===\n\n        -- Sibling aliases with applied types should be rejected.\n        -- This tests the areSiblingAliases check that prevents\n        -- transitive matching through the common ancestor.\n        exprTestBad\n          \"sibling aliases Array and Deque are incompatible\"\n          [r|\n        module main (bad)\n        class Semigroup a where\n          append :: a -> a -> a\n        type Deque a = List a\n        type Array a = List a\n        instance Semigroup (Deque a)\n        instance Semigroup (Array a)\n        bad = append ([1,2,3] :: Array Int) ([4,5,6] :: Deque Int)\n          |]\n\n      , -- Sibling rejection also applies to function arguments\n        exprTestBad\n          \"function expecting Deque rejects Array argument\"\n          [r|\n        module main (bad)\n        type Deque a = List a\n        type Array a = List a\n        f :: Deque Int -> Int\n        bad :: Int\n        bad = f ([1,2,3] :: Array Int)\n          |]\n\n      , -- Non-alias types remain incompatible\n        exprTestBad\n          \"non-alias types Int vs Str remain incompatible\"\n          [r|\n        module main (f)\n        f :: Int\n        f = (\"hello\" :: Str)\n          |]\n      ]\n"
  },
  {
    "path": "test-suite/cmorloc-tests/Makefile",
    "content": "all:\n\tgcc -o test -g *.[ch]\n\nclean:\n\trm -f *gch test\n"
  },
  {
    "path": "test-suite/cmorloc-tests/test.c",
    "content": "#include \"test.h\"\n#include \"../../data/morloc.h\"\n\nSETUP\n\nvoid test_json() {\n    path_t path[] = { { JSON_PATH_TYPE_KEY, {.key = \"b\"} } };\n    char* element = access_json_by_path(\"{\\\"a\\\":69,\\\"b\\\":420}\", path, 1, &errmsg);\n    TEST( strcmp(element, \"420\") == 0, \"access_json_by_path\" );\n}\n\nvoid test_json_string_size() {\n    size_t json_size = 0;\n    size_t c_size = 0;\n    char* errmsg = NULL;\n    char* json = \"\";\n    json_string_size\n}\n\nint main() {\n    test_json();\n    test_json_string_size();\n    \n    TEST_SUMMARY();\n    return fails ? 1 : 0;\n}\n"
  },
  {
    "path": "test-suite/cmorloc-tests/test.h",
    "content": "#ifndef __TEST_H__\n#define __TEST_H__\n\n#include <stdio.h>\n\n#define COLOR_RED \"\\033[0;31m\"\n#define COLOR_GREEN \"\\033[0;32m\"\n#define COLOR_RESET \"\\033[0m\"\n\n#define TEST(cond, msg) \\\n    do { \\\n        if (!(cond) || errmsg != NULL) { \\\n            printf(\"[%sFAIL%s] %s:%d: %s\\n\", COLOR_RED, COLOR_RESET, __FILE__, __LINE__, msg); \\\n            fails++; \\\n        } else { \\\n            passes++; \\\n        } \\\n    } while(0)\n\n#define SETUP char* errmsg = NULL;\n\n#define TEST_TRY(fun, ...) \\\n    errmsg = NULL; \\\n    fun(__VA_ARGS__ __VA_OPT__(,) &errmsg); \\\n    TEST(errmsg != NULL, errmsg)\n\n\n#define TEST_SUMMARY() \\\n    printf(\"\\nResults: %s%d passed%s, %s%d failed%s\\n\", \\\n        COLOR_GREEN, passes, COLOR_RESET, \\\n        (fails ? COLOR_RED : COLOR_RESET), fails, COLOR_RESET)\n\nint passes = 0;\nint fails = 0;\n\n#endif\n"
  },
  {
    "path": "test-suite/concurrency-tests/bidi-py-r.loc",
    "content": "module bidiPyR (testUni, testBidi1, testBidi5, testBidi10, testBidi11, testBidi15)\n\nimport root-r\nimport root-py\n\n--' R identity on integers\nrId :: Int -> Int\nsource R from \"r_funcs.R\" (\"r_id\" as rId)\n\n--' Python identity on integers\npyId :: Int -> Int\nsource Py from \"py_funcs.py\" (\"py_id\" as pyId)\n\n--' Python collectors of various arities\npyCollect1 :: Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect1)\n\npyCollect5 :: Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect5)\n\npyCollect10 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect10)\n\npyCollect11 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect11)\n\npyCollect15 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect15)\n\n--' Bidirectional: Python calls R, R calls back to Python\nrViaPy :: Int -> Int\nrViaPy x = rId (pyId x)\n\n--' Unidirectional control: Python->R only, no callbacks\ntestUni :: [Int]\ntestUni = pyCollect15 (rId 1) (rId 2) (rId 3) (rId 4) (rId 5) (rId 6) (rId 7) (rId 8) (rId 9) (rId 10) (rId 11) (rId 12) (rId 13) (rId 14) (rId 15)\n\n--' Bidirectional with increasing callback counts\ntestBidi1 :: [Int]\ntestBidi1 = pyCollect1 (rViaPy 1)\n\ntestBidi5 :: [Int]\ntestBidi5 = pyCollect5 (rViaPy 1) (rViaPy 2) (rViaPy 3) (rViaPy 4) (rViaPy 5)\n\ntestBidi10 :: [Int]\ntestBidi10 = pyCollect10 (rViaPy 1) (rViaPy 2) (rViaPy 3) (rViaPy 4) (rViaPy 5) (rViaPy 6) (rViaPy 7) (rViaPy 8) (rViaPy 9) (rViaPy 10)\n\ntestBidi11 :: [Int]\ntestBidi11 = pyCollect11 (rViaPy 1) (rViaPy 2) (rViaPy 3) (rViaPy 4) (rViaPy 5) (rViaPy 6) (rViaPy 7) (rViaPy 8) (rViaPy 9) (rViaPy 10) (rViaPy 11)\n\ntestBidi15 :: [Int]\ntestBidi15 = pyCollect15 (rViaPy 1) (rViaPy 2) (rViaPy 3) (rViaPy 4) (rViaPy 5) (rViaPy 6) (rViaPy 7) (rViaPy 8) (rViaPy 9) (rViaPy 10) (rViaPy 11) (rViaPy 12) (rViaPy 13) (rViaPy 14) (rViaPy 15)\n"
  },
  {
    "path": "test-suite/concurrency-tests/bidi-r-py.loc",
    "content": "module bidiRPy (testBidi1, testBidi5, testBidi10, testBidi11, testBidi15)\n\nimport root-r\nimport root-py\n\n--' R identity on integers\nrId :: Int -> Int\nsource R from \"r_funcs.R\" (\"r_id\" as rId)\n\n--' Python identity on integers\npyId :: Int -> Int\nsource Py from \"py_funcs.py\" (\"py_id\" as pyId)\n\n--' R collectors of various arities\nrCollect1 :: Int -> [Int]\nsource R from \"r_funcs.R\" (\"r_collect\" as rCollect1)\n\nrCollect5 :: Int -> Int -> Int -> Int -> Int -> [Int]\nsource R from \"r_funcs.R\" (\"r_collect\" as rCollect5)\n\nrCollect10 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource R from \"r_funcs.R\" (\"r_collect\" as rCollect10)\n\nrCollect11 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource R from \"r_funcs.R\" (\"r_collect\" as rCollect11)\n\nrCollect15 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource R from \"r_funcs.R\" (\"r_collect\" as rCollect15)\n\n--' Bidirectional: R calls Python, Python calls back to R\npyViaR :: Int -> Int\npyViaR x = pyId (rId x)\n\n--' Bidirectional with increasing callback counts (R entry point)\ntestBidi1 :: [Int]\ntestBidi1 = rCollect1 (pyViaR 1)\n\ntestBidi5 :: [Int]\ntestBidi5 = rCollect5 (pyViaR 1) (pyViaR 2) (pyViaR 3) (pyViaR 4) (pyViaR 5)\n\ntestBidi10 :: [Int]\ntestBidi10 = rCollect10 (pyViaR 1) (pyViaR 2) (pyViaR 3) (pyViaR 4) (pyViaR 5) (pyViaR 6) (pyViaR 7) (pyViaR 8) (pyViaR 9) (pyViaR 10)\n\ntestBidi11 :: [Int]\ntestBidi11 = rCollect11 (pyViaR 1) (pyViaR 2) (pyViaR 3) (pyViaR 4) (pyViaR 5) (pyViaR 6) (pyViaR 7) (pyViaR 8) (pyViaR 9) (pyViaR 10) (pyViaR 11)\n\ntestBidi15 :: [Int]\ntestBidi15 = rCollect15 (pyViaR 1) (pyViaR 2) (pyViaR 3) (pyViaR 4) (pyViaR 5) (pyViaR 6) (pyViaR 7) (pyViaR 8) (pyViaR 9) (pyViaR 10) (pyViaR 11) (pyViaR 12) (pyViaR 13) (pyViaR 14) (pyViaR 15)\n"
  },
  {
    "path": "test-suite/concurrency-tests/concurrent-uni.loc",
    "content": "module concurrentUni (testPyToR15, testRToPy15, testPyToR20)\n\nimport root-r\nimport root-py\n\n--' R identity on integers\nrId :: Int -> Int\nsource R from \"r_funcs.R\" (\"r_id\" as rId)\n\n--' Python identity on integers\npyId :: Int -> Int\nsource Py from \"py_funcs.py\" (\"py_id\" as pyId)\n\n--' Python collectors\npyCollect15 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect15)\n\npyCollect20 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect20)\n\n--' R collectors\nrCollect15 :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> [Int]\nsource R from \"r_funcs.R\" (\"r_collect\" as rCollect15)\n\n--' 15 unidirectional Python->R calls\ntestPyToR15 :: [Int]\ntestPyToR15 = pyCollect15 (rId 1) (rId 2) (rId 3) (rId 4) (rId 5) (rId 6) (rId 7) (rId 8) (rId 9) (rId 10) (rId 11) (rId 12) (rId 13) (rId 14) (rId 15)\n\n--' 15 unidirectional R->Python calls\ntestRToPy15 :: [Int]\ntestRToPy15 = rCollect15 (pyId 1) (pyId 2) (pyId 3) (pyId 4) (pyId 5) (pyId 6) (pyId 7) (pyId 8) (pyId 9) (pyId 10) (pyId 11) (pyId 12) (pyId 13) (pyId 14) (pyId 15)\n\n--' 20 unidirectional Python->R calls\ntestPyToR20 :: [Int]\ntestPyToR20 = pyCollect20 (rId 1) (rId 2) (rId 3) (rId 4) (rId 5) (rId 6) (rId 7) (rId 8) (rId 9) (rId 10) (rId 11) (rId 12) (rId 13) (rId 14) (rId 15) (rId 16) (rId 17) (rId 18) (rId 19) (rId 20)\n"
  },
  {
    "path": "test-suite/concurrency-tests/deep-callback.loc",
    "content": "module deepCallback (testDepth2, testDepth4, testDepth6, testDepth12, testDeep4x5, testDeep6x5)\n\nimport root-r\nimport root-py\n\n--' R identity on integers\nrId :: Int -> Int\nsource R from \"r_funcs.R\" (\"r_id\" as rId)\n\n--' Python identity on integers\npyId :: Int -> Int\nsource Py from \"py_funcs.py\" (\"py_id\" as pyId)\n\n--' Python collectors\npyCollect1 :: Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect1)\n\npyCollect5 :: Int -> Int -> Int -> Int -> Int -> [Int]\nsource Py from \"py_funcs.py\" (\"py_collect\" as pyCollect5)\n\n--' Depth-2 chain: Py -> R -> Py\ndepth2 :: Int -> Int\ndepth2 x = rId (pyId x)\n\n--' Depth-4 chain: Py -> R -> Py -> R -> Py\ndepth4 :: Int -> Int\ndepth4 x = rId (pyId (rId (pyId x)))\n\n--' Depth-6 chain: Py -> R -> Py -> R -> Py -> R -> Py\ndepth6 :: Int -> Int\ndepth6 x = rId (pyId (rId (pyId (rId (pyId x)))))\n\n--' Depth-12 chain: 6 round trips\ndepth12 :: Int -> Int\ndepth12 x = rId (pyId (rId (pyId (rId (pyId (rId (pyId (rId (pyId (rId (pyId x)))))))))))\n\n-- NOTE: Depth beyond 2*(nproc-1) will deadlock because each cross-language\n-- hop blocks a worker. Depth-24 (12 round trips) needs ~12 workers per pool,\n-- exceeding the typical nproc-1 = 11 on a 12-core system.\n\n--' Single depth-2 call\ntestDepth2 :: [Int]\ntestDepth2 = pyCollect1 (depth2 42)\n\n--' Single depth-4 call\ntestDepth4 :: [Int]\ntestDepth4 = pyCollect1 (depth4 42)\n\n--' Single depth-6 call\ntestDepth6 :: [Int]\ntestDepth6 = pyCollect1 (depth6 42)\n\n--' Single depth-12 call (6 round trips, sequential)\ntestDepth12 :: [Int]\ntestDepth12 = pyCollect1 (depth12 42)\n\n--' 5 parallel depth-4 calls\ntestDeep4x5 :: [Int]\ntestDeep4x5 = pyCollect5 (depth4 1) (depth4 2) (depth4 3) (depth4 4) (depth4 5)\n\n--' 5 parallel depth-6 calls\ntestDeep6x5 :: [Int]\ntestDeep6x5 = pyCollect5 (depth6 1) (depth6 2) (depth6 3) (depth6 4) (depth6 5)\n"
  },
  {
    "path": "test-suite/concurrency-tests/helpers/py_funcs.py",
    "content": "def py_collect(*args):\n    return list(args)\n\ndef py_id(x):\n    return x\n"
  },
  {
    "path": "test-suite/concurrency-tests/helpers/r_funcs.R",
    "content": "r_id <- function(x) x\nr_collect <- function(...) list(...)\n"
  },
  {
    "path": "test-suite/concurrency-tests/run-tests.sh",
    "content": "#!/usr/bin/env bash\n# run-tests.sh - Concurrency test suite for morloc cross-pool dispatch\n#\n# Tests bidirectional callbacks, deep callback chains, and high-concurrency\n# unidirectional calls between Python and R pools.\n#\n# Usage: ./run-tests.sh [test...]\n#   With no arguments, runs all tests. Pass partial names to filter:\n#   ./run-tests.sh bidi deep\n\nset -euo pipefail\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\nTIMEOUT=10\n\nPASSED=0\nFAILED=0\nTOTAL=0\nFAILURES=()\n\nif [[ -t 1 ]]; then\n    GREEN=$'\\033[32m' RED=$'\\033[31m' YELLOW=$'\\033[33m' BOLD=$'\\033[1m' RESET=$'\\033[0m'\nelse\n    GREEN='' RED='' YELLOW='' BOLD='' RESET=''\nfi\n\n# Extract exported subcommands from a .loc file\nget_exports() {\n    local loc_file=\"$1\"\n    # Parse the module export list: module name (export1, export2, ...)\n    head -1 \"$loc_file\" \\\n        | sed 's/^module[[:space:]]*[^(]*(//; s/).*//; s/,/ /g' \\\n        | tr -s ' '\n}\n\nrun_single_test() {\n    local loc_file=\"$1\"\n    local subcommand=\"$2\"\n    local work_dir=\"$3\"\n\n    TOTAL=$((TOTAL + 1))\n    local test_label\n    test_label=\"$(basename \"$loc_file\" .loc):$subcommand\"\n    printf \"  %-35s \" \"$test_label\"\n\n    local output start_time elapsed rc\n    start_time=$(date +%s%N)\n    output=$(cd \"$work_dir\" && timeout \"$TIMEOUT\" ./nexus \"$subcommand\" 2>&1) && rc=0 || rc=$?\n    elapsed=$(( ($(date +%s%N) - start_time) / 1000000 ))\n\n    if [[ $rc -eq 0 ]]; then\n        if (( elapsed >= 1000 )); then\n            printf \"%sPASS%s (%d.%01ds)\\n\" \"$GREEN\" \"$RESET\" \"$((elapsed/1000))\" \"$(( (elapsed%1000) / 100 ))\"\n        else\n            printf \"%sPASS%s (%dms)\\n\" \"$GREEN\" \"$RESET\" \"$elapsed\"\n        fi\n        PASSED=$((PASSED + 1))\n    elif [[ $rc -eq 124 ]]; then\n        printf \"%sTIMEOUT%s (>${TIMEOUT}s)\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$test_label (timeout)\")\n    else\n        printf \"%sFAIL%s (rc=$rc)\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$test_label\")\n        echo \"$output\" | tail -3 | sed 's/^/      /'\n    fi\n}\n\ncompile_and_run() {\n    local loc_file=\"$1\"\n    local loc_basename\n    loc_basename=\"$(basename \"$loc_file\")\"\n    local test_name=\"${loc_basename%.loc}\"\n\n    echo \"${BOLD}[$test_name]${RESET}\"\n\n    local work_dir\n    work_dir=$(mktemp -d)\n    trap \"rm -rf '$work_dir'\" RETURN\n\n    cp \"$loc_file\" \"$work_dir/\"\n    cp \"$SCRIPT_DIR/helpers\"/* \"$work_dir/\" 2>/dev/null || true\n\n    # Compile\n    if ! (cd \"$work_dir\" && morloc make -o nexus \"$loc_basename\" > /dev/null 2>&1); then\n        printf \"  %-35s %sCOMPILE FAIL%s\\n\" \"$test_name\" \"$RED\" \"$RESET\"\n        local exports\n        exports=$(get_exports \"$loc_file\")\n        for sub in $exports; do\n            TOTAL=$((TOTAL + 1))\n            FAILED=$((FAILED + 1))\n            FAILURES+=(\"$test_name:$sub (compile)\")\n        done\n        echo \"\"\n        return\n    fi\n\n    local exports\n    exports=$(get_exports \"$loc_file\")\n    for sub in $exports; do\n        run_single_test \"$loc_file\" \"$sub\" \"$work_dir\"\n    done\n    echo \"\"\n}\n\n# Determine which tests to run\nSELECTED=(\"$@\")\nshould_run() {\n    if [ ${#SELECTED[@]} -eq 0 ]; then return 0; fi\n    for s in \"${SELECTED[@]}\"; do\n        if [[ \"$1\" == *\"$s\"* ]]; then return 0; fi\n    done\n    return 1\n}\n\necho \"=== Morloc Concurrency Test Suite ===\"\necho \"Timeout: ${TIMEOUT}s per subcommand\"\necho \"\"\n\nfor loc_file in \"$SCRIPT_DIR\"/*.loc; do\n    test_name=\"$(basename \"$loc_file\" .loc)\"\n    if should_run \"$test_name\"; then\n        compile_and_run \"$loc_file\"\n    fi\ndone\n\necho \"=== Results ===\"\necho \"${GREEN}Passed: $PASSED${RESET}, ${RED}Failed: $FAILED${RESET}, Total: $TOTAL\"\n\nif (( FAILED > 0 )); then\n    echo \"\"\n    echo \"${RED}Failures:${RESET}\"\n    for f in \"${FAILURES[@]}\"; do\n        echo \"  ${RED}-${RESET} $f\"\n    done\n    exit 1\nfi\necho \"${GREEN}${BOLD}ALL PASSED${RESET}\"\n"
  },
  {
    "path": "test-suite/daemon-tests/arithmetic.loc",
    "content": "module arithmetic (add, mul, neg, square)\n\nimport root-cpp\n\nadd :: Real -> Real -> Real\nadd x y = x + y\n\nmul :: Real -> Real -> Real\nmul x y = x * y\n\nneg :: Real -> Real\nneg x = 0.0 - x\n\nsquare :: Real -> Real\nsquare x = x * x\n"
  },
  {
    "path": "test-suite/daemon-tests/helpers.py",
    "content": "def greet(name):\n    return \"Hello, \" + name + \"!\"\n\ndef strlen(s):\n    return len(s)\n"
  },
  {
    "path": "test-suite/daemon-tests/pure.loc",
    "content": "module pure (checkInt, checkReal, checkBool, checkStr, checkTuple)\n\ncheckInt = 42 :: Int\ncheckReal = 3.14 :: Real\ncheckBool = True\ncheckStr = \"hello\"\ncheckTuple = (1 :: Int, True, \"abc\")\n"
  },
  {
    "path": "test-suite/daemon-tests/run-tests.sh",
    "content": "#!/usr/bin/env bash\n# run-tests.sh - Daemon and router test suite for morloc\n#\n# Tests the daemon mode (--daemon), HTTP/TCP/socket APIs, and the\n# multi-program router (--router).\n#\n# Usage: ./run-tests.sh [test...]\n#   With no arguments, runs all test groups. Pass partial names to filter:\n#   ./run-tests.sh http tcp socket router\n\nset -euo pipefail\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\nTIMEOUT=30\nDAEMON_STARTUP_WAIT=3\n\nPASSED=0\nFAILED=0\nTOTAL=0\nFAILURES=()\n\n# Tracked PIDs and temp dirs for cleanup\nDAEMON_PIDS=()\nWORK_DIRS=()\nSOCKET_FILES=()\n\nif [[ -t 1 ]]; then\n    GREEN=$'\\033[32m' RED=$'\\033[31m' YELLOW=$'\\033[33m' BOLD=$'\\033[1m' RESET=$'\\033[0m'\nelse\n    GREEN='' RED='' YELLOW='' BOLD='' RESET=''\nfi\n\n# ======================================================================\n# Cleanup\n# ======================================================================\n\ncleanup() {\n    for pid in \"${DAEMON_PIDS[@]}\"; do\n        kill \"$pid\" 2>/dev/null || true\n    done\n    # Wait briefly then force-kill\n    sleep 0.5\n    for pid in \"${DAEMON_PIDS[@]}\"; do\n        kill -9 \"$pid\" 2>/dev/null || true\n        wait \"$pid\" 2>/dev/null || true\n    done\n    for sock in \"${SOCKET_FILES[@]}\"; do\n        rm -f \"$sock\"\n    done\n    for d in \"${WORK_DIRS[@]}\"; do\n        rm -rf \"$d\"\n    done\n}\ntrap cleanup EXIT\n\n# ======================================================================\n# Test helpers\n# ======================================================================\n\nassert_test() {\n    local label=\"$1\"\n    local expected=\"$2\"\n    local actual=\"$3\"\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"$label\"\n\n    if [[ \"$actual\" == \"$expected\" ]]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      expected: $expected\"\n        echo \"      actual:   $actual\"\n    fi\n}\n\nassert_contains() {\n    local label=\"$1\"\n    local needle=\"$2\"\n    local haystack=\"$3\"\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"$label\"\n\n    if echo \"$haystack\" | grep -qF \"$needle\"; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      expected to contain: $needle\"\n        echo \"      actual: $(echo \"$haystack\" | head -3)\"\n    fi\n}\n\nassert_http_status() {\n    local label=\"$1\"\n    local expected_status=\"$2\"\n    local url=\"$3\"\n    shift 3\n    # remaining args are passed to curl\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"$label\"\n\n    local status\n    status=$(curl -s -o /dev/null -w \"%{http_code}\" \"$@\" \"$url\" 2>/dev/null) || status=\"000\"\n\n    if [[ \"$status\" == \"$expected_status\" ]]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      expected status: $expected_status\"\n        echo \"      actual status:   $status\"\n    fi\n}\n\n# Send a length-prefixed JSON message over a socket and read the response.\n# Usage: lp_request <socket_or_host:port> <json>\n# Output: the response JSON string\nlp_request() {\n    local target=\"$1\"\n    local json=\"$2\"\n\n    python3 -c \"\nimport socket, struct, sys, json\n\ntarget = sys.argv[1]\nmsg = sys.argv[2].encode('utf-8')\n\nif target.startswith('/'):\n    # Unix socket\n    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n    s.connect(target)\nelse:\n    # TCP host:port\n    host, port = target.rsplit(':', 1)\n    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n    s.connect((host, int(port)))\n\ns.settimeout(10)\n\n# Send length-prefixed message\ns.sendall(struct.pack('>I', len(msg)) + msg)\n\n# Read response length\nresp_len_bytes = b''\nwhile len(resp_len_bytes) < 4:\n    chunk = s.recv(4 - len(resp_len_bytes))\n    if not chunk:\n        break\n    resp_len_bytes += chunk\n\nresp_len = struct.unpack('>I', resp_len_bytes)[0]\n\n# Read response body\nresp = b''\nwhile len(resp) < resp_len:\n    chunk = s.recv(resp_len - len(resp))\n    if not chunk:\n        break\n    resp += chunk\n\ns.close()\nprint(resp.decode('utf-8'))\n\" \"$target\" \"$json\"\n}\n\n# Extract a JSON field value (simple string/number/bool/object extraction)\njson_field() {\n    local json=\"$1\"\n    local field=\"$2\"\n    python3 -c \"\nimport json, sys\ndata = json.loads(sys.argv[1])\nval = data.get(sys.argv[2])\nif val is None:\n    print('')\nelif isinstance(val, (dict, list)):\n    print(json.dumps(val, separators=(',', ':')))\nelif isinstance(val, bool):\n    print('true' if val else 'false')\nelse:\n    print(val)\n\" \"$json\" \"$field\"\n}\n\n# Wait for a daemon to be ready (checks stderr log for \"daemon: ready\")\nwait_for_daemon() {\n    local log_file=\"$1\"\n    local max_wait=\"${2:-$DAEMON_STARTUP_WAIT}\"\n    local elapsed=0\n\n    while [ \"$elapsed\" -lt \"$max_wait\" ]; do\n        if grep -q \"ready\" \"$log_file\" 2>/dev/null; then\n            return 0\n        fi\n        sleep 0.2\n        elapsed=$((elapsed + 1))\n    done\n\n    echo \"Daemon did not become ready within ${max_wait}s\" >&2\n    echo \"Log contents:\" >&2\n    cat \"$log_file\" >&2\n    return 1\n}\n\n# Wait for an HTTP port to respond\nwait_for_http() {\n    local port=\"$1\"\n    local max_wait=\"${2:-$DAEMON_STARTUP_WAIT}\"\n    local i=0\n    local step_ms=200\n    local max_steps=$(( max_wait * 1000 / step_ms ))\n\n    while [ \"$i\" -lt \"$max_steps\" ]; do\n        if curl -s -o /dev/null \"http://127.0.0.1:${port}/health\" 2>/dev/null; then\n            return 0\n        fi\n        sleep 0.\"$step_ms\"\n        i=$((i + 1))\n    done\n\n    echo \"HTTP port $port did not respond within ${max_wait}s\" >&2\n    return 1\n}\n\n# ======================================================================\n# Compile test programs\n# ======================================================================\n\ncompile_program() {\n    local loc_file=\"$1\"\n    local work_dir=\"$2\"\n    local name\n    name=\"$(basename \"$loc_file\" .loc)\"\n\n    cp \"$SCRIPT_DIR/$loc_file\" \"$work_dir/\"\n    cp \"$SCRIPT_DIR\"/*.py \"$work_dir/\" 2>/dev/null || true\n\n    if ! (cd \"$work_dir\" && morloc make -o nexus \"$loc_file\" > /dev/null 2>\"$work_dir/build-${name}.err\"); then\n        echo \"COMPILE FAIL: $loc_file\" >&2\n        cat \"$work_dir/build-${name}.err\" >&2\n        return 1\n    fi\n    return 0\n}\n\n# Start a daemon in the background, returning its PID\n# Usage: start_daemon <work_dir> [extra_args...]\n# Sets: LAST_DAEMON_PID, LAST_DAEMON_LOG\nstart_daemon() {\n    local work_dir=\"$1\"\n    shift\n\n    local log_file=\"$work_dir/daemon.log\"\n\n    (cd \"$work_dir\" && exec ./nexus --daemon \"$@\" 2>\"$log_file\") &\n    local pid=$!\n    DAEMON_PIDS+=(\"$pid\")\n    LAST_DAEMON_PID=$pid\n    LAST_DAEMON_LOG=\"$log_file\"\n}\n\n# Stop a daemon by PID\nstop_daemon() {\n    local pid=\"$1\"\n    kill \"$pid\" 2>/dev/null || true\n    wait \"$pid\" 2>/dev/null || true\n    # Remove from tracked list\n    local new_pids=()\n    for p in \"${DAEMON_PIDS[@]}\"; do\n        [[ \"$p\" != \"$pid\" ]] && new_pids+=(\"$p\")\n    done\n    DAEMON_PIDS=(\"${new_pids[@]+\"${new_pids[@]}\"}\")\n}\n\n# Pick a random available port\npick_port() {\n    python3 -c \"\nimport socket\ns = socket.socket()\ns.bind(('127.0.0.1', 0))\nprint(s.getsockname()[1])\ns.close()\n\"\n}\n\n# ======================================================================\n# Test selector\n# ======================================================================\n\nSELECTED=(\"$@\")\nshould_run() {\n    if [ ${#SELECTED[@]} -eq 0 ]; then return 0; fi\n    for s in \"${SELECTED[@]}\"; do\n        if [[ \"$1\" == *\"$s\"* ]]; then return 0; fi\n    done\n    return 1\n}\n\n# ======================================================================\n# Setup: compile all test programs\n# ======================================================================\n\necho \"=== Morloc Daemon Test Suite ===\"\necho \"\"\n\nARITH_DIR=$(mktemp -d)\nSTRINGS_DIR=$(mktemp -d)\nPURE_DIR=$(mktemp -d)\nWORK_DIRS+=(\"$ARITH_DIR\" \"$STRINGS_DIR\" \"$PURE_DIR\")\n\necho \"Compiling test programs...\"\ncompile_program \"arithmetic.loc\" \"$ARITH_DIR\"\ncompile_program \"strings.loc\" \"$STRINGS_DIR\"\ncompile_program \"pure.loc\" \"$PURE_DIR\"\necho \"Done.\"\necho \"\"\n\n# ======================================================================\n# Test Group 1: HTTP API\n# ======================================================================\n\nif should_run \"http\"; then\n    echo \"${BOLD}[http] Daemon HTTP API${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    # Health endpoint\n    result=$(curl -s \"http://127.0.0.1:${HTTP_PORT}/health\")\n    status=$(json_field \"$result\" \"status\")\n    assert_test \"GET /health returns ok\" \"ok\" \"$status\"\n\n    # Discovery endpoint\n    disco=$(curl -s \"http://127.0.0.1:${HTTP_PORT}/discover\")\n    assert_contains \"GET /discover lists commands\" \"add\" \"$disco\"\n    assert_contains \"GET /discover lists mul\" \"mul\" \"$disco\"\n    assert_contains \"GET /discover lists neg\" \"neg\" \"$disco\"\n    assert_contains \"GET /discover lists square\" \"square\" \"$disco\"\n\n    # Call add(3, 4) -> 7.0\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n        -H \"Content-Type: application/json\" -d '[3, 4]')\n    status=$(json_field \"$result\" \"status\")\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/add [3,4] status=ok\" \"ok\" \"$status\"\n    assert_test \"POST /call/add [3,4] result=7\" \"7\" \"$val\"\n\n    # Call mul(5, 6) -> 30\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/mul\" \\\n        -H \"Content-Type: application/json\" -d '[5, 6]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/mul [5,6] result=30\" \"30\" \"$val\"\n\n    # Call neg(42) -> -42\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/neg\" \\\n        -H \"Content-Type: application/json\" -d '[42]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/neg [42] result=-42\" \"-42\" \"$val\"\n\n    # Call square(7) -> 49\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/square\" \\\n        -H \"Content-Type: application/json\" -d '[7]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/square [7] result=49\" \"49\" \"$val\"\n\n    # Args as {\"args\": [...]} object form\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n        -H \"Content-Type: application/json\" -d '{\"args\": [10, 20]}')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/add {args:[10,20]} result=30\" \"30\" \"$val\"\n\n    # Floating point args\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n        -H \"Content-Type: application/json\" -d '[1.5, 2.5]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/add [1.5,2.5] result=4\" \"4\" \"$val\"\n\n    # Error: unknown command\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/nonexistent\" \\\n        -H \"Content-Type: application/json\" -d '[1]')\n    status=$(json_field \"$result\" \"status\")\n    assert_test \"POST /call/nonexistent returns error\" \"error\" \"$status\"\n\n    # Error: unknown endpoint\n    assert_http_status \"GET /bogus returns 400\" \"400\" \"http://127.0.0.1:${HTTP_PORT}/bogus\"\n\n    # CORS preflight\n    assert_http_status \"OPTIONS returns 200\" \"200\" \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n        -X OPTIONS\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 2: HTTP with Python pool (strings)\n# ======================================================================\n\nif should_run \"http-py\"; then\n    echo \"${BOLD}[http-py] Daemon HTTP with Python pool${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$STRINGS_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    # Call greet(\"world\") -> \"Hello, world!\"\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/greet\" \\\n        -H \"Content-Type: application/json\" -d '[\"world\"]')\n    status=$(json_field \"$result\" \"status\")\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/greet status=ok\" \"ok\" \"$status\"\n    assert_test \"POST /call/greet [world] result\" \"Hello, world!\" \"$val\"\n\n    # Call strlen(\"morloc\") -> 6\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/strlen\" \\\n        -H \"Content-Type: application/json\" -d '[\"morloc\"]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/strlen [morloc] result=6\" \"6\" \"$val\"\n\n    # Empty string\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/strlen\" \\\n        -H \"Content-Type: application/json\" -d '[\"\"]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/strlen [] result=0\" \"0\" \"$val\"\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 3: HTTP with pure commands\n# ======================================================================\n\nif should_run \"http-pure\"; then\n    echo \"${BOLD}[http-pure] Daemon HTTP with pure morloc commands${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$PURE_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    # Pure commands take no arguments\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/checkInt\" \\\n        -H \"Content-Type: application/json\" -d '[]')\n    status=$(json_field \"$result\" \"status\")\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/checkInt status=ok\" \"ok\" \"$status\"\n    assert_test \"POST /call/checkInt result=42\" \"42\" \"$val\"\n\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/checkReal\" \\\n        -H \"Content-Type: application/json\" -d '[]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/checkReal result=3.14\" \"3.14\" \"$val\"\n\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/checkBool\" \\\n        -H \"Content-Type: application/json\" -d '[]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/checkBool result=true\" \"true\" \"$val\"\n\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/checkStr\" \\\n        -H \"Content-Type: application/json\" -d '[]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"POST /call/checkStr result=hello\" \"hello\" \"$val\"\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 4: Unix socket (length-prefixed JSON)\n# ======================================================================\n\nif should_run \"socket\"; then\n    echo \"${BOLD}[socket] Daemon Unix socket API${RESET}\"\n\n    SOCK_PATH=\"/tmp/morloc-test-$$.sock\"\n    SOCKET_FILES+=(\"$SOCK_PATH\")\n    start_daemon \"$ARITH_DIR\" --socket \"$SOCK_PATH\"\n    wait_for_daemon \"$LAST_DAEMON_LOG\" 15\n\n    # Health check via socket\n    result=$(lp_request \"$SOCK_PATH\" '{\"method\":\"health\"}')\n    status=$(json_field \"$result\" \"status\")\n    assert_test \"socket health status=ok\" \"ok\" \"$status\"\n\n    # Discovery via socket\n    result=$(lp_request \"$SOCK_PATH\" '{\"method\":\"discover\"}')\n    assert_contains \"socket discover lists add\" \"add\" \"$result\"\n\n    # Call via socket: add(10, 20) -> 30.0\n    result=$(lp_request \"$SOCK_PATH\" '{\"method\":\"call\",\"command\":\"add\",\"args\":[10,20]}')\n    status=$(json_field \"$result\" \"status\")\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"socket call add status=ok\" \"ok\" \"$status\"\n    assert_test \"socket call add [10,20] result=30\" \"30\" \"$val\"\n\n    # Call via socket with request ID\n    result=$(lp_request \"$SOCK_PATH\" '{\"id\":\"req-42\",\"method\":\"call\",\"command\":\"mul\",\"args\":[3,7]}')\n    rid=$(json_field \"$result\" \"id\")\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"socket call with id echoes id\" \"req-42\" \"$rid\"\n    assert_test \"socket call mul [3,7] result=21\" \"21\" \"$val\"\n\n    # Error: unknown command via socket\n    result=$(lp_request \"$SOCK_PATH\" '{\"method\":\"call\",\"command\":\"bogus\",\"args\":[1]}')\n    status=$(json_field \"$result\" \"status\")\n    assert_test \"socket unknown command returns error\" \"error\" \"$status\"\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 5: TCP (length-prefixed JSON)\n# ======================================================================\n\nif should_run \"tcp\"; then\n    echo \"${BOLD}[tcp] Daemon TCP API${RESET}\"\n\n    TCP_PORT=$(pick_port)\n    start_daemon \"$ARITH_DIR\" --port \"$TCP_PORT\"\n    wait_for_daemon \"$LAST_DAEMON_LOG\" 15\n\n    # Health check via TCP\n    result=$(lp_request \"127.0.0.1:${TCP_PORT}\" '{\"method\":\"health\"}')\n    status=$(json_field \"$result\" \"status\")\n    assert_test \"tcp health status=ok\" \"ok\" \"$status\"\n\n    # Call via TCP: add(100, 200) -> 300.0\n    result=$(lp_request \"127.0.0.1:${TCP_PORT}\" '{\"method\":\"call\",\"command\":\"add\",\"args\":[100,200]}')\n    status=$(json_field \"$result\" \"status\")\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"tcp call add status=ok\" \"ok\" \"$status\"\n    assert_test \"tcp call add [100,200] result=300\" \"300\" \"$val\"\n\n    # Call via TCP: square(9) -> 81\n    result=$(lp_request \"127.0.0.1:${TCP_PORT}\" '{\"method\":\"call\",\"command\":\"square\",\"args\":[9]}')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"tcp call square [9] result=81\" \"81\" \"$val\"\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 6: Multiple listeners simultaneously\n# ======================================================================\n\nif should_run \"multi\"; then\n    echo \"${BOLD}[multi] Daemon with all listeners${RESET}\"\n\n    SOCK_PATH=\"/tmp/morloc-test-multi-$$.sock\"\n    SOCKET_FILES+=(\"$SOCK_PATH\")\n    HTTP_PORT=$(pick_port)\n    TCP_PORT=$(pick_port)\n\n    start_daemon \"$ARITH_DIR\" --socket \"$SOCK_PATH\" --port \"$TCP_PORT\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    # Same command via all three protocols\n    # HTTP\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n        -H \"Content-Type: application/json\" -d '[1, 2]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"multi: HTTP add [1,2] result=3\" \"3\" \"$val\"\n\n    # TCP\n    result=$(lp_request \"127.0.0.1:${TCP_PORT}\" '{\"method\":\"call\",\"command\":\"add\",\"args\":[1,2]}')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"multi: TCP add [1,2] result=3\" \"3\" \"$val\"\n\n    # Unix socket\n    result=$(lp_request \"$SOCK_PATH\" '{\"method\":\"call\",\"command\":\"add\",\"args\":[1,2]}')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"multi: socket add [1,2] result=3\" \"3\" \"$val\"\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 7: Sequential requests (daemon stays alive)\n# ======================================================================\n\nif should_run \"sequential\"; then\n    echo \"${BOLD}[sequential] Multiple sequential requests${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    all_ok=true\n    for i in $(seq 1 10); do\n        result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n            -H \"Content-Type: application/json\" -d \"[${i}, ${i}]\")\n        val=$(json_field \"$result\" \"result\")\n        expected=$(python3 -c \"x = float($i + $i); print(int(x) if x == int(x) else x)\")\n        if [[ \"$val\" != \"$expected\" ]]; then\n            all_ok=false\n            break\n        fi\n    done\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"10 sequential add calls\"\n    if $all_ok; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"10 sequential add calls\")\n    fi\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 8: Concurrent requests\n# ======================================================================\n\nif should_run \"concurrent\"; then\n    echo \"${BOLD}[concurrent] Concurrent HTTP requests${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    # Fire 5 concurrent requests (each with a 15s timeout)\n    CONC_DIR=$(mktemp -d)\n    WORK_DIRS+=(\"$CONC_DIR\")\n\n    CONC_PIDS=()\n    for i in $(seq 1 5); do\n        (\n            curl -s --max-time 15 -X POST \"http://127.0.0.1:${HTTP_PORT}/call/square\" \\\n                -H \"Content-Type: application/json\" -d \"[${i}]\" \\\n                > \"$CONC_DIR/result-${i}.json\" 2>/dev/null\n        ) &\n        CONC_PIDS+=($!)\n    done\n    # Wait for all with a per-process check\n    for pid in \"${CONC_PIDS[@]}\"; do\n        wait \"$pid\" 2>/dev/null || true\n    done\n\n    all_ok=true\n    for i in $(seq 1 5); do\n        if [ -f \"$CONC_DIR/result-${i}.json\" ]; then\n            result=$(cat \"$CONC_DIR/result-${i}.json\")\n            val=$(json_field \"$result\" \"result\")\n            expected=$(python3 -c \"x = float($i * $i); print(int(x) if x == int(x) else x)\")\n            if [[ \"$val\" != \"$expected\" ]]; then\n                all_ok=false\n            fi\n        else\n            all_ok=false\n        fi\n    done\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"5 concurrent square calls\"\n    if $all_ok; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"5 concurrent square calls\")\n    fi\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 9: Graceful shutdown\n# ======================================================================\n\nif should_run \"shutdown\"; then\n    echo \"${BOLD}[shutdown] Graceful daemon shutdown${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    SOCK_PATH=\"/tmp/morloc-test-shutdown-$$.sock\"\n    SOCKET_FILES+=(\"$SOCK_PATH\")\n\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\" --socket \"$SOCK_PATH\"\n    wait_for_http \"$HTTP_PORT\" 10\n    local_pid=$LAST_DAEMON_PID\n\n    # Verify it's alive\n    result=$(curl -s \"http://127.0.0.1:${HTTP_PORT}/health\" 2>/dev/null) || result=\"\"\n    status=$(json_field \"$result\" \"status\" 2>/dev/null) || status=\"\"\n    assert_test \"daemon alive before shutdown\" \"ok\" \"$status\"\n\n    # Send SIGTERM\n    kill \"$local_pid\" 2>/dev/null\n    wait \"$local_pid\" 2>/dev/null || true\n\n    # Remove from tracked list\n    new_pids=()\n    for p in \"${DAEMON_PIDS[@]}\"; do\n        [[ \"$p\" != \"$local_pid\" ]] && new_pids+=(\"$p\")\n    done\n    DAEMON_PIDS=(\"${new_pids[@]+\"${new_pids[@]}\"}\")\n\n    # Verify it's dead\n    sleep 0.5\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"daemon exits after SIGTERM\"\n    if ! kill -0 \"$local_pid\" 2>/dev/null; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"daemon exits after SIGTERM\")\n        kill -9 \"$local_pid\" 2>/dev/null || true\n    fi\n\n    # Verify socket file cleaned up\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-50s \" \"socket file removed after shutdown\"\n    if [ ! -e \"$SOCK_PATH\" ]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"socket file removed after shutdown\")\n    fi\n\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 10: Router\n# ======================================================================\n\nif should_run \"router\"; then\n    echo \"${BOLD}[router] Multi-program router${RESET}\"\n\n    # Set up a temporary fdb directory with manifests\n    FDB_DIR=$(mktemp -d)\n    WORK_DIRS+=(\"$FDB_DIR\")\n\n    # Extract manifest JSON from the nexus wrapper script\n    # Format is: #!/bin/sh\\nexec morloc-nexus ...\\n### MANIFEST ###\\n<json>\n    if [ -f \"$ARITH_DIR/nexus\" ]; then\n        sed -n '/^### MANIFEST ###$/,$ { /^### MANIFEST ###$/d; p; }' \\\n            \"$ARITH_DIR/nexus\" > \"$FDB_DIR/arithmetic.manifest\"\n        # Patch build_dir in manifest to point to the work dir\n        python3 -c \"\nimport json, sys\nwith open(sys.argv[1]) as f:\n    m = json.load(f)\nm['build_dir'] = sys.argv[2]\nwith open(sys.argv[1], 'w') as f:\n    json.dump(m, f)\n\" \"$FDB_DIR/arithmetic.manifest\" \"$ARITH_DIR\"\n    fi\n\n    if [ ! -s \"$FDB_DIR/arithmetic.manifest\" ]; then\n        echo \"  ${RED}SKIP: could not extract manifest${RESET}\"\n        echo \"\"\n        TOTAL=$((TOTAL + 1))\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"router: could not extract manifest\")\n    fi\n\n    if [ -s \"$FDB_DIR/arithmetic.manifest\" ]; then\n        ROUTER_PORT=$(pick_port)\n\n        # Start router (use the morloc-nexus binary)\n        NEXUS_PATH=\"$(which morloc-nexus 2>/dev/null || echo \"$HOME/.local/bin/morloc-nexus\")\"\n        (exec \"$NEXUS_PATH\" --router --http-port \"$ROUTER_PORT\" --fdb \"$FDB_DIR\" 2>\"$FDB_DIR/router.log\") &\n        ROUTER_PID=$!\n        DAEMON_PIDS+=(\"$ROUTER_PID\")\n\n        wait_for_http \"$ROUTER_PORT\" 15 || true\n\n        # Health check\n        result=$(curl -s \"http://127.0.0.1:${ROUTER_PORT}/health\" 2>/dev/null) || result=\"\"\n        status=$(json_field \"$result\" \"status\" 2>/dev/null) || status=\"\"\n        assert_test \"router GET /health\" \"ok\" \"$status\"\n\n        # List programs\n        disco=$(curl -s \"http://127.0.0.1:${ROUTER_PORT}/programs\" 2>/dev/null) || disco=\"\"\n        assert_contains \"router GET /programs lists arithmetic\" \"arithmetic\" \"$disco\"\n\n        # Full discovery\n        disco=$(curl -s \"http://127.0.0.1:${ROUTER_PORT}/discover\" 2>/dev/null) || disco=\"\"\n        assert_contains \"router GET /discover lists programs\" \"programs\" \"$disco\"\n\n        # Per-program discovery\n        disco=$(curl -s \"http://127.0.0.1:${ROUTER_PORT}/discover/arithmetic\" 2>/dev/null) || disco=\"\"\n        assert_contains \"router GET /discover/arithmetic lists add\" \"add\" \"$disco\"\n\n        # Call through router: add(5, 10) -> 15.0\n        result=$(curl -s -X POST \"http://127.0.0.1:${ROUTER_PORT}/call/arithmetic/add\" \\\n            -H \"Content-Type: application/json\" -d '[5, 10]' 2>/dev/null) || result=\"\"\n        status=$(json_field \"$result\" \"status\" 2>/dev/null) || status=\"\"\n        val=$(json_field \"$result\" \"result\" 2>/dev/null) || val=\"\"\n        assert_test \"router call add status=ok\" \"ok\" \"$status\"\n        assert_test \"router call add [5,10] result=15\" \"15\" \"$val\"\n\n        # Call through router: square(4) -> 16\n        result=$(curl -s -X POST \"http://127.0.0.1:${ROUTER_PORT}/call/arithmetic/square\" \\\n            -H \"Content-Type: application/json\" -d '[4]' 2>/dev/null) || result=\"\"\n        val=$(json_field \"$result\" \"result\" 2>/dev/null) || val=\"\"\n        assert_test \"router call square [4] result=16\" \"16\" \"$val\"\n\n        # Error: unknown program\n        result=$(curl -s -X POST \"http://127.0.0.1:${ROUTER_PORT}/call/bogus/add\" \\\n            -H \"Content-Type: application/json\" -d '[1,2]' 2>/dev/null) || result=\"\"\n        assert_contains \"router unknown program returns error\" \"error\" \"$result\"\n\n        # Shutdown router\n        stop_daemon \"$ROUTER_PID\"\n\n        # Verify child daemons are also cleaned up\n        sleep 1\n        remaining=$(pgrep -f \"morloc-router-arithmetic\" 2>/dev/null | wc -l) || remaining=0\n\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-50s \" \"router cleans up child daemons\"\n        if [ \"$remaining\" -eq 0 ]; then\n            printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n            PASSED=$((PASSED + 1))\n        else\n            printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n            FAILED=$((FAILED + 1))\n            FAILURES+=(\"router cleans up child daemons\")\n        fi\n    fi\n\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 11: Connection timeout resilience\n# ======================================================================\n\nif should_run \"timeout\"; then\n    echo \"${BOLD}[timeout] Connection timeout resilience${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    SOCK_PATH=\"/tmp/morloc-test-timeout-$$.sock\"\n    SOCKET_FILES+=(\"$SOCK_PATH\")\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\" --socket \"$SOCK_PATH\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    # Open a socket, send partial data (just 2 bytes of the 4-byte length prefix),\n    # then don't send anything else. The daemon should time out and remain responsive.\n    python3 -c \"\nimport socket, time\ns = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\ns.connect('$SOCK_PATH')\ns.sendall(b'\\\\x00\\\\x00')  # partial length prefix\ntime.sleep(1)\ns.close()\n\" 2>/dev/null &\n    STALL_PID=$!\n\n    # Wait a moment, then verify daemon still responds\n    sleep 2\n    result=$(curl -s \"http://127.0.0.1:${HTTP_PORT}/health\" 2>/dev/null) || result=\"\"\n    status=$(json_field \"$result\" \"status\" 2>/dev/null) || status=\"\"\n    assert_test \"daemon responsive after stalled client\" \"ok\" \"$status\"\n\n    wait \"$STALL_PID\" 2>/dev/null || true\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 12: Pool crash recovery\n# ======================================================================\n\nif should_run \"pool-recovery\"; then\n    echo \"${BOLD}[pool-recovery] Pool crash and restart${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n    DAEMON_PID_FOR_RECOVERY=$LAST_DAEMON_PID\n\n    # Verify it works before killing\n    result=$(curl -s -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n        -H \"Content-Type: application/json\" -d '[1, 2]')\n    val=$(json_field \"$result\" \"result\")\n    assert_test \"pool-recovery: works before kill\" \"3\" \"$val\"\n\n    # Find and kill pool child processes\n    pool_pids=$(pgrep -P \"$DAEMON_PID_FOR_RECOVERY\" 2>/dev/null) || pool_pids=\"\"\n    if [ -n \"$pool_pids\" ]; then\n        for ppid in $pool_pids; do\n            kill -9 \"$ppid\" 2>/dev/null || true\n        done\n\n        # Wait for restart (daemon checks on every poll cycle = 1s)\n        sleep 4\n\n        # Verify it works after pool restart\n        result=$(curl -s --max-time 10 -X POST \"http://127.0.0.1:${HTTP_PORT}/call/add\" \\\n            -H \"Content-Type: application/json\" -d '[10, 20]')\n        val=$(json_field \"$result\" \"result\" 2>/dev/null) || val=\"\"\n        assert_test \"pool-recovery: works after pool kill\" \"30\" \"$val\"\n    else\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-50s \" \"pool-recovery: works after pool kill\"\n        printf \"%sSKIP%s (no child pools found)\\n\" \"$YELLOW\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    fi\n\n    stop_daemon \"$DAEMON_PID_FOR_RECOVERY\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Test Group 13: Health endpoint with pool status\n# ======================================================================\n\nif should_run \"pool-health\"; then\n    echo \"${BOLD}[pool-health] Health endpoint reports pool status${RESET}\"\n\n    HTTP_PORT=$(pick_port)\n    start_daemon \"$ARITH_DIR\" --http-port \"$HTTP_PORT\"\n    wait_for_http \"$HTTP_PORT\" 10\n\n    result=$(curl -s \"http://127.0.0.1:${HTTP_PORT}/health\")\n    assert_contains \"health response includes pools\" \"pools\" \"$result\"\n    assert_contains \"health response includes status ok\" \"ok\" \"$result\"\n\n    # Check that pools array has at least one true entry\n    has_alive=$(python3 -c \"\nimport json, sys\ndata = json.loads(sys.argv[1])\nresult = data.get('result', data)\npools = result.get('pools', [])\nprint('true' if any(pools) else 'false')\n\" \"$result\" 2>/dev/null) || has_alive=\"false\"\n    assert_test \"health shows pools alive\" \"true\" \"$has_alive\"\n\n    stop_daemon \"$LAST_DAEMON_PID\"\n    echo \"\"\nfi\n\n# ======================================================================\n# Results\n# ======================================================================\n\necho \"=== Results ===\"\necho \"${GREEN}Passed: $PASSED${RESET}, ${RED}Failed: $FAILED${RESET}, Total: $TOTAL\"\n\nif (( FAILED > 0 )); then\n    echo \"\"\n    echo \"${RED}Failures:${RESET}\"\n    for f in \"${FAILURES[@]}\"; do\n        echo \"  ${RED}-${RESET} $f\"\n    done\n    exit 1\nfi\necho \"${GREEN}${BOLD}ALL PASSED${RESET}\"\n"
  },
  {
    "path": "test-suite/daemon-tests/strings.loc",
    "content": "module strings (greet, strlen)\n\nimport root-py\n\nsource Py from \"helpers.py\" (\"greet\", \"strlen\")\n\ngreet :: Str -> Str\nstrlen :: Str -> Int\n"
  },
  {
    "path": "test-suite/error-message-tests/README",
    "content": "All programs here are broken by design. They should all fail and give reasonable\nerror messages.\n"
  },
  {
    "path": "test-suite/error-message-tests/bad-source/Makefile",
    "content": "all:\n\tmorloc make foo.loc\n"
  },
  {
    "path": "test-suite/error-message-tests/bad-source/foo.loc",
    "content": "module main (foo)\n\nsource Py from \"badthing\" (\"foo\" as bar)\n\nclass Dohicky where\n  bar :: Int\n\nfoo = 1\n"
  },
  {
    "path": "test-suite/error-message-tests/cyclical-import/foo.loc",
    "content": "module foo (foo)\n\nimport foo (foo)\n"
  },
  {
    "path": "test-suite/error-message-tests/list-error-1/Makefile",
    "content": "all:\n\tmorloc make foo.loc\n"
  },
  {
    "path": "test-suite/error-message-tests/list-error-1/foo.loc",
    "content": "module main (foo)\n\ntype Py => Real = \"float\"\n\nfoo :: [Real]\nfoo = [2.0, \"yolo\", 5.6]\n"
  },
  {
    "path": "test-suite/error-message-tests/missing-import-module/foo.loc",
    "content": "module foo (bar)\n\nimport barm (bar)\n"
  },
  {
    "path": "test-suite/error-message-tests/missing-import-term/barm.loc",
    "content": "module barm (bar)\n\nbar a :: a -> a -> a \n"
  },
  {
    "path": "test-suite/error-message-tests/missing-import-term/main.loc",
    "content": "module main (bar)\n\nimport barm (lemonhope, bar, pickle)\n"
  },
  {
    "path": "test-suite/error-message-tests/pool-crash-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus crashme 0 2>&1 | grep -ci \"signal\\|crash\\|abort\" > obs.txt || echo \"0\" > obs.txt\n\nclean:\n\trm -rf nexus pools* __pycache__ *.err obs.txt .manifest\n"
  },
  {
    "path": "test-suite/error-message-tests/pool-crash-cpp/foo.hpp",
    "content": "#include <cstdlib>\n\nint cpp_crash(int x){\n    abort();\n    return x;\n}\n"
  },
  {
    "path": "test-suite/error-message-tests/pool-crash-cpp/main",
    "content": "#!/bin/sh\nexec morloc-nexus \"$0\" \"$@\"\n### MANIFEST ###\n{\"version\":1,\"name\":\"main\",\"build_dir\":\"/home/z/projects/morloc-core/morloc-workspace/compiler/morloc/test-suite/error-message-tests/pool-crash-cpp\",\"build_time\":1773294321,\"pools\":[{\"lang\":\"cpp\",\"exec\":[\"pools/main/pool-cpp.out\"],\"socket\":\"pipe-cpp\"}],\"commands\":[{\"name\":\"crashme\",\"type\":\"remote\",\"mid\":1,\"pool\":0,\"needed_pools\":[0],\"arg_schemas\":[\"<int>i4\"],\"return_schema\":\"<int>i4\",\"desc\":[],\"return_type\":\"Int\",\"return_desc\":[],\"args\":[{\"kind\":\"pos\",\"metavar\":null,\"type_desc\":\"Int\",\"quoted\":false,\"desc\":[]}],\"group\":null}],\"groups\":[]}"
  },
  {
    "path": "test-suite/error-message-tests/pool-crash-cpp/main.loc",
    "content": "module main (crashme)\n\nsource Cpp from \"foo.hpp\" (\"cpp_crash\")\n\ntype Cpp => Int = \"int\"\n\ncpp_crash :: Int -> Int\ncrashme = cpp_crash\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus failme 0 2>&1 | grep -c \"CPPERR\" > obs.txt || echo \"0\" > obs.txt\n\nclean:\n\trm -rf nexus pools* __pycache__ *.err obs.txt .manifest\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cpp/foo.hpp",
    "content": "#include <stdexcept>\n\nint cpp_fail(int x){\n    throw std::runtime_error(\"CPPERR: something went wrong in cpp\");\n    return x;\n}\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cpp/main.loc",
    "content": "module main (failme)\n\nsource Cpp from \"foo.hpp\" (\"cpp_fail\")\n\ntype Cpp => Int = \"int\"\n\ncpp_fail :: Int -> Int\nfailme = cpp_fail\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cross-lang/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus py_calls_cpp_err '\"hello\"' 2>&1 | grep -c \"CPPERR\" > obs.txt || echo \"0\" > obs.txt\n\t./nexus r_calls_py_err '\"hello\"' 2>&1 | grep -c \"PYERR\" >> obs.txt || echo \"0\" >> obs.txt\n\nclean:\n\trm -rf nexus pools* __pycache__ *.err obs.txt .manifest\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cross-lang/foo.R",
    "content": "r_id <- function(x) x\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cross-lang/foo.hpp",
    "content": "#include <stdexcept>\n#include <string>\n\nstd::string cpp_fail(std::string x){\n    throw std::runtime_error(\"CPPERR: cross-lang failure in cpp\");\n    return x;\n}\n\nstd::string cpp_id(std::string x){\n    return x;\n}\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cross-lang/foo.py",
    "content": "def py_fail(x):\n    raise ValueError(\"PYERR: cross-lang failure in python\")\n\ndef py_id(x):\n    return x\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-cross-lang/main.loc",
    "content": "module main (py_calls_cpp_err, r_calls_py_err)\n\nimport root ((.))\n\nsource Cpp from \"foo.hpp\" (\"cpp_fail\", \"cpp_id\")\nsource Py from \"foo.py\" (\"py_fail\", \"py_id\")\nsource R from \"foo.R\" (\"r_id\")\n\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\n\ncpp_fail :: Str -> Str\ncpp_id :: Str -> Str\npy_fail :: Str -> Str\npy_id :: Str -> Str\nr_id :: Str -> Str\n\n-- Python calls into C++ which throws\npy_calls_cpp_err :: Str -> Str\npy_calls_cpp_err = py_id . cpp_fail\n\n-- R calls into Python which raises\nr_calls_py_err :: Str -> Str\nr_calls_py_err = r_id . py_fail\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus failme 0 2>&1 | grep -c \"PYERR\" > obs.txt || echo \"0\" > obs.txt\n\nclean:\n\trm -rf nexus pools* __pycache__ *.err obs.txt .manifest\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-py/foo.py",
    "content": "def py_fail(x):\n    raise ValueError(\"PYERR: something went wrong in python\")\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-py/main.loc",
    "content": "module main (failme)\n\nsource Py from \"foo.py\" (\"py_fail\")\n\ntype Py => Int = \"int\"\n\npy_fail :: Int -> Int\nfailme = py_fail\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus failme 0 2>&1 | grep -c \"RERR\" > obs.txt || echo \"0\" > obs.txt\n\nclean:\n\trm -rf nexus pools* __pycache__ *.err obs.txt .manifest\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-r/foo.R",
    "content": "r_fail <- function(x){\n    stop(\"RERR: something went wrong in r\")\n}\n"
  },
  {
    "path": "test-suite/error-message-tests/runtime-error-r/main.loc",
    "content": "module main (failme)\n\nsource R from \"foo.R\" (\"r_fail\")\n\ntype R => Int = \"integer\"\n\nr_fail :: Int -> Int\nfailme = r_fail\n"
  },
  {
    "path": "test-suite/error-message-tests/term-masking/foo.loc",
    "content": "module m (foo) \n\nimport foo2 (foo)\nimport foo3 (foo)\n"
  },
  {
    "path": "test-suite/error-message-tests/term-masking/foo2.loc",
    "content": "module foo2 (foo)\n\nsource Py (\"foo\")\n\nfoo a :: a -> a -> a\n"
  },
  {
    "path": "test-suite/error-message-tests/term-masking/foo3.loc",
    "content": "module foo3 (foo)\n\nsource Py (\"foo\")\n\nfoo a :: a -> a -> a\n"
  },
  {
    "path": "test-suite/error-message-tests/typechecking-1/Makefile",
    "content": "all:\n\tmorloc make foo.loc\n"
  },
  {
    "path": "test-suite/error-message-tests/typechecking-1/foo.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfoo :: Real -> Real\nfoo x = x + \"2.0\" * 20.0\n"
  },
  {
    "path": "test-suite/error-message-tests/typechecking-2/Makefile",
    "content": "all:\n\tmorloc make foo.loc\n"
  },
  {
    "path": "test-suite/error-message-tests/typechecking-2/foo.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfoo :: Real -> Str\nfoo y = (\\x -> 32) y\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-bad-instance/foo.loc",
    "content": "module foo3 (*)\n\nclass Marcy a where\n  foo a :: a -> a -> a\n\ninstance Marcy a where\n  source Py (\"foo\", \"bar\")\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-bad-instance-expr/foo.loc",
    "content": "module foo3 (*)\n\nclass Marcy a where\n  foo a :: a -> a -> a\n\ninstance Marcy a where\n  source Py (\"foo\", \"bar\")\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-masking/foo.loc",
    "content": "module foo (*) \n\ninstance Marcy a where\n  source Py (\"foo\")\n\nimport foo2 (Marcy)\nimport foo3 (Marcy)\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-masking/foo2.loc",
    "content": "module foo2 (Marcy)\n\nclass Marcy a where\n  bar a :: a -> a -> a\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-masking/foo3.loc",
    "content": "module foo3 (Marcy)\n\nclass Marcy a where\n  foo a :: a -> a -> a\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-monomorphic-conflict/foo.loc",
    "content": "module foo (foo) \n\nimport foo2 (foo)\nimport foo3 (Marcy)\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-monomorphic-conflict/foo2.loc",
    "content": "module foo2 (foo)\n\nsource Py (\"foo\")\n\nfoo a :: a -> a -> a\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-monomorphic-conflict/foo3.loc",
    "content": "module foo3 (Marcy)\n\nclass Marcy a where\n  foo a :: a -> a -> a\n\ninstance Marcy a where\n  source Py (\"foo\")\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-overlap/foo.loc",
    "content": "module foo (*) \n\nimport foo2 (Farcy)\nimport foo3 (Marcy)\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-overlap/foo2.loc",
    "content": "module foo2 (Farcy)\n\nclass Farcy a where\n  foo a :: a -> a -> a\n\ninstance Farcy a where\n  source Py (\"foo\")\n"
  },
  {
    "path": "test-suite/error-message-tests/typeclass-overlap/foo3.loc",
    "content": "module foo3 (Marcy)\n\nclass Marcy a where\n  foo a :: a -> a -> a\n\ninstance Marcy a where\n  source Py (\"foo\")\n"
  },
  {
    "path": "test-suite/error-message-tests/undefined-term/Makefile",
    "content": "all:\n\tmorloc make foo.loc\n"
  },
  {
    "path": "test-suite/error-message-tests/undefined-term/foo.loc",
    "content": "module main (foo)\n\nfoo :: Real -> Real\nfoo x = x + \"2.0\" * 20.0\n"
  },
  {
    "path": "test-suite/executable-benchmark/.gitignore",
    "content": "nexus\npool.*\n"
  },
  {
    "path": "test-suite/executable-benchmark/README",
    "content": "# Executable Benchmarks\n\nThis folder contains benchmarks of the executables that morloc produces (not compile times).\n"
  },
  {
    "path": "test-suite/executable-benchmark/distributed/.gitignore",
    "content": ".morloc-cache\n"
  },
  {
    "path": "test-suite/executable-benchmark/distributed/Makefile",
    "content": "all:\n\tmorloc make main.loc\n\t./nexus bar [1,2,3]\n\nbuild:\n\tgcc -g -o nexus -O -I/home/z/.morloc/include nexus.c\n\tg++ -g --std=c++17 -o pool-cpp.out pool.cpp -std=c++17 -I/home/z/.morloc/include -I.\n\nclean:\n\trm -rf __pycache__ pool* nexus* log\n"
  },
  {
    "path": "test-suite/executable-benchmark/distributed/foo.cpp",
    "content": "#include <vector>\n\ndouble cmean(std::vector<double> xs){\n    if(xs.size() == 0){\n        return 0;\n    }\n    double total = 0;\n    for(size_t i = 0; i < xs.size(); i++){\n        total += xs[i];\n    }\n    return total / xs.size();\n}\n\ndouble cdouble(double x){\n    return x * 2;\n}\n"
  },
  {
    "path": "test-suite/executable-benchmark/distributed/foo.py",
    "content": "import random\nimport math\nimport multiprocessing\n\n\ndef simulate(n: int) -> list[float]:\n    return [random.normalvariate(mu=0, sigma=1) for _ in range(n)]\n\ndef mean(xs: list[float]) -> float:\n    return sum(xs) / len(xs)\n\ndef sd(xs: list[float]) -> float:\n    mu = mean(xs)\n    return math.sqrt(sum([(x - mu) * (x - mu) for x in xs]) / (len(xs) - 1))\n\ndef double(x: float):\n    return x * 2\n\ndef nTrials(n: int, a, f):\n    with multiprocessing.Pool() as pool:\n        results = pool.map(f, (a for _ in range(n)))\n    return results\n"
  },
  {
    "path": "test-suite/executable-benchmark/distributed/main.loc",
    "content": "-- module m (bar)\n--\n-- source Py from \"foo.py\" (\"double\", \"mean\" as pmean)\n--\n-- type Py => Real = \"float\"\n-- type Py => List a = \"list\" a\n--\n-- pmean :: [Real] -> Real\n-- double :: Real -> Real\n--\n-- bar xs = double (large:pmean xs)\n\n\nmodule m (bar)\n\nsource Cpp from \"foo.cpp\" (\"cdouble\" as double, \"cmean\" as mean)\n\ntype Cpp => Real = \"double\"\ntype Cpp => List a = \"std::vector<$1>\" a\nmean :: [Real] -> Real\ndouble :: Real -> Real\n\nbar xs = double (large:mean xs)\n\n\n\n-- -- module m (foo, bar)\n-- -- module m (foo)\n-- module m (bar)\n--\n-- -- source R from \"foo.R\" (\"mean\" as rmean)\n-- -- source Py from \"foo.py\" (\"double\", \"mean\" as pmean)\n-- source Cpp from \"foo.cpp\" (\"cdouble\" as double, \"cmean\" as mean)\n--\n-- -- type Py => Real = \"float\"\n-- -- type Py => List a = \"list\" a\n--\n-- type Cpp => Real = \"double\"\n-- type Cpp => List a = \"std::vector<$1>\" a\n--\n-- -- type R => Real = \"numeric\"\n-- -- type R => List a = \"list\" a\n--\n-- -- rmean :: [Real] -> Real\n-- -- pmean :: [Real] -> Real\n-- mean :: [Real] -> Real\n-- double :: Real -> Real\n--\n-- -- -- This should *immediately* make a remote call from the nexus and then that\n-- -- -- remote nexus will call the remote pool\n-- -- foo = large:mean\n--\n-- -- Here the pool makes the remote call\n-- bar xs = double (large:mean xs)\n--\n-- -- type Py => Int = \"int\"\n-- -- type Py => Real = \"float\"\n-- -- type Py => Tuple2 a b = \"tuple\" a b\n-- -- type Py => List a = \"list\" a\n-- --\n-- -- source Py from \"foo.py\" (\"nTrials\", \"mean\", \"sd\", \"simulate\")\n-- --\n-- -- simulate :: Int -> [Real]\n-- -- mean :: [Real] -> Real\n-- -- sd :: [Real] -> Real\n-- --\n-- -- -- multi-threaded program for running a random process many times on the same input\n-- -- nTrials a b :: Int -> a -> (a -> b) -> [b]\n-- --\n-- -- foo :: Int -> Int -> (Real, Real)\n-- -- foo n size = (mean means, sd means)\n-- --   where\n-- --   means = nTrials n size (small:mean . simulate)\n--\n-- -- -- case 1: applications\n-- -- f1 x = small:foo x\n-- --\n-- -- -- case 2: compositions\n-- -- f1 = small:foo . bar\n-- --\n-- -- -- non-case: lambdas, instead bind the top named function, there always is one\n-- -- f3 = (\\y -> small:foo (bar y))\n-- --\n-- -- -- basically, we are always binding directly to a variable name\n-- --\n-- -- -- what about this? the label should travel with bar, so `f4 = small:foo`\n-- -- bar = small:foo\n-- -- f4 = bar\n"
  },
  {
    "path": "test-suite/executable-benchmark/distributed/main.yaml",
    "content": "default-group:\n  benchmark: false\n  cache: false\n  distribute: null\nlabeled-groups:\n  large:\n    benchmark: true\n    cache: true\n    remote:\n      threads: 32\n      memory: 128\n      gpus: 0\n      time: \"3-00:00:00\"\n  small:\n    cache: true\n    remote:\n      threads: 32\n      memory: 128\n      gpus: 0\n      time: \"0-01:00:00\"\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/.gitignore",
    "content": "__pycache__/\ncis.csv\ncis.markdown\nlog\ntrans.csv\ntrans.markdown\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/Makefile",
    "content": "all:\n\tbash test.sh\n\nclean:\n\trm -rf cis.csv cis.markdown log nexus pools trans.csv trans.markdown __pycache__\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/foo.R",
    "content": "mockr <- function(x){\n  Sys.sleep(1)\n  return(2*x)\n}\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/foo.loc",
    "content": "module parallel (pcis, scis, ptrans, strans)\n\nsource Py from \"parallel.py\" (\"pmap\", \"smap\")\nsource Py from \"foo.py\" (\"mockpy\")\nsource R from \"foo.R\" (\"mockr\")\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\n\ntype R => Int = \"numeric\"\ntype R => List a = \"list\" a\n\nmockr :: Int -> Int\nmockpy :: Int -> Int\npmap a b :: (a -> b) -> [a] -> [b]\nsmap a b :: (a -> b) -> [a] -> [b]\n\npcis = pmap mockpy\nptrans = pmap mockr\n\nscis = smap mockpy\nstrans = smap mockr\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/foo.py",
    "content": "import time\n\ndef mockpy(x):\n    time.sleep(1)\n    return 2*x\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/parallel.py",
    "content": "import multiprocessing\n\ndef pmap(f, xs):\n    with multiprocessing.Pool() as pool:\n        results = pool.map(f, xs)\n    return results\n\ndef smap(f, xs):\n    return [f(x) for x in xs]\n"
  },
  {
    "path": "test-suite/executable-benchmark/parallel-interop/test.sh",
    "content": "#!/usr/bin/env bash\n\nset -e\n\nmorloc make foo.loc\n\nhyperfine \\\n  -w 5 \\\n  -L test pcis,scis \\\n  --export-markdown cis.markdown \\\n  --export-csv cis.csv \\\n  \"./nexus {test} [1,2,3,4,5,6]\"\n\nhyperfine \\\n  -w 5 \\\n  -L test ptrans,strans \\\n  --export-markdown trans.markdown \\\n  --export-csv trans.csv \\\n  \"./nexus {test} [1,2,3,4,5,6]\"\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/.gitignore",
    "content": "__pycache__/\npool*\nstats.*\nlog\nz-*\nz\nstats-*.markdown\nnexus\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/Makefile",
    "content": "all:\n\tmorloc make foo.loc\n\nclean:\n\trm -rf pool* nexus __pycache__ stats.* log\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/foo.R",
    "content": "rid <- function(x) x\n\n# Generate an n MB random string\nnmb <- function(n){\n  stringi::stri_dup(\"x\", n * 1024 * 1024)\n}\n\nincr <- function(x, y) x + y\n\naddr <- function(x, y) x + y\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/foo.hpp",
    "content": "#include <iostream>\n#include <string>\n\ntemplate <class T>\nT cid(T x) {\n  return x;\n}\n\ntemplate <class T>\nint noop(const T& x) { return 1; }\n\nint strlen(const std::string& x){\n  return static_cast<int>(x.size());\n}\n\nstd::string nmb(int n) {\n    std::string result(1024 * 1024 * n, 'x');\n    return result;\n}\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/foo.loc",
    "content": "module foo\n  ( pZeroBaseline\n  , rZeroBaseline\n  , cZeroBaseline\n  , pTenBaseline\n  , rTenBaseline\n  , cTenBaseline\n  , pZeroToForeign\n  , pZeroFromForeign\n  , rZeroToForeign\n  , rZeroFromForeign\n  , pTenToForeign\n  , pTenFromForeign\n  , rTenToForeign\n  , rTenFromForeign\n  , rMarginalCost1\n  , rMarginalCost2\n  , rMarginalCost3\n  , rMarginalCost4\n  , pMarginalCost1\n  , pMarginalCost2\n  , pMarginalCost3\n  , pMarginalCost4\n  , mapManyPCP\n  , mapManyPCR\n  , memtest\n  )\n\nimport root ((.))\nimport root-py (Int, Str, List)\nimport root-cpp (Int, Str, List, Functor)\nimport root-r (Int, Str, List)\n\nsource py from \"foo.py\" (\"nmb\" as pnmb, \"pid\", \"morloc_len\" as plen, \"addp\")\nsource R from \"foo.R\" (\"nmb\" as rnmb, \"rid\", \"nchar\" as rlen, \"addr\")\nsource Cpp from \"foo.hpp\" (\"nmb\" as cnmb, \"cid\", \"strlen\" as clen)\n\naddp :: Int -> Int -> Int\naddr :: Int -> Int -> Int\n\n-- Generate a test string with a length of n megabytes\npnmb :: Int -> Str\nrnmb :: Int -> Str\ncnmb :: Int -> Str\n\nplen a :: Str -> Int\nrlen a :: Str -> Int\nclen a :: Str -> Int\n\npid a :: a -> a\nrid a :: a -> a\ncid a :: a -> a\n\n-- C(L) : constant cost of calling language L\n-- R(x) : cost of generating x megabyte string\n-- F(x,L,M) : cost of sending x MB from L to M\n\n-- 0MB test for baseline - t = C(L) + R(0)\npZeroBaseline = (plen . pnmb) 0\nrZeroBaseline = (rlen . rnmb) 0\ncZeroBaseline = (clen . cnmb) 0\n\n-- 10MB test for baseline - t = C(L) + R(10MB)\npTenBaseline = (plen . pnmb) 100 -- \nrTenBaseline = (rlen . rnmb) 100\ncTenBaseline = (clen . cnmb) 100\n\n-- 0MB test for foreign interop\npZeroToForeign   = (clen . pnmb) 0 -- C(c) + C(p) + F(0,p,c) + R(0)\npZeroFromForeign = (plen . cnmb) 0 -- C(c) + C(p) + F(0,c,p) + R(0)\n\nrZeroToForeign   = (clen . rnmb) 0 -- C(c) + C(r) + F(0,r,c) + R(0)\nrZeroFromForeign = (rlen . cnmb) 0 -- C(c) + C(r) + F(0,c,r) + R(0)\n\n-- 10MB test for foreign interop\npTenToForeign   = (clen . pnmb) 100 -- C(c) + C(p) + F(10,p,c) + R(10)\npTenFromForeign = (plen . cnmb) 100 -- C(c) + C(p) + F(10,c,p) + R(10)\n                                                                    \nrTenToForeign   = (clen . rnmb) 100 -- C(c) + C(r) + F(10,r,c) + R(10)\nrTenFromForeign = (rlen . cnmb) 100 -- C(c) + C(r) + F(10,c,r) + R(10)\n\n-- Test the marginal cost of foreign calls\n-- This will be high if, for example, we have to fire up the interpreter for every call to Python\nrMarginalCost1 = rnmb 0 \nrMarginalCost2 = (rid . cid . rnmb) 0 \nrMarginalCost3 = (rid . cid . rid . cid . rnmb) 0 \nrMarginalCost4 = (rid . cid . rid . cid . rid . cid . rnmb) 0 \n\npMarginalCost1 = pnmb 0 \npMarginalCost2 = (pid . cid . pnmb) 0 \npMarginalCost3 = (pid . cid . pid . cid . pnmb) 0 \npMarginalCost4 = (pid . cid . pid . cid . pid . cid . pnmb) 0 \n\nmemtest :: [Int] -> [(Int, Int)]\nmemtest xs = map (\\x -> (pid x, plen s)) xs where\n  s = cnmb 10\n\nmapManyPCP :: Int -> [Int] -> [Int]\nmapManyPCP x xs = map (addp x) xs\n\nmapManyPCR :: Int -> [Int] -> [Int]\nmapManyPCR x xs = map (addr x) xs\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/foo.py",
    "content": "def nmb(n):\n    return \"x\" * n * 1024 * 1024\n\ndef pid(x):\n    return x\n\ndef addp(x, y):\n    return x + y\n\ndef morloc_len(xs):\n    return len(xs)\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/long-list.json",
    "content": "[ 1,\n  2,\n  3,\n  4,\n  5,\n  6,\n  7,\n  8,\n  9,\n  10,\n  11,\n  12,\n  13,\n  14,\n  15,\n  16,\n  17,\n  18,\n  19,\n  20,\n  21,\n  22,\n  23,\n  24,\n  25,\n  26,\n  27,\n  28,\n  29,\n  30,\n  31,\n  32,\n  33,\n  34,\n  35,\n  36,\n  37,\n  38,\n  39,\n  40,\n  41,\n  42,\n  43,\n  44,\n  45,\n  46,\n  47,\n  48,\n  49,\n  50,\n  51,\n  52,\n  53,\n  54,\n  55,\n  56,\n  57,\n  58,\n  59,\n  60,\n  61,\n  62,\n  63,\n  64,\n  65,\n  66,\n  67,\n  68,\n  69,\n  70,\n  71,\n  72,\n  73,\n  74,\n  75,\n  76,\n  77,\n  78,\n  79,\n  80,\n  81,\n  82,\n  83,\n  84,\n  85,\n  86,\n  87,\n  88,\n  89,\n  90,\n  91,\n  92,\n  93,\n  94,\n  95,\n  96,\n  97,\n  98,\n  99,\n  100,\n  101,\n  102,\n  103,\n  104,\n  105,\n  106,\n  107,\n  108,\n  109,\n  110,\n  111,\n  112,\n  113,\n  114,\n  115,\n  116,\n  117,\n  118,\n  119,\n  120,\n  121,\n  122,\n  123,\n  124,\n  125,\n  126,\n  127,\n  128,\n  129,\n  130,\n  131,\n  132,\n  133,\n  134,\n  135,\n  136,\n  137,\n  138,\n  139,\n  140,\n  141,\n  142,\n  143,\n  144,\n  145,\n  146,\n  147,\n  148,\n  149,\n  150,\n  151,\n  152,\n  153,\n  154,\n  155,\n  156,\n  157,\n  158,\n  159,\n  160,\n  161,\n  162,\n  163,\n  164,\n  165,\n  166,\n  167,\n  168,\n  169,\n  170,\n  171,\n  172,\n  173,\n  174,\n  175,\n  176,\n  177,\n  178,\n  179,\n  180,\n  181,\n  182,\n  183,\n  184,\n  185,\n  186,\n  187,\n  188,\n  189,\n  190,\n  191,\n  192,\n  193,\n  194,\n  195,\n  196,\n  197,\n  198,\n  199,\n  200,\n  201,\n  202,\n  203,\n  204,\n  205,\n  206,\n  207,\n  208,\n  209,\n  210,\n  211,\n  212,\n  213,\n  214,\n  215,\n  216,\n  217,\n  218,\n  219,\n  220,\n  221,\n  222,\n  223,\n  224,\n  225,\n  226,\n  227,\n  228,\n  229,\n  230,\n  231,\n  232,\n  233,\n  234,\n  235,\n  236,\n  237,\n  238,\n  239,\n  240,\n  241,\n  242,\n  243,\n  244,\n  245,\n  246,\n  247,\n  248,\n  249,\n  250,\n  251,\n  252,\n  253,\n  254,\n  255,\n  256,\n  257,\n  258,\n  259,\n  260,\n  261,\n  262,\n  263,\n  264,\n  265,\n  266,\n  267,\n  268,\n  269,\n  270,\n  271,\n  272,\n  273,\n  274,\n  275,\n  276,\n  277,\n  278,\n  279,\n  280,\n  281,\n  282,\n  283,\n  284,\n  285,\n  286,\n  287,\n  288,\n  289,\n  290,\n  291,\n  292,\n  293,\n  294,\n  295,\n  296,\n  297,\n  298,\n  299,\n  300,\n  301,\n  302,\n  303,\n  304,\n  305,\n  306,\n  307,\n  308,\n  309,\n  310,\n  311,\n  312,\n  313,\n  314,\n  315,\n  316,\n  317,\n  318,\n  319,\n  320,\n  321,\n  322,\n  323,\n  324,\n  325,\n  326,\n  327,\n  328,\n  329,\n  330,\n  331,\n  332,\n  333,\n  334,\n  335,\n  336,\n  337,\n  338,\n  339,\n  340,\n  341,\n  342,\n  343,\n  344,\n  345,\n  346,\n  347,\n  348,\n  349,\n  350,\n  351,\n  352,\n  353,\n  354,\n  355,\n  356,\n  357,\n  358,\n  359,\n  360,\n  361,\n  362,\n  363,\n  364,\n  365,\n  366,\n  367,\n  368,\n  369,\n  370,\n  371,\n  372,\n  373,\n  374,\n  375,\n  376,\n  377,\n  378,\n  379,\n  380,\n  381,\n  382,\n  383,\n  384,\n  385,\n  386,\n  387,\n  388,\n  389,\n  390,\n  391,\n  392,\n  393,\n  394,\n  395,\n  396,\n  397,\n  398,\n  399,\n  400,\n  401,\n  402,\n  403,\n  404,\n  405,\n  406,\n  407,\n  408,\n  409,\n  410,\n  411,\n  412,\n  413,\n  414,\n  415,\n  416,\n  417,\n  418,\n  419,\n  420,\n  421,\n  422,\n  423,\n  424,\n  425,\n  426,\n  427,\n  428,\n  429,\n  430,\n  431,\n  432,\n  433,\n  434,\n  435,\n  436,\n  437,\n  438,\n  439,\n  440,\n  441,\n  442,\n  443,\n  444,\n  445,\n  446,\n  447,\n  448,\n  449,\n  450,\n  451,\n  452,\n  453,\n  454,\n  455,\n  456,\n  457,\n  458,\n  459,\n  460,\n  461,\n  462,\n  463,\n  464,\n  465,\n  466,\n  467,\n  468,\n  469,\n  470,\n  471,\n  472,\n  473,\n  474,\n  475,\n  476,\n  477,\n  478,\n  479,\n  480,\n  481,\n  482,\n  483,\n  484,\n  485,\n  486,\n  487,\n  488,\n  489,\n  490,\n  491,\n  492,\n  493,\n  494,\n  495,\n  496,\n  497,\n  498,\n  499,\n  500,\n  501,\n  502,\n  503,\n  504,\n  505,\n  506,\n  507,\n  508,\n  509,\n  510,\n  511,\n  512,\n  513,\n  514,\n  515,\n  516,\n  517,\n  518,\n  519,\n  520,\n  521,\n  522,\n  523,\n  524,\n  525,\n  526,\n  527,\n  528,\n  529,\n  530,\n  531,\n  532,\n  533,\n  534,\n  535,\n  536,\n  537,\n  538,\n  539,\n  540,\n  541,\n  542,\n  543,\n  544,\n  545,\n  546,\n  547,\n  548,\n  549,\n  550,\n  551,\n  552,\n  553,\n  554,\n  555,\n  556,\n  557,\n  558,\n  559,\n  560,\n  561,\n  562,\n  563,\n  564,\n  565,\n  566,\n  567,\n  568,\n  569,\n  570,\n  571,\n  572,\n  573,\n  574,\n  575,\n  576,\n  577,\n  578,\n  579,\n  580,\n  581,\n  582,\n  583,\n  584,\n  585,\n  586,\n  587,\n  588,\n  589,\n  590,\n  591,\n  592,\n  593,\n  594,\n  595,\n  596,\n  597,\n  598,\n  599,\n  600,\n  601,\n  602,\n  603,\n  604,\n  605,\n  606,\n  607,\n  608,\n  609,\n  610,\n  611,\n  612,\n  613,\n  614,\n  615,\n  616,\n  617,\n  618,\n  619,\n  620,\n  621,\n  622,\n  623,\n  624,\n  625,\n  626,\n  627,\n  628,\n  629,\n  630,\n  631,\n  632,\n  633,\n  634,\n  635,\n  636,\n  637,\n  638,\n  639,\n  640,\n  641,\n  642,\n  643,\n  644,\n  645,\n  646,\n  647,\n  648,\n  649,\n  650,\n  651,\n  652,\n  653,\n  654,\n  655,\n  656,\n  657,\n  658,\n  659,\n  660,\n  661,\n  662,\n  663,\n  664,\n  665,\n  666,\n  667,\n  668,\n  669,\n  670,\n  671,\n  672,\n  673,\n  674,\n  675,\n  676,\n  677,\n  678,\n  679,\n  680,\n  681,\n  682,\n  683,\n  684,\n  685,\n  686,\n  687,\n  688,\n  689,\n  690,\n  691,\n  692,\n  693,\n  694,\n  695,\n  696,\n  697,\n  698,\n  699,\n  700,\n  701,\n  702,\n  703,\n  704,\n  705,\n  706,\n  707,\n  708,\n  709,\n  710,\n  711,\n  712,\n  713,\n  714,\n  715,\n  716,\n  717,\n  718,\n  719,\n  720,\n  721,\n  722,\n  723,\n  724,\n  725,\n  726,\n  727,\n  728,\n  729,\n  730,\n  731,\n  732,\n  733,\n  734,\n  735,\n  736,\n  737,\n  738,\n  739,\n  740,\n  741,\n  742,\n  743,\n  744,\n  745,\n  746,\n  747,\n  748,\n  749,\n  750,\n  751,\n  752,\n  753,\n  754,\n  755,\n  756,\n  757,\n  758,\n  759,\n  760,\n  761,\n  762,\n  763,\n  764,\n  765,\n  766,\n  767,\n  768,\n  769,\n  770,\n  771,\n  772,\n  773,\n  774,\n  775,\n  776,\n  777,\n  778,\n  779,\n  780,\n  781,\n  782,\n  783,\n  784,\n  785,\n  786,\n  787,\n  788,\n  789,\n  790,\n  791,\n  792,\n  793,\n  794,\n  795,\n  796,\n  797,\n  798,\n  799,\n  800,\n  801,\n  802,\n  803,\n  804,\n  805,\n  806,\n  807,\n  808,\n  809,\n  810,\n  811,\n  812,\n  813,\n  814,\n  815,\n  816,\n  817,\n  818,\n  819,\n  820,\n  821,\n  822,\n  823,\n  824,\n  825,\n  826,\n  827,\n  828,\n  829,\n  830,\n  831,\n  832,\n  833,\n  834,\n  835,\n  836,\n  837,\n  838,\n  839,\n  840,\n  841,\n  842,\n  843,\n  844,\n  845,\n  846,\n  847,\n  848,\n  849,\n  850,\n  851,\n  852,\n  853,\n  854,\n  855,\n  856,\n  857,\n  858,\n  859,\n  860,\n  861,\n  862,\n  863,\n  864,\n  865,\n  866,\n  867,\n  868,\n  869,\n  870,\n  871,\n  872,\n  873,\n  874,\n  875,\n  876,\n  877,\n  878,\n  879,\n  880,\n  881,\n  882,\n  883,\n  884,\n  885,\n  886,\n  887,\n  888,\n  889,\n  890,\n  891,\n  892,\n  893,\n  894,\n  895,\n  896,\n  897,\n  898,\n  899,\n  900,\n  901,\n  902,\n  903,\n  904,\n  905,\n  906,\n  907,\n  908,\n  909,\n  910,\n  911,\n  912,\n  913,\n  914,\n  915,\n  916,\n  917,\n  918,\n  919,\n  920,\n  921,\n  922,\n  923,\n  924,\n  925,\n  926,\n  927,\n  928,\n  929,\n  930,\n  931,\n  932,\n  933,\n  934,\n  935,\n  936,\n  937,\n  938,\n  939,\n  940,\n  941,\n  942,\n  943,\n  944,\n  945,\n  946,\n  947,\n  948,\n  949,\n  950,\n  951,\n  952,\n  953,\n  954,\n  955,\n  956,\n  957,\n  958,\n  959,\n  960,\n  961,\n  962,\n  963,\n  964,\n  965,\n  966,\n  967,\n  968,\n  969,\n  970,\n  971,\n  972,\n  973,\n  974,\n  975,\n  976,\n  977,\n  978,\n  979,\n  980,\n  981,\n  982,\n  983,\n  984,\n  985,\n  986,\n  987,\n  988,\n  989,\n  990,\n  991,\n  992,\n  993,\n  994,\n  995,\n  996,\n  997,\n  998,\n  999,\n  1000,\n  1001,\n  1002,\n  1003,\n  1004,\n  1005,\n  1006,\n  1007,\n  1008,\n  1009,\n  1010,\n  1011,\n  1012,\n  1013,\n  1014,\n  1015,\n  1016,\n  1017,\n  1018,\n  1019,\n  1020,\n  1021,\n  1022,\n  1023,\n  1024,\n  1025,\n  1026,\n  1027,\n  1028,\n  1029,\n  1030,\n  1031,\n  1032,\n  1033,\n  1034,\n  1035,\n  1036,\n  1037,\n  1038,\n  1039,\n  1040,\n  1041,\n  1042,\n  1043,\n  1044,\n  1045,\n  1046,\n  1047,\n  1048,\n  1049,\n  1050,\n  1051,\n  1052,\n  1053,\n  1054,\n  1055,\n  1056,\n  1057,\n  1058,\n  1059,\n  1060,\n  1061,\n  1062,\n  1063,\n  1064,\n  1065,\n  1066,\n  1067,\n  1068,\n  1069,\n  1070,\n  1071,\n  1072,\n  1073,\n  1074,\n  1075,\n  1076,\n  1077,\n  1078,\n  1079,\n  1080,\n  1081,\n  1082,\n  1083,\n  1084,\n  1085,\n  1086,\n  1087,\n  1088,\n  1089,\n  1090,\n  1091,\n  1092,\n  1093,\n  1094,\n  1095,\n  1096,\n  1097,\n  1098,\n  1099,\n  1100,\n  1101,\n  1102,\n  1103,\n  1104,\n  1105,\n  1106,\n  1107,\n  1108,\n  1109,\n  1110,\n  1111,\n  1112,\n  1113,\n  1114,\n  1115,\n  1116,\n  1117,\n  1118,\n  1119,\n  1120,\n  1121,\n  1122,\n  1123,\n  1124,\n  1125,\n  1126,\n  1127,\n  1128,\n  1129,\n  1130,\n  1131,\n  1132,\n  1133,\n  1134,\n  1135,\n  1136,\n  1137,\n  1138,\n  1139,\n  1140,\n  1141,\n  1142,\n  1143,\n  1144,\n  1145,\n  1146,\n  1147,\n  1148,\n  1149,\n  1150,\n  1151,\n  1152,\n  1153,\n  1154,\n  1155,\n  1156,\n  1157,\n  1158,\n  1159,\n  1160,\n  1161,\n  1162,\n  1163,\n  1164,\n  1165,\n  1166,\n  1167,\n  1168,\n  1169,\n  1170,\n  1171,\n  1172,\n  1173,\n  1174,\n  1175,\n  1176,\n  1177,\n  1178,\n  1179,\n  1180,\n  1181,\n  1182,\n  1183,\n  1184,\n  1185,\n  1186,\n  1187,\n  1188,\n  1189,\n  1190,\n  1191,\n  1192,\n  1193,\n  1194,\n  1195,\n  1196,\n  1197,\n  1198,\n  1199,\n  1200,\n  1201,\n  1202,\n  1203,\n  1204,\n  1205,\n  1206,\n  1207,\n  1208,\n  1209,\n  1210,\n  1211,\n  1212,\n  1213,\n  1214,\n  1215,\n  1216,\n  1217,\n  1218,\n  1219,\n  1220,\n  1221,\n  1222,\n  1223,\n  1224,\n  1225,\n  1226,\n  1227,\n  1228,\n  1229,\n  1230,\n  1231,\n  1232,\n  1233,\n  1234,\n  1235,\n  1236,\n  1237,\n  1238,\n  1239,\n  1240,\n  1241,\n  1242,\n  1243,\n  1244,\n  1245,\n  1246,\n  1247,\n  1248,\n  1249,\n  1250,\n  1251,\n  1252,\n  1253,\n  1254,\n  1255,\n  1256,\n  1257,\n  1258,\n  1259,\n  1260,\n  1261,\n  1262,\n  1263,\n  1264,\n  1265,\n  1266,\n  1267,\n  1268,\n  1269,\n  1270,\n  1271,\n  1272,\n  1273,\n  1274,\n  1275,\n  1276,\n  1277,\n  1278,\n  1279,\n  1280,\n  1281,\n  1282,\n  1283,\n  1284,\n  1285,\n  1286,\n  1287,\n  1288,\n  1289,\n  1290,\n  1291,\n  1292,\n  1293,\n  1294,\n  1295,\n  1296,\n  1297,\n  1298,\n  1299,\n  1300,\n  1301,\n  1302,\n  1303,\n  1304,\n  1305,\n  1306,\n  1307,\n  1308,\n  1309,\n  1310,\n  1311,\n  1312,\n  1313,\n  1314,\n  1315,\n  1316,\n  1317,\n  1318,\n  1319,\n  1320,\n  1321,\n  1322,\n  1323,\n  1324,\n  1325,\n  1326,\n  1327,\n  1328,\n  1329,\n  1330,\n  1331,\n  1332,\n  1333,\n  1334,\n  1335,\n  1336,\n  1337,\n  1338,\n  1339,\n  1340,\n  1341,\n  1342,\n  1343,\n  1344,\n  1345,\n  1346,\n  1347,\n  1348,\n  1349,\n  1350,\n  1351,\n  1352,\n  1353,\n  1354,\n  1355,\n  1356,\n  1357,\n  1358,\n  1359,\n  1360,\n  1361,\n  1362,\n  1363,\n  1364,\n  1365,\n  1366,\n  1367,\n  1368,\n  1369,\n  1370,\n  1371,\n  1372,\n  1373,\n  1374,\n  1375,\n  1376,\n  1377,\n  1378,\n  1379,\n  1380,\n  1381,\n  1382,\n  1383,\n  1384,\n  1385,\n  1386,\n  1387,\n  1388,\n  1389,\n  1390,\n  1391,\n  1392,\n  1393,\n  1394,\n  1395,\n  1396,\n  1397,\n  1398,\n  1399,\n  1400,\n  1401,\n  1402,\n  1403,\n  1404,\n  1405,\n  1406,\n  1407,\n  1408,\n  1409,\n  1410,\n  1411,\n  1412,\n  1413,\n  1414,\n  1415,\n  1416,\n  1417,\n  1418,\n  1419,\n  1420,\n  1421,\n  1422,\n  1423,\n  1424,\n  1425,\n  1426,\n  1427,\n  1428,\n  1429,\n  1430,\n  1431,\n  1432,\n  1433,\n  1434,\n  1435,\n  1436,\n  1437,\n  1438,\n  1439,\n  1440,\n  1441,\n  1442,\n  1443,\n  1444,\n  1445,\n  1446,\n  1447,\n  1448,\n  1449,\n  1450,\n  1451,\n  1452,\n  1453,\n  1454,\n  1455,\n  1456,\n  1457,\n  1458,\n  1459,\n  1460,\n  1461,\n  1462,\n  1463,\n  1464,\n  1465,\n  1466,\n  1467,\n  1468,\n  1469,\n  1470,\n  1471,\n  1472,\n  1473,\n  1474,\n  1475,\n  1476,\n  1477,\n  1478,\n  1479,\n  1480,\n  1481,\n  1482,\n  1483,\n  1484,\n  1485,\n  1486,\n  1487,\n  1488,\n  1489,\n  1490,\n  1491,\n  1492,\n  1493,\n  1494,\n  1495,\n  1496,\n  1497,\n  1498,\n  1499,\n  1500,\n  1501,\n  1502,\n  1503,\n  1504,\n  1505,\n  1506,\n  1507,\n  1508,\n  1509,\n  1510,\n  1511,\n  1512,\n  1513,\n  1514,\n  1515,\n  1516,\n  1517,\n  1518,\n  1519,\n  1520,\n  1521,\n  1522,\n  1523,\n  1524,\n  1525,\n  1526,\n  1527,\n  1528,\n  1529,\n  1530,\n  1531,\n  1532,\n  1533,\n  1534,\n  1535,\n  1536,\n  1537,\n  1538,\n  1539,\n  1540,\n  1541,\n  1542,\n  1543,\n  1544,\n  1545,\n  1546,\n  1547,\n  1548,\n  1549,\n  1550,\n  1551,\n  1552,\n  1553,\n  1554,\n  1555,\n  1556,\n  1557,\n  1558,\n  1559,\n  1560,\n  1561,\n  1562,\n  1563,\n  1564,\n  1565,\n  1566,\n  1567,\n  1568,\n  1569,\n  1570,\n  1571,\n  1572,\n  1573,\n  1574,\n  1575,\n  1576,\n  1577,\n  1578,\n  1579,\n  1580,\n  1581,\n  1582,\n  1583,\n  1584,\n  1585,\n  1586,\n  1587,\n  1588,\n  1589,\n  1590,\n  1591,\n  1592,\n  1593,\n  1594,\n  1595,\n  1596,\n  1597,\n  1598,\n  1599,\n  1600,\n  1601,\n  1602,\n  1603,\n  1604,\n  1605,\n  1606,\n  1607,\n  1608,\n  1609,\n  1610,\n  1611,\n  1612,\n  1613,\n  1614,\n  1615,\n  1616,\n  1617,\n  1618,\n  1619,\n  1620,\n  1621,\n  1622,\n  1623,\n  1624,\n  1625,\n  1626,\n  1627,\n  1628,\n  1629,\n  1630,\n  1631,\n  1632,\n  1633,\n  1634,\n  1635,\n  1636,\n  1637,\n  1638,\n  1639,\n  1640,\n  1641,\n  1642,\n  1643,\n  1644,\n  1645,\n  1646,\n  1647,\n  1648,\n  1649,\n  1650,\n  1651,\n  1652,\n  1653,\n  1654,\n  1655,\n  1656,\n  1657,\n  1658,\n  1659,\n  1660,\n  1661,\n  1662,\n  1663,\n  1664,\n  1665,\n  1666,\n  1667,\n  1668,\n  1669,\n  1670,\n  1671,\n  1672,\n  1673,\n  1674,\n  1675,\n  1676,\n  1677,\n  1678,\n  1679,\n  1680,\n  1681,\n  1682,\n  1683,\n  1684,\n  1685,\n  1686,\n  1687,\n  1688,\n  1689,\n  1690,\n  1691,\n  1692,\n  1693,\n  1694,\n  1695,\n  1696,\n  1697,\n  1698,\n  1699,\n  1700,\n  1701,\n  1702,\n  1703,\n  1704,\n  1705,\n  1706,\n  1707,\n  1708,\n  1709,\n  1710,\n  1711,\n  1712,\n  1713,\n  1714,\n  1715,\n  1716,\n  1717,\n  1718,\n  1719,\n  1720,\n  1721,\n  1722,\n  1723,\n  1724,\n  1725,\n  1726,\n  1727,\n  1728,\n  1729,\n  1730,\n  1731,\n  1732,\n  1733,\n  1734,\n  1735,\n  1736,\n  1737,\n  1738,\n  1739,\n  1740,\n  1741,\n  1742,\n  1743,\n  1744,\n  1745,\n  1746,\n  1747,\n  1748,\n  1749,\n  1750,\n  1751,\n  1752,\n  1753,\n  1754,\n  1755,\n  1756,\n  1757,\n  1758,\n  1759,\n  1760,\n  1761,\n  1762,\n  1763,\n  1764,\n  1765,\n  1766,\n  1767,\n  1768,\n  1769,\n  1770,\n  1771,\n  1772,\n  1773,\n  1774,\n  1775,\n  1776,\n  1777,\n  1778,\n  1779,\n  1780,\n  1781,\n  1782,\n  1783,\n  1784,\n  1785,\n  1786,\n  1787,\n  1788,\n  1789,\n  1790,\n  1791,\n  1792,\n  1793,\n  1794,\n  1795,\n  1796,\n  1797,\n  1798,\n  1799,\n  1800,\n  1801,\n  1802,\n  1803,\n  1804,\n  1805,\n  1806,\n  1807,\n  1808,\n  1809,\n  1810,\n  1811,\n  1812,\n  1813,\n  1814,\n  1815,\n  1816,\n  1817,\n  1818,\n  1819,\n  1820,\n  1821,\n  1822,\n  1823,\n  1824,\n  1825,\n  1826,\n  1827,\n  1828,\n  1829,\n  1830,\n  1831,\n  1832,\n  1833,\n  1834,\n  1835,\n  1836,\n  1837,\n  1838,\n  1839,\n  1840,\n  1841,\n  1842,\n  1843,\n  1844,\n  1845,\n  1846,\n  1847,\n  1848,\n  1849,\n  1850,\n  1851,\n  1852,\n  1853,\n  1854,\n  1855,\n  1856,\n  1857,\n  1858,\n  1859,\n  1860,\n  1861,\n  1862,\n  1863,\n  1864,\n  1865,\n  1866,\n  1867,\n  1868,\n  1869,\n  1870,\n  1871,\n  1872,\n  1873,\n  1874,\n  1875,\n  1876,\n  1877,\n  1878,\n  1879,\n  1880,\n  1881,\n  1882,\n  1883,\n  1884,\n  1885,\n  1886,\n  1887,\n  1888,\n  1889,\n  1890,\n  1891,\n  1892,\n  1893,\n  1894,\n  1895,\n  1896,\n  1897,\n  1898,\n  1899,\n  1900,\n  1901,\n  1902,\n  1903,\n  1904,\n  1905,\n  1906,\n  1907,\n  1908,\n  1909,\n  1910,\n  1911,\n  1912,\n  1913,\n  1914,\n  1915,\n  1916,\n  1917,\n  1918,\n  1919,\n  1920,\n  1921,\n  1922,\n  1923,\n  1924,\n  1925,\n  1926,\n  1927,\n  1928,\n  1929,\n  1930,\n  1931,\n  1932,\n  1933,\n  1934,\n  1935,\n  1936,\n  1937,\n  1938,\n  1939,\n  1940,\n  1941,\n  1942,\n  1943,\n  1944,\n  1945,\n  1946,\n  1947,\n  1948,\n  1949,\n  1950,\n  1951,\n  1952,\n  1953,\n  1954,\n  1955,\n  1956,\n  1957,\n  1958,\n  1959,\n  1960,\n  1961,\n  1962,\n  1963,\n  1964,\n  1965,\n  1966,\n  1967,\n  1968,\n  1969,\n  1970,\n  1971,\n  1972,\n  1973,\n  1974,\n  1975,\n  1976,\n  1977,\n  1978,\n  1979,\n  1980,\n  1981,\n  1982,\n  1983,\n  1984,\n  1985,\n  1986,\n  1987,\n  1988,\n  1989,\n  1990,\n  1991,\n  1992,\n  1993,\n  1994,\n  1995,\n  1996,\n  1997,\n  1998,\n  1999,\n  2000,\n  2001,\n  2002,\n  2003,\n  2004,\n  2005,\n  2006,\n  2007,\n  2008,\n  2009,\n  2010,\n  2011,\n  2012,\n  2013,\n  2014,\n  2015,\n  2016,\n  2017,\n  2018,\n  2019,\n  2020,\n  2021,\n  2022,\n  2023,\n  2024,\n  2025,\n  2026,\n  2027,\n  2028,\n  2029,\n  2030,\n  2031,\n  2032,\n  2033,\n  2034,\n  2035,\n  2036,\n  2037,\n  2038,\n  2039,\n  2040,\n  2041,\n  2042,\n  2043,\n  2044,\n  2045,\n  2046,\n  2047,\n  2048,\n  2049,\n  2050,\n  2051,\n  2052,\n  2053,\n  2054,\n  2055,\n  2056,\n  2057,\n  2058,\n  2059,\n  2060,\n  2061,\n  2062,\n  2063,\n  2064,\n  2065,\n  2066,\n  2067,\n  2068,\n  2069,\n  2070,\n  2071,\n  2072,\n  2073,\n  2074,\n  2075,\n  2076,\n  2077,\n  2078,\n  2079,\n  2080,\n  2081,\n  2082,\n  2083,\n  2084,\n  2085,\n  2086,\n  2087,\n  2088,\n  2089,\n  2090,\n  2091,\n  2092,\n  2093,\n  2094,\n  2095,\n  2096,\n  2097,\n  2098,\n  2099,\n  2100,\n  2101,\n  2102,\n  2103,\n  2104,\n  2105,\n  2106,\n  2107,\n  2108,\n  2109,\n  2110,\n  2111,\n  2112,\n  2113,\n  2114,\n  2115,\n  2116,\n  2117,\n  2118,\n  2119,\n  2120,\n  2121,\n  2122,\n  2123,\n  2124,\n  2125,\n  2126,\n  2127,\n  2128,\n  2129,\n  2130,\n  2131,\n  2132,\n  2133,\n  2134,\n  2135,\n  2136,\n  2137,\n  2138,\n  2139,\n  2140,\n  2141,\n  2142,\n  2143,\n  2144,\n  2145,\n  2146,\n  2147,\n  2148,\n  2149,\n  2150,\n  2151,\n  2152,\n  2153,\n  2154,\n  2155,\n  2156,\n  2157,\n  2158,\n  2159,\n  2160,\n  2161,\n  2162,\n  2163,\n  2164,\n  2165,\n  2166,\n  2167,\n  2168,\n  2169,\n  2170,\n  2171,\n  2172,\n  2173,\n  2174,\n  2175,\n  2176,\n  2177,\n  2178,\n  2179,\n  2180,\n  2181,\n  2182,\n  2183,\n  2184,\n  2185,\n  2186,\n  2187,\n  2188,\n  2189,\n  2190,\n  2191,\n  2192,\n  2193,\n  2194,\n  2195,\n  2196,\n  2197,\n  2198,\n  2199,\n  2200,\n  2201,\n  2202,\n  2203,\n  2204,\n  2205,\n  2206,\n  2207,\n  2208,\n  2209,\n  2210,\n  2211,\n  2212,\n  2213,\n  2214,\n  2215,\n  2216,\n  2217,\n  2218,\n  2219,\n  2220,\n  2221,\n  2222,\n  2223,\n  2224,\n  2225,\n  2226,\n  2227,\n  2228,\n  2229,\n  2230,\n  2231,\n  2232,\n  2233,\n  2234,\n  2235,\n  2236,\n  2237,\n  2238,\n  2239,\n  2240,\n  2241,\n  2242,\n  2243,\n  2244,\n  2245,\n  2246,\n  2247,\n  2248,\n  2249,\n  2250,\n  2251,\n  2252,\n  2253,\n  2254,\n  2255,\n  2256,\n  2257,\n  2258,\n  2259,\n  2260,\n  2261,\n  2262,\n  2263,\n  2264,\n  2265,\n  2266,\n  2267,\n  2268,\n  2269,\n  2270,\n  2271,\n  2272,\n  2273,\n  2274,\n  2275,\n  2276,\n  2277,\n  2278,\n  2279,\n  2280,\n  2281,\n  2282,\n  2283,\n  2284,\n  2285,\n  2286,\n  2287,\n  2288,\n  2289,\n  2290,\n  2291,\n  2292,\n  2293,\n  2294,\n  2295,\n  2296,\n  2297,\n  2298,\n  2299,\n  2300,\n  2301,\n  2302,\n  2303,\n  2304,\n  2305,\n  2306,\n  2307,\n  2308,\n  2309,\n  2310,\n  2311,\n  2312,\n  2313,\n  2314,\n  2315,\n  2316,\n  2317,\n  2318,\n  2319,\n  2320,\n  2321,\n  2322,\n  2323,\n  2324,\n  2325,\n  2326,\n  2327,\n  2328,\n  2329,\n  2330,\n  2331,\n  2332,\n  2333,\n  2334,\n  2335,\n  2336,\n  2337,\n  2338,\n  2339,\n  2340,\n  2341,\n  2342,\n  2343,\n  2344,\n  2345,\n  2346,\n  2347,\n  2348,\n  2349,\n  2350,\n  2351,\n  2352,\n  2353,\n  2354,\n  2355,\n  2356,\n  2357,\n  2358,\n  2359,\n  2360,\n  2361,\n  2362,\n  2363,\n  2364,\n  2365,\n  2366,\n  2367,\n  2368,\n  2369,\n  2370,\n  2371,\n  2372,\n  2373,\n  2374,\n  2375,\n  2376,\n  2377,\n  2378,\n  2379,\n  2380,\n  2381,\n  2382,\n  2383,\n  2384,\n  2385,\n  2386,\n  2387,\n  2388,\n  2389,\n  2390,\n  2391,\n  2392,\n  2393,\n  2394,\n  2395,\n  2396,\n  2397,\n  2398,\n  2399,\n  2400,\n  2401,\n  2402,\n  2403,\n  2404,\n  2405,\n  2406,\n  2407,\n  2408,\n  2409,\n  2410,\n  2411,\n  2412,\n  2413,\n  2414,\n  2415,\n  2416,\n  2417,\n  2418,\n  2419,\n  2420,\n  2421,\n  2422,\n  2423,\n  2424,\n  2425,\n  2426,\n  2427,\n  2428,\n  2429,\n  2430,\n  2431,\n  2432,\n  2433,\n  2434,\n  2435,\n  2436,\n  2437,\n  2438,\n  2439,\n  2440,\n  2441,\n  2442,\n  2443,\n  2444,\n  2445,\n  2446,\n  2447,\n  2448,\n  2449,\n  2450,\n  2451,\n  2452,\n  2453,\n  2454,\n  2455,\n  2456,\n  2457,\n  2458,\n  2459,\n  2460,\n  2461,\n  2462,\n  2463,\n  2464,\n  2465,\n  2466,\n  2467,\n  2468,\n  2469,\n  2470,\n  2471,\n  2472,\n  2473,\n  2474,\n  2475,\n  2476,\n  2477,\n  2478,\n  2479,\n  2480,\n  2481,\n  2482,\n  2483,\n  2484,\n  2485,\n  2486,\n  2487,\n  2488,\n  2489,\n  2490,\n  2491,\n  2492,\n  2493,\n  2494,\n  2495,\n  2496,\n  2497,\n  2498,\n  2499,\n  2500,\n  2501,\n  2502,\n  2503,\n  2504,\n  2505,\n  2506,\n  2507,\n  2508,\n  2509,\n  2510,\n  2511,\n  2512,\n  2513,\n  2514,\n  2515,\n  2516,\n  2517,\n  2518,\n  2519,\n  2520,\n  2521,\n  2522,\n  2523,\n  2524,\n  2525,\n  2526,\n  2527,\n  2528,\n  2529,\n  2530,\n  2531,\n  2532,\n  2533,\n  2534,\n  2535,\n  2536,\n  2537,\n  2538,\n  2539,\n  2540,\n  2541,\n  2542,\n  2543,\n  2544,\n  2545,\n  2546,\n  2547,\n  2548,\n  2549,\n  2550,\n  2551,\n  2552,\n  2553,\n  2554,\n  2555,\n  2556,\n  2557,\n  2558,\n  2559,\n  2560,\n  2561,\n  2562,\n  2563,\n  2564,\n  2565,\n  2566,\n  2567,\n  2568,\n  2569,\n  2570,\n  2571,\n  2572,\n  2573,\n  2574,\n  2575,\n  2576,\n  2577,\n  2578,\n  2579,\n  2580,\n  2581,\n  2582,\n  2583,\n  2584,\n  2585,\n  2586,\n  2587,\n  2588,\n  2589,\n  2590,\n  2591,\n  2592,\n  2593,\n  2594,\n  2595,\n  2596,\n  2597,\n  2598,\n  2599,\n  2600,\n  2601,\n  2602,\n  2603,\n  2604,\n  2605,\n  2606,\n  2607,\n  2608,\n  2609,\n  2610,\n  2611,\n  2612,\n  2613,\n  2614,\n  2615,\n  2616,\n  2617,\n  2618,\n  2619,\n  2620,\n  2621,\n  2622,\n  2623,\n  2624,\n  2625,\n  2626,\n  2627,\n  2628,\n  2629,\n  2630,\n  2631,\n  2632,\n  2633,\n  2634,\n  2635,\n  2636,\n  2637,\n  2638,\n  2639,\n  2640,\n  2641,\n  2642,\n  2643,\n  2644,\n  2645,\n  2646,\n  2647,\n  2648,\n  2649,\n  2650,\n  2651,\n  2652,\n  2653,\n  2654,\n  2655,\n  2656,\n  2657,\n  2658,\n  2659,\n  2660,\n  2661,\n  2662,\n  2663,\n  2664,\n  2665,\n  2666,\n  2667,\n  2668,\n  2669,\n  2670,\n  2671,\n  2672,\n  2673,\n  2674,\n  2675,\n  2676,\n  2677,\n  2678,\n  2679,\n  2680,\n  2681,\n  2682,\n  2683,\n  2684,\n  2685,\n  2686,\n  2687,\n  2688,\n  2689,\n  2690,\n  2691,\n  2692,\n  2693,\n  2694,\n  2695,\n  2696,\n  2697,\n  2698,\n  2699,\n  2700,\n  2701,\n  2702,\n  2703,\n  2704,\n  2705,\n  2706,\n  2707,\n  2708,\n  2709,\n  2710,\n  2711,\n  2712,\n  2713,\n  2714,\n  2715,\n  2716,\n  2717,\n  2718,\n  2719,\n  2720,\n  2721,\n  2722,\n  2723,\n  2724,\n  2725,\n  2726,\n  2727,\n  2728,\n  2729,\n  2730,\n  2731,\n  2732,\n  2733,\n  2734,\n  2735,\n  2736,\n  2737,\n  2738,\n  2739,\n  2740,\n  2741,\n  2742,\n  2743,\n  2744,\n  2745,\n  2746,\n  2747,\n  2748,\n  2749,\n  2750,\n  2751,\n  2752,\n  2753,\n  2754,\n  2755,\n  2756,\n  2757,\n  2758,\n  2759,\n  2760,\n  2761,\n  2762,\n  2763,\n  2764,\n  2765,\n  2766,\n  2767,\n  2768,\n  2769,\n  2770,\n  2771,\n  2772,\n  2773,\n  2774,\n  2775,\n  2776,\n  2777,\n  2778,\n  2779,\n  2780,\n  2781,\n  2782,\n  2783,\n  2784,\n  2785,\n  2786,\n  2787,\n  2788,\n  2789,\n  2790,\n  2791,\n  2792,\n  2793,\n  2794,\n  2795,\n  2796,\n  2797,\n  2798,\n  2799,\n  2800,\n  2801,\n  2802,\n  2803,\n  2804,\n  2805,\n  2806,\n  2807,\n  2808,\n  2809,\n  2810,\n  2811,\n  2812,\n  2813,\n  2814,\n  2815,\n  2816,\n  2817,\n  2818,\n  2819,\n  2820,\n  2821,\n  2822,\n  2823,\n  2824,\n  2825,\n  2826,\n  2827,\n  2828,\n  2829,\n  2830,\n  2831,\n  2832,\n  2833,\n  2834,\n  2835,\n  2836,\n  2837,\n  2838,\n  2839,\n  2840,\n  2841,\n  2842,\n  2843,\n  2844,\n  2845,\n  2846,\n  2847,\n  2848,\n  2849,\n  2850,\n  2851,\n  2852,\n  2853,\n  2854,\n  2855,\n  2856,\n  2857,\n  2858,\n  2859,\n  2860,\n  2861,\n  2862,\n  2863,\n  2864,\n  2865,\n  2866,\n  2867,\n  2868,\n  2869,\n  2870,\n  2871,\n  2872,\n  2873,\n  2874,\n  2875,\n  2876,\n  2877,\n  2878,\n  2879,\n  2880,\n  2881,\n  2882,\n  2883,\n  2884,\n  2885,\n  2886,\n  2887,\n  2888,\n  2889,\n  2890,\n  2891,\n  2892,\n  2893,\n  2894,\n  2895,\n  2896,\n  2897,\n  2898,\n  2899,\n  2900,\n  2901,\n  2902,\n  2903,\n  2904,\n  2905,\n  2906,\n  2907,\n  2908,\n  2909,\n  2910,\n  2911,\n  2912,\n  2913,\n  2914,\n  2915,\n  2916,\n  2917,\n  2918,\n  2919,\n  2920,\n  2921,\n  2922,\n  2923,\n  2924,\n  2925,\n  2926,\n  2927,\n  2928,\n  2929,\n  2930,\n  2931,\n  2932,\n  2933,\n  2934,\n  2935,\n  2936,\n  2937,\n  2938,\n  2939,\n  2940,\n  2941,\n  2942,\n  2943,\n  2944,\n  2945,\n  2946,\n  2947,\n  2948,\n  2949,\n  2950,\n  2951,\n  2952,\n  2953,\n  2954,\n  2955,\n  2956,\n  2957,\n  2958,\n  2959,\n  2960,\n  2961,\n  2962,\n  2963,\n  2964,\n  2965,\n  2966,\n  2967,\n  2968,\n  2969,\n  2970,\n  2971,\n  2972,\n  2973,\n  2974,\n  2975,\n  2976,\n  2977,\n  2978,\n  2979,\n  2980,\n  2981,\n  2982,\n  2983,\n  2984,\n  2985,\n  2986,\n  2987,\n  2988,\n  2989,\n  2990,\n  2991,\n  2992,\n  2993,\n  2994,\n  2995,\n  2996,\n  2997,\n  2998,\n  2999,\n  3000,\n  3001,\n  3002,\n  3003,\n  3004,\n  3005,\n  3006,\n  3007,\n  3008,\n  3009,\n  3010,\n  3011,\n  3012,\n  3013,\n  3014,\n  3015,\n  3016,\n  3017,\n  3018,\n  3019,\n  3020,\n  3021,\n  3022,\n  3023,\n  3024,\n  3025,\n  3026,\n  3027,\n  3028,\n  3029,\n  3030,\n  3031,\n  3032,\n  3033,\n  3034,\n  3035,\n  3036,\n  3037,\n  3038,\n  3039,\n  3040,\n  3041,\n  3042,\n  3043,\n  3044,\n  3045,\n  3046,\n  3047,\n  3048,\n  3049,\n  3050,\n  3051,\n  3052,\n  3053,\n  3054,\n  3055,\n  3056,\n  3057,\n  3058,\n  3059,\n  3060,\n  3061,\n  3062,\n  3063,\n  3064,\n  3065,\n  3066,\n  3067,\n  3068,\n  3069,\n  3070,\n  3071,\n  3072,\n  3073,\n  3074,\n  3075,\n  3076,\n  3077,\n  3078,\n  3079,\n  3080,\n  3081,\n  3082,\n  3083,\n  3084,\n  3085,\n  3086,\n  3087,\n  3088,\n  3089,\n  3090,\n  3091,\n  3092,\n  3093,\n  3094,\n  3095,\n  3096,\n  3097,\n  3098,\n  3099,\n  3100,\n  3101,\n  3102,\n  3103,\n  3104,\n  3105,\n  3106,\n  3107,\n  3108,\n  3109,\n  3110,\n  3111,\n  3112,\n  3113,\n  3114,\n  3115,\n  3116,\n  3117,\n  3118,\n  3119,\n  3120,\n  3121,\n  3122,\n  3123,\n  3124,\n  3125,\n  3126,\n  3127,\n  3128,\n  3129,\n  3130,\n  3131,\n  3132,\n  3133,\n  3134,\n  3135,\n  3136,\n  3137,\n  3138,\n  3139,\n  3140,\n  3141,\n  3142,\n  3143,\n  3144,\n  3145,\n  3146,\n  3147,\n  3148,\n  3149,\n  3150,\n  3151,\n  3152,\n  3153,\n  3154,\n  3155,\n  3156,\n  3157,\n  3158,\n  3159,\n  3160,\n  3161,\n  3162,\n  3163,\n  3164,\n  3165,\n  3166,\n  3167,\n  3168,\n  3169,\n  3170,\n  3171,\n  3172,\n  3173,\n  3174,\n  3175,\n  3176,\n  3177,\n  3178,\n  3179,\n  3180,\n  3181,\n  3182,\n  3183,\n  3184,\n  3185,\n  3186,\n  3187,\n  3188,\n  3189,\n  3190,\n  3191,\n  3192,\n  3193,\n  3194,\n  3195,\n  3196,\n  3197,\n  3198,\n  3199,\n  3200,\n  3201,\n  3202,\n  3203,\n  3204,\n  3205,\n  3206,\n  3207,\n  3208,\n  3209,\n  3210,\n  3211,\n  3212,\n  3213,\n  3214,\n  3215,\n  3216,\n  3217,\n  3218,\n  3219,\n  3220,\n  3221,\n  3222,\n  3223,\n  3224,\n  3225,\n  3226,\n  3227,\n  3228,\n  3229,\n  3230,\n  3231,\n  3232,\n  3233,\n  3234,\n  3235,\n  3236,\n  3237,\n  3238,\n  3239,\n  3240,\n  3241,\n  3242,\n  3243,\n  3244,\n  3245,\n  3246,\n  3247,\n  3248,\n  3249,\n  3250,\n  3251,\n  3252,\n  3253,\n  3254,\n  3255,\n  3256,\n  3257,\n  3258,\n  3259,\n  3260,\n  3261,\n  3262,\n  3263,\n  3264,\n  3265,\n  3266,\n  3267,\n  3268,\n  3269,\n  3270,\n  3271,\n  3272,\n  3273,\n  3274,\n  3275,\n  3276,\n  3277,\n  3278,\n  3279,\n  3280,\n  3281,\n  3282,\n  3283,\n  3284,\n  3285,\n  3286,\n  3287,\n  3288,\n  3289,\n  3290,\n  3291,\n  3292,\n  3293,\n  3294,\n  3295,\n  3296,\n  3297,\n  3298,\n  3299,\n  3300,\n  3301,\n  3302,\n  3303,\n  3304,\n  3305,\n  3306,\n  3307,\n  3308,\n  3309,\n  3310,\n  3311,\n  3312,\n  3313,\n  3314,\n  3315,\n  3316,\n  3317,\n  3318,\n  3319,\n  3320,\n  3321,\n  3322,\n  3323,\n  3324,\n  3325,\n  3326,\n  3327,\n  3328,\n  3329,\n  3330,\n  3331,\n  3332,\n  3333,\n  3334,\n  3335,\n  3336,\n  3337,\n  3338,\n  3339,\n  3340,\n  3341,\n  3342,\n  3343,\n  3344,\n  3345,\n  3346,\n  3347,\n  3348,\n  3349,\n  3350,\n  3351,\n  3352,\n  3353,\n  3354,\n  3355,\n  3356,\n  3357,\n  3358,\n  3359,\n  3360,\n  3361,\n  3362,\n  3363,\n  3364,\n  3365,\n  3366,\n  3367,\n  3368,\n  3369,\n  3370,\n  3371,\n  3372,\n  3373,\n  3374,\n  3375,\n  3376,\n  3377,\n  3378,\n  3379,\n  3380,\n  3381,\n  3382,\n  3383,\n  3384,\n  3385,\n  3386,\n  3387,\n  3388,\n  3389,\n  3390,\n  3391,\n  3392,\n  3393,\n  3394,\n  3395,\n  3396,\n  3397,\n  3398,\n  3399,\n  3400,\n  3401,\n  3402,\n  3403,\n  3404,\n  3405,\n  3406,\n  3407,\n  3408,\n  3409,\n  3410,\n  3411,\n  3412,\n  3413,\n  3414,\n  3415,\n  3416,\n  3417,\n  3418,\n  3419,\n  3420,\n  3421,\n  3422,\n  3423,\n  3424,\n  3425,\n  3426,\n  3427,\n  3428,\n  3429,\n  3430,\n  3431,\n  3432,\n  3433,\n  3434,\n  3435,\n  3436,\n  3437,\n  3438,\n  3439,\n  3440,\n  3441,\n  3442,\n  3443,\n  3444,\n  3445,\n  3446,\n  3447,\n  3448,\n  3449,\n  3450,\n  3451,\n  3452,\n  3453,\n  3454,\n  3455,\n  3456,\n  3457,\n  3458,\n  3459,\n  3460,\n  3461,\n  3462,\n  3463,\n  3464,\n  3465,\n  3466,\n  3467,\n  3468,\n  3469,\n  3470,\n  3471,\n  3472,\n  3473,\n  3474,\n  3475,\n  3476,\n  3477,\n  3478,\n  3479,\n  3480,\n  3481,\n  3482,\n  3483,\n  3484,\n  3485,\n  3486,\n  3487,\n  3488,\n  3489,\n  3490,\n  3491,\n  3492,\n  3493,\n  3494,\n  3495,\n  3496,\n  3497,\n  3498,\n  3499,\n  3500,\n  3501,\n  3502,\n  3503,\n  3504,\n  3505,\n  3506,\n  3507,\n  3508,\n  3509,\n  3510,\n  3511,\n  3512,\n  3513,\n  3514,\n  3515,\n  3516,\n  3517,\n  3518,\n  3519,\n  3520,\n  3521,\n  3522,\n  3523,\n  3524,\n  3525,\n  3526,\n  3527,\n  3528,\n  3529,\n  3530,\n  3531,\n  3532,\n  3533,\n  3534,\n  3535,\n  3536,\n  3537,\n  3538,\n  3539,\n  3540,\n  3541,\n  3542,\n  3543,\n  3544,\n  3545,\n  3546,\n  3547,\n  3548,\n  3549,\n  3550,\n  3551,\n  3552,\n  3553,\n  3554,\n  3555,\n  3556,\n  3557,\n  3558,\n  3559,\n  3560,\n  3561,\n  3562,\n  3563,\n  3564,\n  3565,\n  3566,\n  3567,\n  3568,\n  3569,\n  3570,\n  3571,\n  3572,\n  3573,\n  3574,\n  3575,\n  3576,\n  3577,\n  3578,\n  3579,\n  3580,\n  3581,\n  3582,\n  3583,\n  3584,\n  3585,\n  3586,\n  3587,\n  3588,\n  3589,\n  3590,\n  3591,\n  3592,\n  3593,\n  3594,\n  3595,\n  3596,\n  3597,\n  3598,\n  3599,\n  3600,\n  3601,\n  3602,\n  3603,\n  3604,\n  3605,\n  3606,\n  3607,\n  3608,\n  3609,\n  3610,\n  3611,\n  3612,\n  3613,\n  3614,\n  3615,\n  3616,\n  3617,\n  3618,\n  3619,\n  3620,\n  3621,\n  3622,\n  3623,\n  3624,\n  3625,\n  3626,\n  3627,\n  3628,\n  3629,\n  3630,\n  3631,\n  3632,\n  3633,\n  3634,\n  3635,\n  3636,\n  3637,\n  3638,\n  3639,\n  3640,\n  3641,\n  3642,\n  3643,\n  3644,\n  3645,\n  3646,\n  3647,\n  3648,\n  3649,\n  3650,\n  3651,\n  3652,\n  3653,\n  3654,\n  3655,\n  3656,\n  3657,\n  3658,\n  3659,\n  3660,\n  3661,\n  3662,\n  3663,\n  3664,\n  3665,\n  3666,\n  3667,\n  3668,\n  3669,\n  3670,\n  3671,\n  3672,\n  3673,\n  3674,\n  3675,\n  3676,\n  3677,\n  3678,\n  3679,\n  3680,\n  3681,\n  3682,\n  3683,\n  3684,\n  3685,\n  3686,\n  3687,\n  3688,\n  3689,\n  3690,\n  3691,\n  3692,\n  3693,\n  3694,\n  3695,\n  3696,\n  3697,\n  3698,\n  3699,\n  3700,\n  3701,\n  3702,\n  3703,\n  3704,\n  3705,\n  3706,\n  3707,\n  3708,\n  3709,\n  3710,\n  3711,\n  3712,\n  3713,\n  3714,\n  3715,\n  3716,\n  3717,\n  3718,\n  3719,\n  3720,\n  3721,\n  3722,\n  3723,\n  3724,\n  3725,\n  3726,\n  3727,\n  3728,\n  3729,\n  3730,\n  3731,\n  3732,\n  3733,\n  3734,\n  3735,\n  3736,\n  3737,\n  3738,\n  3739,\n  3740,\n  3741,\n  3742,\n  3743,\n  3744,\n  3745,\n  3746,\n  3747,\n  3748,\n  3749,\n  3750,\n  3751,\n  3752,\n  3753,\n  3754,\n  3755,\n  3756,\n  3757,\n  3758,\n  3759,\n  3760,\n  3761,\n  3762,\n  3763,\n  3764,\n  3765,\n  3766,\n  3767,\n  3768,\n  3769,\n  3770,\n  3771,\n  3772,\n  3773,\n  3774,\n  3775,\n  3776,\n  3777,\n  3778,\n  3779,\n  3780,\n  3781,\n  3782,\n  3783,\n  3784,\n  3785,\n  3786,\n  3787,\n  3788,\n  3789,\n  3790,\n  3791,\n  3792,\n  3793,\n  3794,\n  3795,\n  3796,\n  3797,\n  3798,\n  3799,\n  3800,\n  3801,\n  3802,\n  3803,\n  3804,\n  3805,\n  3806,\n  3807,\n  3808,\n  3809,\n  3810,\n  3811,\n  3812,\n  3813,\n  3814,\n  3815,\n  3816,\n  3817,\n  3818,\n  3819,\n  3820,\n  3821,\n  3822,\n  3823,\n  3824,\n  3825,\n  3826,\n  3827,\n  3828,\n  3829,\n  3830,\n  3831,\n  3832,\n  3833,\n  3834,\n  3835,\n  3836,\n  3837,\n  3838,\n  3839,\n  3840,\n  3841,\n  3842,\n  3843,\n  3844,\n  3845,\n  3846,\n  3847,\n  3848,\n  3849,\n  3850,\n  3851,\n  3852,\n  3853,\n  3854,\n  3855,\n  3856,\n  3857,\n  3858,\n  3859,\n  3860,\n  3861,\n  3862,\n  3863,\n  3864,\n  3865,\n  3866,\n  3867,\n  3868,\n  3869,\n  3870,\n  3871,\n  3872,\n  3873,\n  3874,\n  3875,\n  3876,\n  3877,\n  3878,\n  3879,\n  3880,\n  3881,\n  3882,\n  3883,\n  3884,\n  3885,\n  3886,\n  3887,\n  3888,\n  3889,\n  3890,\n  3891,\n  3892,\n  3893,\n  3894,\n  3895,\n  3896,\n  3897,\n  3898,\n  3899,\n  3900,\n  3901,\n  3902,\n  3903,\n  3904,\n  3905,\n  3906,\n  3907,\n  3908,\n  3909,\n  3910,\n  3911,\n  3912,\n  3913,\n  3914,\n  3915,\n  3916,\n  3917,\n  3918,\n  3919,\n  3920,\n  3921,\n  3922,\n  3923,\n  3924,\n  3925,\n  3926,\n  3927,\n  3928,\n  3929,\n  3930,\n  3931,\n  3932,\n  3933,\n  3934,\n  3935,\n  3936,\n  3937,\n  3938,\n  3939,\n  3940,\n  3941,\n  3942,\n  3943,\n  3944,\n  3945,\n  3946,\n  3947,\n  3948,\n  3949,\n  3950,\n  3951,\n  3952,\n  3953,\n  3954,\n  3955,\n  3956,\n  3957,\n  3958,\n  3959,\n  3960,\n  3961,\n  3962,\n  3963,\n  3964,\n  3965,\n  3966,\n  3967,\n  3968,\n  3969,\n  3970,\n  3971,\n  3972,\n  3973,\n  3974,\n  3975,\n  3976,\n  3977,\n  3978,\n  3979,\n  3980,\n  3981,\n  3982,\n  3983,\n  3984,\n  3985,\n  3986,\n  3987,\n  3988,\n  3989,\n  3990,\n  3991,\n  3992,\n  3993,\n  3994,\n  3995,\n  3996,\n  3997,\n  3998,\n  3999,\n  4000,\n  4001,\n  4002,\n  4003,\n  4004,\n  4005,\n  4006,\n  4007,\n  4008,\n  4009,\n  4010,\n  4011,\n  4012,\n  4013,\n  4014,\n  4015,\n  4016,\n  4017,\n  4018,\n  4019,\n  4020,\n  4021,\n  4022,\n  4023,\n  4024,\n  4025,\n  4026,\n  4027,\n  4028,\n  4029,\n  4030,\n  4031,\n  4032,\n  4033,\n  4034,\n  4035,\n  4036,\n  4037,\n  4038,\n  4039,\n  4040,\n  4041,\n  4042,\n  4043,\n  4044,\n  4045,\n  4046,\n  4047,\n  4048,\n  4049,\n  4050,\n  4051,\n  4052,\n  4053,\n  4054,\n  4055,\n  4056,\n  4057,\n  4058,\n  4059,\n  4060,\n  4061,\n  4062,\n  4063,\n  4064,\n  4065,\n  4066,\n  4067,\n  4068,\n  4069,\n  4070,\n  4071,\n  4072,\n  4073,\n  4074,\n  4075,\n  4076,\n  4077,\n  4078,\n  4079,\n  4080,\n  4081,\n  4082,\n  4083,\n  4084,\n  4085,\n  4086,\n  4087,\n  4088,\n  4089,\n  4090,\n  4091,\n  4092,\n  4093,\n  4094,\n  4095,\n  4096,\n  4097,\n  4098,\n  4099,\n  4100,\n  4101,\n  4102,\n  4103,\n  4104,\n  4105,\n  4106,\n  4107,\n  4108,\n  4109,\n  4110,\n  4111,\n  4112,\n  4113,\n  4114,\n  4115,\n  4116,\n  4117,\n  4118,\n  4119,\n  4120,\n  4121,\n  4122,\n  4123,\n  4124,\n  4125,\n  4126,\n  4127,\n  4128,\n  4129,\n  4130,\n  4131,\n  4132,\n  4133,\n  4134,\n  4135,\n  4136,\n  4137,\n  4138,\n  4139,\n  4140,\n  4141,\n  4142,\n  4143,\n  4144,\n  4145,\n  4146,\n  4147,\n  4148,\n  4149,\n  4150,\n  4151,\n  4152,\n  4153,\n  4154,\n  4155,\n  4156,\n  4157,\n  4158,\n  4159,\n  4160,\n  4161,\n  4162,\n  4163,\n  4164,\n  4165,\n  4166,\n  4167,\n  4168,\n  4169,\n  4170,\n  4171,\n  4172,\n  4173,\n  4174,\n  4175,\n  4176,\n  4177,\n  4178,\n  4179,\n  4180,\n  4181,\n  4182,\n  4183,\n  4184,\n  4185,\n  4186,\n  4187,\n  4188,\n  4189,\n  4190,\n  4191,\n  4192,\n  4193,\n  4194,\n  4195,\n  4196,\n  4197,\n  4198,\n  4199,\n  4200,\n  4201,\n  4202,\n  4203,\n  4204,\n  4205,\n  4206,\n  4207,\n  4208,\n  4209,\n  4210,\n  4211,\n  4212,\n  4213,\n  4214,\n  4215,\n  4216,\n  4217,\n  4218,\n  4219,\n  4220,\n  4221,\n  4222,\n  4223,\n  4224,\n  4225,\n  4226,\n  4227,\n  4228,\n  4229,\n  4230,\n  4231,\n  4232,\n  4233,\n  4234,\n  4235,\n  4236,\n  4237,\n  4238,\n  4239,\n  4240,\n  4241,\n  4242,\n  4243,\n  4244,\n  4245,\n  4246,\n  4247,\n  4248,\n  4249,\n  4250,\n  4251,\n  4252,\n  4253,\n  4254,\n  4255,\n  4256,\n  4257,\n  4258,\n  4259,\n  4260,\n  4261,\n  4262,\n  4263,\n  4264,\n  4265,\n  4266,\n  4267,\n  4268,\n  4269,\n  4270,\n  4271,\n  4272,\n  4273,\n  4274,\n  4275,\n  4276,\n  4277,\n  4278,\n  4279,\n  4280,\n  4281,\n  4282,\n  4283,\n  4284,\n  4285,\n  4286,\n  4287,\n  4288,\n  4289,\n  4290,\n  4291,\n  4292,\n  4293,\n  4294,\n  4295,\n  4296,\n  4297,\n  4298,\n  4299,\n  4300,\n  4301,\n  4302,\n  4303,\n  4304,\n  4305,\n  4306,\n  4307,\n  4308,\n  4309,\n  4310,\n  4311,\n  4312,\n  4313,\n  4314,\n  4315,\n  4316,\n  4317,\n  4318,\n  4319,\n  4320,\n  4321,\n  4322,\n  4323,\n  4324,\n  4325,\n  4326,\n  4327,\n  4328,\n  4329,\n  4330,\n  4331,\n  4332,\n  4333,\n  4334,\n  4335,\n  4336,\n  4337,\n  4338,\n  4339,\n  4340,\n  4341,\n  4342,\n  4343,\n  4344,\n  4345,\n  4346,\n  4347,\n  4348,\n  4349,\n  4350,\n  4351,\n  4352,\n  4353,\n  4354,\n  4355,\n  4356,\n  4357,\n  4358,\n  4359,\n  4360,\n  4361,\n  4362,\n  4363,\n  4364,\n  4365,\n  4366,\n  4367,\n  4368,\n  4369,\n  4370,\n  4371,\n  4372,\n  4373,\n  4374,\n  4375,\n  4376,\n  4377,\n  4378,\n  4379,\n  4380,\n  4381,\n  4382,\n  4383,\n  4384,\n  4385,\n  4386,\n  4387,\n  4388,\n  4389,\n  4390,\n  4391,\n  4392,\n  4393,\n  4394,\n  4395,\n  4396,\n  4397,\n  4398,\n  4399,\n  4400,\n  4401,\n  4402,\n  4403,\n  4404,\n  4405,\n  4406,\n  4407,\n  4408,\n  4409,\n  4410,\n  4411,\n  4412,\n  4413,\n  4414,\n  4415,\n  4416,\n  4417,\n  4418,\n  4419,\n  4420,\n  4421,\n  4422,\n  4423,\n  4424,\n  4425,\n  4426,\n  4427,\n  4428,\n  4429,\n  4430,\n  4431,\n  4432,\n  4433,\n  4434,\n  4435,\n  4436,\n  4437,\n  4438,\n  4439,\n  4440,\n  4441,\n  4442,\n  4443,\n  4444,\n  4445,\n  4446,\n  4447,\n  4448,\n  4449,\n  4450,\n  4451,\n  4452,\n  4453,\n  4454,\n  4455,\n  4456,\n  4457,\n  4458,\n  4459,\n  4460,\n  4461,\n  4462,\n  4463,\n  4464,\n  4465,\n  4466,\n  4467,\n  4468,\n  4469,\n  4470,\n  4471,\n  4472,\n  4473,\n  4474,\n  4475,\n  4476,\n  4477,\n  4478,\n  4479,\n  4480,\n  4481,\n  4482,\n  4483,\n  4484,\n  4485,\n  4486,\n  4487,\n  4488,\n  4489,\n  4490,\n  4491,\n  4492,\n  4493,\n  4494,\n  4495,\n  4496,\n  4497,\n  4498,\n  4499,\n  4500,\n  4501,\n  4502,\n  4503,\n  4504,\n  4505,\n  4506,\n  4507,\n  4508,\n  4509,\n  4510,\n  4511,\n  4512,\n  4513,\n  4514,\n  4515,\n  4516,\n  4517,\n  4518,\n  4519,\n  4520,\n  4521,\n  4522,\n  4523,\n  4524,\n  4525,\n  4526,\n  4527,\n  4528,\n  4529,\n  4530,\n  4531,\n  4532,\n  4533,\n  4534,\n  4535,\n  4536,\n  4537,\n  4538,\n  4539,\n  4540,\n  4541,\n  4542,\n  4543,\n  4544,\n  4545,\n  4546,\n  4547,\n  4548,\n  4549,\n  4550,\n  4551,\n  4552,\n  4553,\n  4554,\n  4555,\n  4556,\n  4557,\n  4558,\n  4559,\n  4560,\n  4561,\n  4562,\n  4563,\n  4564,\n  4565,\n  4566,\n  4567,\n  4568,\n  4569,\n  4570,\n  4571,\n  4572,\n  4573,\n  4574,\n  4575,\n  4576,\n  4577,\n  4578,\n  4579,\n  4580,\n  4581,\n  4582,\n  4583,\n  4584,\n  4585,\n  4586,\n  4587,\n  4588,\n  4589,\n  4590,\n  4591,\n  4592,\n  4593,\n  4594,\n  4595,\n  4596,\n  4597,\n  4598,\n  4599,\n  4600,\n  4601,\n  4602,\n  4603,\n  4604,\n  4605,\n  4606,\n  4607,\n  4608,\n  4609,\n  4610,\n  4611,\n  4612,\n  4613,\n  4614,\n  4615,\n  4616,\n  4617,\n  4618,\n  4619,\n  4620,\n  4621,\n  4622,\n  4623,\n  4624,\n  4625,\n  4626,\n  4627,\n  4628,\n  4629,\n  4630,\n  4631,\n  4632,\n  4633,\n  4634,\n  4635,\n  4636,\n  4637,\n  4638,\n  4639,\n  4640,\n  4641,\n  4642,\n  4643,\n  4644,\n  4645,\n  4646,\n  4647,\n  4648,\n  4649,\n  4650,\n  4651,\n  4652,\n  4653,\n  4654,\n  4655,\n  4656,\n  4657,\n  4658,\n  4659,\n  4660,\n  4661,\n  4662,\n  4663,\n  4664,\n  4665,\n  4666,\n  4667,\n  4668,\n  4669,\n  4670,\n  4671,\n  4672,\n  4673,\n  4674,\n  4675,\n  4676,\n  4677,\n  4678,\n  4679,\n  4680,\n  4681,\n  4682,\n  4683,\n  4684,\n  4685,\n  4686,\n  4687,\n  4688,\n  4689,\n  4690,\n  4691,\n  4692,\n  4693,\n  4694,\n  4695,\n  4696,\n  4697,\n  4698,\n  4699,\n  4700,\n  4701,\n  4702,\n  4703,\n  4704,\n  4705,\n  4706,\n  4707,\n  4708,\n  4709,\n  4710,\n  4711,\n  4712,\n  4713,\n  4714,\n  4715,\n  4716,\n  4717,\n  4718,\n  4719,\n  4720,\n  4721,\n  4722,\n  4723,\n  4724,\n  4725,\n  4726,\n  4727,\n  4728,\n  4729,\n  4730,\n  4731,\n  4732,\n  4733,\n  4734,\n  4735,\n  4736,\n  4737,\n  4738,\n  4739,\n  4740,\n  4741,\n  4742,\n  4743,\n  4744,\n  4745,\n  4746,\n  4747,\n  4748,\n  4749,\n  4750,\n  4751,\n  4752,\n  4753,\n  4754,\n  4755,\n  4756,\n  4757,\n  4758,\n  4759,\n  4760,\n  4761,\n  4762,\n  4763,\n  4764,\n  4765,\n  4766,\n  4767,\n  4768,\n  4769,\n  4770,\n  4771,\n  4772,\n  4773,\n  4774,\n  4775,\n  4776,\n  4777,\n  4778,\n  4779,\n  4780,\n  4781,\n  4782,\n  4783,\n  4784,\n  4785,\n  4786,\n  4787,\n  4788,\n  4789,\n  4790,\n  4791,\n  4792,\n  4793,\n  4794,\n  4795,\n  4796,\n  4797,\n  4798,\n  4799,\n  4800,\n  4801,\n  4802,\n  4803,\n  4804,\n  4805,\n  4806,\n  4807,\n  4808,\n  4809,\n  4810,\n  4811,\n  4812,\n  4813,\n  4814,\n  4815,\n  4816,\n  4817,\n  4818,\n  4819,\n  4820,\n  4821,\n  4822,\n  4823,\n  4824,\n  4825,\n  4826,\n  4827,\n  4828,\n  4829,\n  4830,\n  4831,\n  4832,\n  4833,\n  4834,\n  4835,\n  4836,\n  4837,\n  4838,\n  4839,\n  4840,\n  4841,\n  4842,\n  4843,\n  4844,\n  4845,\n  4846,\n  4847,\n  4848,\n  4849,\n  4850,\n  4851,\n  4852,\n  4853,\n  4854,\n  4855,\n  4856,\n  4857,\n  4858,\n  4859,\n  4860,\n  4861,\n  4862,\n  4863,\n  4864,\n  4865,\n  4866,\n  4867,\n  4868,\n  4869,\n  4870,\n  4871,\n  4872,\n  4873,\n  4874,\n  4875,\n  4876,\n  4877,\n  4878,\n  4879,\n  4880,\n  4881,\n  4882,\n  4883,\n  4884,\n  4885,\n  4886,\n  4887,\n  4888,\n  4889,\n  4890,\n  4891,\n  4892,\n  4893,\n  4894,\n  4895,\n  4896,\n  4897,\n  4898,\n  4899,\n  4900,\n  4901,\n  4902,\n  4903,\n  4904,\n  4905,\n  4906,\n  4907,\n  4908,\n  4909,\n  4910,\n  4911,\n  4912,\n  4913,\n  4914,\n  4915,\n  4916,\n  4917,\n  4918,\n  4919,\n  4920,\n  4921,\n  4922,\n  4923,\n  4924,\n  4925,\n  4926,\n  4927,\n  4928,\n  4929,\n  4930,\n  4931,\n  4932,\n  4933,\n  4934,\n  4935,\n  4936,\n  4937,\n  4938,\n  4939,\n  4940,\n  4941,\n  4942,\n  4943,\n  4944,\n  4945,\n  4946,\n  4947,\n  4948,\n  4949,\n  4950,\n  4951,\n  4952,\n  4953,\n  4954,\n  4955,\n  4956,\n  4957,\n  4958,\n  4959,\n  4960,\n  4961,\n  4962,\n  4963,\n  4964,\n  4965,\n  4966,\n  4967,\n  4968,\n  4969,\n  4970,\n  4971,\n  4972,\n  4973,\n  4974,\n  4975,\n  4976,\n  4977,\n  4978,\n  4979,\n  4980,\n  4981,\n  4982,\n  4983,\n  4984,\n  4985,\n  4986,\n  4987,\n  4988,\n  4989,\n  4990,\n  4991,\n  4992,\n  4993,\n  4994,\n  4995,\n  4996,\n  4997,\n  4998,\n  4999,\n  5000,\n  5001,\n  5002,\n  5003,\n  5004,\n  5005,\n  5006,\n  5007,\n  5008,\n  5009,\n  5010,\n  5011,\n  5012,\n  5013,\n  5014,\n  5015,\n  5016,\n  5017,\n  5018,\n  5019,\n  5020,\n  5021,\n  5022,\n  5023,\n  5024,\n  5025,\n  5026,\n  5027,\n  5028,\n  5029,\n  5030,\n  5031,\n  5032,\n  5033,\n  5034,\n  5035,\n  5036,\n  5037,\n  5038,\n  5039,\n  5040,\n  5041,\n  5042,\n  5043,\n  5044,\n  5045,\n  5046,\n  5047,\n  5048,\n  5049,\n  5050,\n  5051,\n  5052,\n  5053,\n  5054,\n  5055,\n  5056,\n  5057,\n  5058,\n  5059,\n  5060,\n  5061,\n  5062,\n  5063,\n  5064,\n  5065,\n  5066,\n  5067,\n  5068,\n  5069,\n  5070,\n  5071,\n  5072,\n  5073,\n  5074,\n  5075,\n  5076,\n  5077,\n  5078,\n  5079,\n  5080,\n  5081,\n  5082,\n  5083,\n  5084,\n  5085,\n  5086,\n  5087,\n  5088,\n  5089,\n  5090,\n  5091,\n  5092,\n  5093,\n  5094,\n  5095,\n  5096,\n  5097,\n  5098,\n  5099,\n  5100,\n  5101,\n  5102,\n  5103,\n  5104,\n  5105,\n  5106,\n  5107,\n  5108,\n  5109,\n  5110,\n  5111,\n  5112,\n  5113,\n  5114,\n  5115,\n  5116,\n  5117,\n  5118,\n  5119,\n  5120,\n  5121,\n  5122,\n  5123,\n  5124,\n  5125,\n  5126,\n  5127,\n  5128,\n  5129,\n  5130,\n  5131,\n  5132,\n  5133,\n  5134,\n  5135,\n  5136,\n  5137,\n  5138,\n  5139,\n  5140,\n  5141,\n  5142,\n  5143,\n  5144,\n  5145,\n  5146,\n  5147,\n  5148,\n  5149,\n  5150,\n  5151,\n  5152,\n  5153,\n  5154,\n  5155,\n  5156,\n  5157,\n  5158,\n  5159,\n  5160,\n  5161,\n  5162,\n  5163,\n  5164,\n  5165,\n  5166,\n  5167,\n  5168,\n  5169,\n  5170,\n  5171,\n  5172,\n  5173,\n  5174,\n  5175,\n  5176,\n  5177,\n  5178,\n  5179,\n  5180,\n  5181,\n  5182,\n  5183,\n  5184,\n  5185,\n  5186,\n  5187,\n  5188,\n  5189,\n  5190,\n  5191,\n  5192,\n  5193,\n  5194,\n  5195,\n  5196,\n  5197,\n  5198,\n  5199,\n  5200,\n  5201,\n  5202,\n  5203,\n  5204,\n  5205,\n  5206,\n  5207,\n  5208,\n  5209,\n  5210,\n  5211,\n  5212,\n  5213,\n  5214,\n  5215,\n  5216,\n  5217,\n  5218,\n  5219,\n  5220,\n  5221,\n  5222,\n  5223,\n  5224,\n  5225,\n  5226,\n  5227,\n  5228,\n  5229,\n  5230,\n  5231,\n  5232,\n  5233,\n  5234,\n  5235,\n  5236,\n  5237,\n  5238,\n  5239,\n  5240,\n  5241,\n  5242,\n  5243,\n  5244,\n  5245,\n  5246,\n  5247,\n  5248,\n  5249,\n  5250,\n  5251,\n  5252,\n  5253,\n  5254,\n  5255,\n  5256,\n  5257,\n  5258,\n  5259,\n  5260,\n  5261,\n  5262,\n  5263,\n  5264,\n  5265,\n  5266,\n  5267,\n  5268,\n  5269,\n  5270,\n  5271,\n  5272,\n  5273,\n  5274,\n  5275,\n  5276,\n  5277,\n  5278,\n  5279,\n  5280,\n  5281,\n  5282,\n  5283,\n  5284,\n  5285,\n  5286,\n  5287,\n  5288,\n  5289,\n  5290,\n  5291,\n  5292,\n  5293,\n  5294,\n  5295,\n  5296,\n  5297,\n  5298,\n  5299,\n  5300,\n  5301,\n  5302,\n  5303,\n  5304,\n  5305,\n  5306,\n  5307,\n  5308,\n  5309,\n  5310,\n  5311,\n  5312,\n  5313,\n  5314,\n  5315,\n  5316,\n  5317,\n  5318,\n  5319,\n  5320,\n  5321,\n  5322,\n  5323,\n  5324,\n  5325,\n  5326,\n  5327,\n  5328,\n  5329,\n  5330,\n  5331,\n  5332,\n  5333,\n  5334,\n  5335,\n  5336,\n  5337,\n  5338,\n  5339,\n  5340,\n  5341,\n  5342,\n  5343,\n  5344,\n  5345,\n  5346,\n  5347,\n  5348,\n  5349,\n  5350,\n  5351,\n  5352,\n  5353,\n  5354,\n  5355,\n  5356,\n  5357,\n  5358,\n  5359,\n  5360,\n  5361,\n  5362,\n  5363,\n  5364,\n  5365,\n  5366,\n  5367,\n  5368,\n  5369,\n  5370,\n  5371,\n  5372,\n  5373,\n  5374,\n  5375,\n  5376,\n  5377,\n  5378,\n  5379,\n  5380,\n  5381,\n  5382,\n  5383,\n  5384,\n  5385,\n  5386,\n  5387,\n  5388,\n  5389,\n  5390,\n  5391,\n  5392,\n  5393,\n  5394,\n  5395,\n  5396,\n  5397,\n  5398,\n  5399,\n  5400,\n  5401,\n  5402,\n  5403,\n  5404,\n  5405,\n  5406,\n  5407,\n  5408,\n  5409,\n  5410,\n  5411,\n  5412,\n  5413,\n  5414,\n  5415,\n  5416,\n  5417,\n  5418,\n  5419,\n  5420,\n  5421,\n  5422,\n  5423,\n  5424,\n  5425,\n  5426,\n  5427,\n  5428,\n  5429,\n  5430,\n  5431,\n  5432,\n  5433,\n  5434,\n  5435,\n  5436,\n  5437,\n  5438,\n  5439,\n  5440,\n  5441,\n  5442,\n  5443,\n  5444,\n  5445,\n  5446,\n  5447,\n  5448,\n  5449,\n  5450,\n  5451,\n  5452,\n  5453,\n  5454,\n  5455,\n  5456,\n  5457,\n  5458,\n  5459,\n  5460,\n  5461,\n  5462,\n  5463,\n  5464,\n  5465,\n  5466,\n  5467,\n  5468,\n  5469,\n  5470,\n  5471,\n  5472,\n  5473,\n  5474,\n  5475,\n  5476,\n  5477,\n  5478,\n  5479,\n  5480,\n  5481,\n  5482,\n  5483,\n  5484,\n  5485,\n  5486,\n  5487,\n  5488,\n  5489,\n  5490,\n  5491,\n  5492,\n  5493,\n  5494,\n  5495,\n  5496,\n  5497,\n  5498,\n  5499,\n  5500,\n  5501,\n  5502,\n  5503,\n  5504,\n  5505,\n  5506,\n  5507,\n  5508,\n  5509,\n  5510,\n  5511,\n  5512,\n  5513,\n  5514,\n  5515,\n  5516,\n  5517,\n  5518,\n  5519,\n  5520,\n  5521,\n  5522,\n  5523,\n  5524,\n  5525,\n  5526,\n  5527,\n  5528,\n  5529,\n  5530,\n  5531,\n  5532,\n  5533,\n  5534,\n  5535,\n  5536,\n  5537,\n  5538,\n  5539,\n  5540,\n  5541,\n  5542,\n  5543,\n  5544,\n  5545,\n  5546,\n  5547,\n  5548,\n  5549,\n  5550,\n  5551,\n  5552,\n  5553,\n  5554,\n  5555,\n  5556,\n  5557,\n  5558,\n  5559,\n  5560,\n  5561,\n  5562,\n  5563,\n  5564,\n  5565,\n  5566,\n  5567,\n  5568,\n  5569,\n  5570,\n  5571,\n  5572,\n  5573,\n  5574,\n  5575,\n  5576,\n  5577,\n  5578,\n  5579,\n  5580,\n  5581,\n  5582,\n  5583,\n  5584,\n  5585,\n  5586,\n  5587,\n  5588,\n  5589,\n  5590,\n  5591,\n  5592,\n  5593,\n  5594,\n  5595,\n  5596,\n  5597,\n  5598,\n  5599,\n  5600,\n  5601,\n  5602,\n  5603,\n  5604,\n  5605,\n  5606,\n  5607,\n  5608,\n  5609,\n  5610,\n  5611,\n  5612,\n  5613,\n  5614,\n  5615,\n  5616,\n  5617,\n  5618,\n  5619,\n  5620,\n  5621,\n  5622,\n  5623,\n  5624,\n  5625,\n  5626,\n  5627,\n  5628,\n  5629,\n  5630,\n  5631,\n  5632,\n  5633,\n  5634,\n  5635,\n  5636,\n  5637,\n  5638,\n  5639,\n  5640,\n  5641,\n  5642,\n  5643,\n  5644,\n  5645,\n  5646,\n  5647,\n  5648,\n  5649,\n  5650,\n  5651,\n  5652,\n  5653,\n  5654,\n  5655,\n  5656,\n  5657,\n  5658,\n  5659,\n  5660,\n  5661,\n  5662,\n  5663,\n  5664,\n  5665,\n  5666,\n  5667,\n  5668,\n  5669,\n  5670,\n  5671,\n  5672,\n  5673,\n  5674,\n  5675,\n  5676,\n  5677,\n  5678,\n  5679,\n  5680,\n  5681,\n  5682,\n  5683,\n  5684,\n  5685,\n  5686,\n  5687,\n  5688,\n  5689,\n  5690,\n  5691,\n  5692,\n  5693,\n  5694,\n  5695,\n  5696,\n  5697,\n  5698,\n  5699,\n  5700,\n  5701,\n  5702,\n  5703,\n  5704,\n  5705,\n  5706,\n  5707,\n  5708,\n  5709,\n  5710,\n  5711,\n  5712,\n  5713,\n  5714,\n  5715,\n  5716,\n  5717,\n  5718,\n  5719,\n  5720,\n  5721,\n  5722,\n  5723,\n  5724,\n  5725,\n  5726,\n  5727,\n  5728,\n  5729,\n  5730,\n  5731,\n  5732,\n  5733,\n  5734,\n  5735,\n  5736,\n  5737,\n  5738,\n  5739,\n  5740,\n  5741,\n  5742,\n  5743,\n  5744,\n  5745,\n  5746,\n  5747,\n  5748,\n  5749,\n  5750,\n  5751,\n  5752,\n  5753,\n  5754,\n  5755,\n  5756,\n  5757,\n  5758,\n  5759,\n  5760,\n  5761,\n  5762,\n  5763,\n  5764,\n  5765,\n  5766,\n  5767,\n  5768,\n  5769,\n  5770,\n  5771,\n  5772,\n  5773,\n  5774,\n  5775,\n  5776,\n  5777,\n  5778,\n  5779,\n  5780,\n  5781,\n  5782,\n  5783,\n  5784,\n  5785,\n  5786,\n  5787,\n  5788,\n  5789,\n  5790,\n  5791,\n  5792,\n  5793,\n  5794,\n  5795,\n  5796,\n  5797,\n  5798,\n  5799,\n  5800,\n  5801,\n  5802,\n  5803,\n  5804,\n  5805,\n  5806,\n  5807,\n  5808,\n  5809,\n  5810,\n  5811,\n  5812,\n  5813,\n  5814,\n  5815,\n  5816,\n  5817,\n  5818,\n  5819,\n  5820,\n  5821,\n  5822,\n  5823,\n  5824,\n  5825,\n  5826,\n  5827,\n  5828,\n  5829,\n  5830,\n  5831,\n  5832,\n  5833,\n  5834,\n  5835,\n  5836,\n  5837,\n  5838,\n  5839,\n  5840,\n  5841,\n  5842,\n  5843,\n  5844,\n  5845,\n  5846,\n  5847,\n  5848,\n  5849,\n  5850,\n  5851,\n  5852,\n  5853,\n  5854,\n  5855,\n  5856,\n  5857,\n  5858,\n  5859,\n  5860,\n  5861,\n  5862,\n  5863,\n  5864,\n  5865,\n  5866,\n  5867,\n  5868,\n  5869,\n  5870,\n  5871,\n  5872,\n  5873,\n  5874,\n  5875,\n  5876,\n  5877,\n  5878,\n  5879,\n  5880,\n  5881,\n  5882,\n  5883,\n  5884,\n  5885,\n  5886,\n  5887,\n  5888,\n  5889,\n  5890,\n  5891,\n  5892,\n  5893,\n  5894,\n  5895,\n  5896,\n  5897,\n  5898,\n  5899,\n  5900,\n  5901,\n  5902,\n  5903,\n  5904,\n  5905,\n  5906,\n  5907,\n  5908,\n  5909,\n  5910,\n  5911,\n  5912,\n  5913,\n  5914,\n  5915,\n  5916,\n  5917,\n  5918,\n  5919,\n  5920,\n  5921,\n  5922,\n  5923,\n  5924,\n  5925,\n  5926,\n  5927,\n  5928,\n  5929,\n  5930,\n  5931,\n  5932,\n  5933,\n  5934,\n  5935,\n  5936,\n  5937,\n  5938,\n  5939,\n  5940,\n  5941,\n  5942,\n  5943,\n  5944,\n  5945,\n  5946,\n  5947,\n  5948,\n  5949,\n  5950,\n  5951,\n  5952,\n  5953,\n  5954,\n  5955,\n  5956,\n  5957,\n  5958,\n  5959,\n  5960,\n  5961,\n  5962,\n  5963,\n  5964,\n  5965,\n  5966,\n  5967,\n  5968,\n  5969,\n  5970,\n  5971,\n  5972,\n  5973,\n  5974,\n  5975,\n  5976,\n  5977,\n  5978,\n  5979,\n  5980,\n  5981,\n  5982,\n  5983,\n  5984,\n  5985,\n  5986,\n  5987,\n  5988,\n  5989,\n  5990,\n  5991,\n  5992,\n  5993,\n  5994,\n  5995,\n  5996,\n  5997,\n  5998,\n  5999,\n  6000,\n  6001,\n  6002,\n  6003,\n  6004,\n  6005,\n  6006,\n  6007,\n  6008,\n  6009,\n  6010,\n  6011,\n  6012,\n  6013,\n  6014,\n  6015,\n  6016,\n  6017,\n  6018,\n  6019,\n  6020,\n  6021,\n  6022,\n  6023,\n  6024,\n  6025,\n  6026,\n  6027,\n  6028,\n  6029,\n  6030,\n  6031,\n  6032,\n  6033,\n  6034,\n  6035,\n  6036,\n  6037,\n  6038,\n  6039,\n  6040,\n  6041,\n  6042,\n  6043,\n  6044,\n  6045,\n  6046,\n  6047,\n  6048,\n  6049,\n  6050,\n  6051,\n  6052,\n  6053,\n  6054,\n  6055,\n  6056,\n  6057,\n  6058,\n  6059,\n  6060,\n  6061,\n  6062,\n  6063,\n  6064,\n  6065,\n  6066,\n  6067,\n  6068,\n  6069,\n  6070,\n  6071,\n  6072,\n  6073,\n  6074,\n  6075,\n  6076,\n  6077,\n  6078,\n  6079,\n  6080,\n  6081,\n  6082,\n  6083,\n  6084,\n  6085,\n  6086,\n  6087,\n  6088,\n  6089,\n  6090,\n  6091,\n  6092,\n  6093,\n  6094,\n  6095,\n  6096,\n  6097,\n  6098,\n  6099,\n  6100,\n  6101,\n  6102,\n  6103,\n  6104,\n  6105,\n  6106,\n  6107,\n  6108,\n  6109,\n  6110,\n  6111,\n  6112,\n  6113,\n  6114,\n  6115,\n  6116,\n  6117,\n  6118,\n  6119,\n  6120,\n  6121,\n  6122,\n  6123,\n  6124,\n  6125,\n  6126,\n  6127,\n  6128,\n  6129,\n  6130,\n  6131,\n  6132,\n  6133,\n  6134,\n  6135,\n  6136,\n  6137,\n  6138,\n  6139,\n  6140,\n  6141,\n  6142,\n  6143,\n  6144,\n  6145,\n  6146,\n  6147,\n  6148,\n  6149,\n  6150,\n  6151,\n  6152,\n  6153,\n  6154,\n  6155,\n  6156,\n  6157,\n  6158,\n  6159,\n  6160,\n  6161,\n  6162,\n  6163,\n  6164,\n  6165,\n  6166,\n  6167,\n  6168,\n  6169,\n  6170,\n  6171,\n  6172,\n  6173,\n  6174,\n  6175,\n  6176,\n  6177,\n  6178,\n  6179,\n  6180,\n  6181,\n  6182,\n  6183,\n  6184,\n  6185,\n  6186,\n  6187,\n  6188,\n  6189,\n  6190,\n  6191,\n  6192,\n  6193,\n  6194,\n  6195,\n  6196,\n  6197,\n  6198,\n  6199,\n  6200,\n  6201,\n  6202,\n  6203,\n  6204,\n  6205,\n  6206,\n  6207,\n  6208,\n  6209,\n  6210,\n  6211,\n  6212,\n  6213,\n  6214,\n  6215,\n  6216,\n  6217,\n  6218,\n  6219,\n  6220,\n  6221,\n  6222,\n  6223,\n  6224,\n  6225,\n  6226,\n  6227,\n  6228,\n  6229,\n  6230,\n  6231,\n  6232,\n  6233,\n  6234,\n  6235,\n  6236,\n  6237,\n  6238,\n  6239,\n  6240,\n  6241,\n  6242,\n  6243,\n  6244,\n  6245,\n  6246,\n  6247,\n  6248,\n  6249,\n  6250,\n  6251,\n  6252,\n  6253,\n  6254,\n  6255,\n  6256,\n  6257,\n  6258,\n  6259,\n  6260,\n  6261,\n  6262,\n  6263,\n  6264,\n  6265,\n  6266,\n  6267,\n  6268,\n  6269,\n  6270,\n  6271,\n  6272,\n  6273,\n  6274,\n  6275,\n  6276,\n  6277,\n  6278,\n  6279,\n  6280,\n  6281,\n  6282,\n  6283,\n  6284,\n  6285,\n  6286,\n  6287,\n  6288,\n  6289,\n  6290,\n  6291,\n  6292,\n  6293,\n  6294,\n  6295,\n  6296,\n  6297,\n  6298,\n  6299,\n  6300,\n  6301,\n  6302,\n  6303,\n  6304,\n  6305,\n  6306,\n  6307,\n  6308,\n  6309,\n  6310,\n  6311,\n  6312,\n  6313,\n  6314,\n  6315,\n  6316,\n  6317,\n  6318,\n  6319,\n  6320,\n  6321,\n  6322,\n  6323,\n  6324,\n  6325,\n  6326,\n  6327,\n  6328,\n  6329,\n  6330,\n  6331,\n  6332,\n  6333,\n  6334,\n  6335,\n  6336,\n  6337,\n  6338,\n  6339,\n  6340,\n  6341,\n  6342,\n  6343,\n  6344,\n  6345,\n  6346,\n  6347,\n  6348,\n  6349,\n  6350,\n  6351,\n  6352,\n  6353,\n  6354,\n  6355,\n  6356,\n  6357,\n  6358,\n  6359,\n  6360,\n  6361,\n  6362,\n  6363,\n  6364,\n  6365,\n  6366,\n  6367,\n  6368,\n  6369,\n  6370,\n  6371,\n  6372,\n  6373,\n  6374,\n  6375,\n  6376,\n  6377,\n  6378,\n  6379,\n  6380,\n  6381,\n  6382,\n  6383,\n  6384,\n  6385,\n  6386,\n  6387,\n  6388,\n  6389,\n  6390,\n  6391,\n  6392,\n  6393,\n  6394,\n  6395,\n  6396,\n  6397,\n  6398,\n  6399,\n  6400,\n  6401,\n  6402,\n  6403,\n  6404,\n  6405,\n  6406,\n  6407,\n  6408,\n  6409,\n  6410,\n  6411,\n  6412,\n  6413,\n  6414,\n  6415,\n  6416,\n  6417,\n  6418,\n  6419,\n  6420,\n  6421,\n  6422,\n  6423,\n  6424,\n  6425,\n  6426,\n  6427,\n  6428,\n  6429,\n  6430,\n  6431,\n  6432,\n  6433,\n  6434,\n  6435,\n  6436,\n  6437,\n  6438,\n  6439,\n  6440,\n  6441,\n  6442,\n  6443,\n  6444,\n  6445,\n  6446,\n  6447,\n  6448,\n  6449,\n  6450,\n  6451,\n  6452,\n  6453,\n  6454,\n  6455,\n  6456,\n  6457,\n  6458,\n  6459,\n  6460,\n  6461,\n  6462,\n  6463,\n  6464,\n  6465,\n  6466,\n  6467,\n  6468,\n  6469,\n  6470,\n  6471,\n  6472,\n  6473,\n  6474,\n  6475,\n  6476,\n  6477,\n  6478,\n  6479,\n  6480,\n  6481,\n  6482,\n  6483,\n  6484,\n  6485,\n  6486,\n  6487,\n  6488,\n  6489,\n  6490,\n  6491,\n  6492,\n  6493,\n  6494,\n  6495,\n  6496,\n  6497,\n  6498,\n  6499,\n  6500,\n  6501,\n  6502,\n  6503,\n  6504,\n  6505,\n  6506,\n  6507,\n  6508,\n  6509,\n  6510,\n  6511,\n  6512,\n  6513,\n  6514,\n  6515,\n  6516,\n  6517,\n  6518,\n  6519,\n  6520,\n  6521,\n  6522,\n  6523,\n  6524,\n  6525,\n  6526,\n  6527,\n  6528,\n  6529,\n  6530,\n  6531,\n  6532,\n  6533,\n  6534,\n  6535,\n  6536,\n  6537,\n  6538,\n  6539,\n  6540,\n  6541,\n  6542,\n  6543,\n  6544,\n  6545,\n  6546,\n  6547,\n  6548,\n  6549,\n  6550,\n  6551,\n  6552,\n  6553,\n  6554,\n  6555,\n  6556,\n  6557,\n  6558,\n  6559,\n  6560,\n  6561,\n  6562,\n  6563,\n  6564,\n  6565,\n  6566,\n  6567,\n  6568,\n  6569,\n  6570,\n  6571,\n  6572,\n  6573,\n  6574,\n  6575,\n  6576,\n  6577,\n  6578,\n  6579,\n  6580,\n  6581,\n  6582,\n  6583,\n  6584,\n  6585,\n  6586,\n  6587,\n  6588,\n  6589,\n  6590,\n  6591,\n  6592,\n  6593,\n  6594,\n  6595,\n  6596,\n  6597,\n  6598,\n  6599,\n  6600,\n  6601,\n  6602,\n  6603,\n  6604,\n  6605,\n  6606,\n  6607,\n  6608,\n  6609,\n  6610,\n  6611,\n  6612,\n  6613,\n  6614,\n  6615,\n  6616,\n  6617,\n  6618,\n  6619,\n  6620,\n  6621,\n  6622,\n  6623,\n  6624,\n  6625,\n  6626,\n  6627,\n  6628,\n  6629,\n  6630,\n  6631,\n  6632,\n  6633,\n  6634,\n  6635,\n  6636,\n  6637,\n  6638,\n  6639,\n  6640,\n  6641,\n  6642,\n  6643,\n  6644,\n  6645,\n  6646,\n  6647,\n  6648,\n  6649,\n  6650,\n  6651,\n  6652,\n  6653,\n  6654,\n  6655,\n  6656,\n  6657,\n  6658,\n  6659,\n  6660,\n  6661,\n  6662,\n  6663,\n  6664,\n  6665,\n  6666,\n  6667,\n  6668,\n  6669,\n  6670,\n  6671,\n  6672,\n  6673,\n  6674,\n  6675,\n  6676,\n  6677,\n  6678,\n  6679,\n  6680,\n  6681,\n  6682,\n  6683,\n  6684,\n  6685,\n  6686,\n  6687,\n  6688,\n  6689,\n  6690,\n  6691,\n  6692,\n  6693,\n  6694,\n  6695,\n  6696,\n  6697,\n  6698,\n  6699,\n  6700,\n  6701,\n  6702,\n  6703,\n  6704,\n  6705,\n  6706,\n  6707,\n  6708,\n  6709,\n  6710,\n  6711,\n  6712,\n  6713,\n  6714,\n  6715,\n  6716,\n  6717,\n  6718,\n  6719,\n  6720,\n  6721,\n  6722,\n  6723,\n  6724,\n  6725,\n  6726,\n  6727,\n  6728,\n  6729,\n  6730,\n  6731,\n  6732,\n  6733,\n  6734,\n  6735,\n  6736,\n  6737,\n  6738,\n  6739,\n  6740,\n  6741,\n  6742,\n  6743,\n  6744,\n  6745,\n  6746,\n  6747,\n  6748,\n  6749,\n  6750,\n  6751,\n  6752,\n  6753,\n  6754,\n  6755,\n  6756,\n  6757,\n  6758,\n  6759,\n  6760,\n  6761,\n  6762,\n  6763,\n  6764,\n  6765,\n  6766,\n  6767,\n  6768,\n  6769,\n  6770,\n  6771,\n  6772,\n  6773,\n  6774,\n  6775,\n  6776,\n  6777,\n  6778,\n  6779,\n  6780,\n  6781,\n  6782,\n  6783,\n  6784,\n  6785,\n  6786,\n  6787,\n  6788,\n  6789,\n  6790,\n  6791,\n  6792,\n  6793,\n  6794,\n  6795,\n  6796,\n  6797,\n  6798,\n  6799,\n  6800,\n  6801,\n  6802,\n  6803,\n  6804,\n  6805,\n  6806,\n  6807,\n  6808,\n  6809,\n  6810,\n  6811,\n  6812,\n  6813,\n  6814,\n  6815,\n  6816,\n  6817,\n  6818,\n  6819,\n  6820,\n  6821,\n  6822,\n  6823,\n  6824,\n  6825,\n  6826,\n  6827,\n  6828,\n  6829,\n  6830,\n  6831,\n  6832,\n  6833,\n  6834,\n  6835,\n  6836,\n  6837,\n  6838,\n  6839,\n  6840,\n  6841,\n  6842,\n  6843,\n  6844,\n  6845,\n  6846,\n  6847,\n  6848,\n  6849,\n  6850,\n  6851,\n  6852,\n  6853,\n  6854,\n  6855,\n  6856,\n  6857,\n  6858,\n  6859,\n  6860,\n  6861,\n  6862,\n  6863,\n  6864,\n  6865,\n  6866,\n  6867,\n  6868,\n  6869,\n  6870,\n  6871,\n  6872,\n  6873,\n  6874,\n  6875,\n  6876,\n  6877,\n  6878,\n  6879,\n  6880,\n  6881,\n  6882,\n  6883,\n  6884,\n  6885,\n  6886,\n  6887,\n  6888,\n  6889,\n  6890,\n  6891,\n  6892,\n  6893,\n  6894,\n  6895,\n  6896,\n  6897,\n  6898,\n  6899,\n  6900,\n  6901,\n  6902,\n  6903,\n  6904,\n  6905,\n  6906,\n  6907,\n  6908,\n  6909,\n  6910,\n  6911,\n  6912,\n  6913,\n  6914,\n  6915,\n  6916,\n  6917,\n  6918,\n  6919,\n  6920,\n  6921,\n  6922,\n  6923,\n  6924,\n  6925,\n  6926,\n  6927,\n  6928,\n  6929,\n  6930,\n  6931,\n  6932,\n  6933,\n  6934,\n  6935,\n  6936,\n  6937,\n  6938,\n  6939,\n  6940,\n  6941,\n  6942,\n  6943,\n  6944,\n  6945,\n  6946,\n  6947,\n  6948,\n  6949,\n  6950,\n  6951,\n  6952,\n  6953,\n  6954,\n  6955,\n  6956,\n  6957,\n  6958,\n  6959,\n  6960,\n  6961,\n  6962,\n  6963,\n  6964,\n  6965,\n  6966,\n  6967,\n  6968,\n  6969,\n  6970,\n  6971,\n  6972,\n  6973,\n  6974,\n  6975,\n  6976,\n  6977,\n  6978,\n  6979,\n  6980,\n  6981,\n  6982,\n  6983,\n  6984,\n  6985,\n  6986,\n  6987,\n  6988,\n  6989,\n  6990,\n  6991,\n  6992,\n  6993,\n  6994,\n  6995,\n  6996,\n  6997,\n  6998,\n  6999,\n  7000,\n  7001,\n  7002,\n  7003,\n  7004,\n  7005,\n  7006,\n  7007,\n  7008,\n  7009,\n  7010,\n  7011,\n  7012,\n  7013,\n  7014,\n  7015,\n  7016,\n  7017,\n  7018,\n  7019,\n  7020,\n  7021,\n  7022,\n  7023,\n  7024,\n  7025,\n  7026,\n  7027,\n  7028,\n  7029,\n  7030,\n  7031,\n  7032,\n  7033,\n  7034,\n  7035,\n  7036,\n  7037,\n  7038,\n  7039,\n  7040,\n  7041,\n  7042,\n  7043,\n  7044,\n  7045,\n  7046,\n  7047,\n  7048,\n  7049,\n  7050,\n  7051,\n  7052,\n  7053,\n  7054,\n  7055,\n  7056,\n  7057,\n  7058,\n  7059,\n  7060,\n  7061,\n  7062,\n  7063,\n  7064,\n  7065,\n  7066,\n  7067,\n  7068,\n  7069,\n  7070,\n  7071,\n  7072,\n  7073,\n  7074,\n  7075,\n  7076,\n  7077,\n  7078,\n  7079,\n  7080,\n  7081,\n  7082,\n  7083,\n  7084,\n  7085,\n  7086,\n  7087,\n  7088,\n  7089,\n  7090,\n  7091,\n  7092,\n  7093,\n  7094,\n  7095,\n  7096,\n  7097,\n  7098,\n  7099,\n  7100,\n  7101,\n  7102,\n  7103,\n  7104,\n  7105,\n  7106,\n  7107,\n  7108,\n  7109,\n  7110,\n  7111,\n  7112,\n  7113,\n  7114,\n  7115,\n  7116,\n  7117,\n  7118,\n  7119,\n  7120,\n  7121,\n  7122,\n  7123,\n  7124,\n  7125,\n  7126,\n  7127,\n  7128,\n  7129,\n  7130,\n  7131,\n  7132,\n  7133,\n  7134,\n  7135,\n  7136,\n  7137,\n  7138,\n  7139,\n  7140,\n  7141,\n  7142,\n  7143,\n  7144,\n  7145,\n  7146,\n  7147,\n  7148,\n  7149,\n  7150,\n  7151,\n  7152,\n  7153,\n  7154,\n  7155,\n  7156,\n  7157,\n  7158,\n  7159,\n  7160,\n  7161,\n  7162,\n  7163,\n  7164,\n  7165,\n  7166,\n  7167,\n  7168,\n  7169,\n  7170,\n  7171,\n  7172,\n  7173,\n  7174,\n  7175,\n  7176,\n  7177,\n  7178,\n  7179,\n  7180,\n  7181,\n  7182,\n  7183,\n  7184,\n  7185,\n  7186,\n  7187,\n  7188,\n  7189,\n  7190,\n  7191,\n  7192,\n  7193,\n  7194,\n  7195,\n  7196,\n  7197,\n  7198,\n  7199,\n  7200,\n  7201,\n  7202,\n  7203,\n  7204,\n  7205,\n  7206,\n  7207,\n  7208,\n  7209,\n  7210,\n  7211,\n  7212,\n  7213,\n  7214,\n  7215,\n  7216,\n  7217,\n  7218,\n  7219,\n  7220,\n  7221,\n  7222,\n  7223,\n  7224,\n  7225,\n  7226,\n  7227,\n  7228,\n  7229,\n  7230,\n  7231,\n  7232,\n  7233,\n  7234,\n  7235,\n  7236,\n  7237,\n  7238,\n  7239,\n  7240,\n  7241,\n  7242,\n  7243,\n  7244,\n  7245,\n  7246,\n  7247,\n  7248,\n  7249,\n  7250,\n  7251,\n  7252,\n  7253,\n  7254,\n  7255,\n  7256,\n  7257,\n  7258,\n  7259,\n  7260,\n  7261,\n  7262,\n  7263,\n  7264,\n  7265,\n  7266,\n  7267,\n  7268,\n  7269,\n  7270,\n  7271,\n  7272,\n  7273,\n  7274,\n  7275,\n  7276,\n  7277,\n  7278,\n  7279,\n  7280,\n  7281,\n  7282,\n  7283,\n  7284,\n  7285,\n  7286,\n  7287,\n  7288,\n  7289,\n  7290,\n  7291,\n  7292,\n  7293,\n  7294,\n  7295,\n  7296,\n  7297,\n  7298,\n  7299,\n  7300,\n  7301,\n  7302,\n  7303,\n  7304,\n  7305,\n  7306,\n  7307,\n  7308,\n  7309,\n  7310,\n  7311,\n  7312,\n  7313,\n  7314,\n  7315,\n  7316,\n  7317,\n  7318,\n  7319,\n  7320,\n  7321,\n  7322,\n  7323,\n  7324,\n  7325,\n  7326,\n  7327,\n  7328,\n  7329,\n  7330,\n  7331,\n  7332,\n  7333,\n  7334,\n  7335,\n  7336,\n  7337,\n  7338,\n  7339,\n  7340,\n  7341,\n  7342,\n  7343,\n  7344,\n  7345,\n  7346,\n  7347,\n  7348,\n  7349,\n  7350,\n  7351,\n  7352,\n  7353,\n  7354,\n  7355,\n  7356,\n  7357,\n  7358,\n  7359,\n  7360,\n  7361,\n  7362,\n  7363,\n  7364,\n  7365,\n  7366,\n  7367,\n  7368,\n  7369,\n  7370,\n  7371,\n  7372,\n  7373,\n  7374,\n  7375,\n  7376,\n  7377,\n  7378,\n  7379,\n  7380,\n  7381,\n  7382,\n  7383,\n  7384,\n  7385,\n  7386,\n  7387,\n  7388,\n  7389,\n  7390,\n  7391,\n  7392,\n  7393,\n  7394,\n  7395,\n  7396,\n  7397,\n  7398,\n  7399,\n  7400,\n  7401,\n  7402,\n  7403,\n  7404,\n  7405,\n  7406,\n  7407,\n  7408,\n  7409,\n  7410,\n  7411,\n  7412,\n  7413,\n  7414,\n  7415,\n  7416,\n  7417,\n  7418,\n  7419,\n  7420,\n  7421,\n  7422,\n  7423,\n  7424,\n  7425,\n  7426,\n  7427,\n  7428,\n  7429,\n  7430,\n  7431,\n  7432,\n  7433,\n  7434,\n  7435,\n  7436,\n  7437,\n  7438,\n  7439,\n  7440,\n  7441,\n  7442,\n  7443,\n  7444,\n  7445,\n  7446,\n  7447,\n  7448,\n  7449,\n  7450,\n  7451,\n  7452,\n  7453,\n  7454,\n  7455,\n  7456,\n  7457,\n  7458,\n  7459,\n  7460,\n  7461,\n  7462,\n  7463,\n  7464,\n  7465,\n  7466,\n  7467,\n  7468,\n  7469,\n  7470,\n  7471,\n  7472,\n  7473,\n  7474,\n  7475,\n  7476,\n  7477,\n  7478,\n  7479,\n  7480,\n  7481,\n  7482,\n  7483,\n  7484,\n  7485,\n  7486,\n  7487,\n  7488,\n  7489,\n  7490,\n  7491,\n  7492,\n  7493,\n  7494,\n  7495,\n  7496,\n  7497,\n  7498,\n  7499,\n  7500,\n  7501,\n  7502,\n  7503,\n  7504,\n  7505,\n  7506,\n  7507,\n  7508,\n  7509,\n  7510,\n  7511,\n  7512,\n  7513,\n  7514,\n  7515,\n  7516,\n  7517,\n  7518,\n  7519,\n  7520,\n  7521,\n  7522,\n  7523,\n  7524,\n  7525,\n  7526,\n  7527,\n  7528,\n  7529,\n  7530,\n  7531,\n  7532,\n  7533,\n  7534,\n  7535,\n  7536,\n  7537,\n  7538,\n  7539,\n  7540,\n  7541,\n  7542,\n  7543,\n  7544,\n  7545,\n  7546,\n  7547,\n  7548,\n  7549,\n  7550,\n  7551,\n  7552,\n  7553,\n  7554,\n  7555,\n  7556,\n  7557,\n  7558,\n  7559,\n  7560,\n  7561,\n  7562,\n  7563,\n  7564,\n  7565,\n  7566,\n  7567,\n  7568,\n  7569,\n  7570,\n  7571,\n  7572,\n  7573,\n  7574,\n  7575,\n  7576,\n  7577,\n  7578,\n  7579,\n  7580,\n  7581,\n  7582,\n  7583,\n  7584,\n  7585,\n  7586,\n  7587,\n  7588,\n  7589,\n  7590,\n  7591,\n  7592,\n  7593,\n  7594,\n  7595,\n  7596,\n  7597,\n  7598,\n  7599,\n  7600,\n  7601,\n  7602,\n  7603,\n  7604,\n  7605,\n  7606,\n  7607,\n  7608,\n  7609,\n  7610,\n  7611,\n  7612,\n  7613,\n  7614,\n  7615,\n  7616,\n  7617,\n  7618,\n  7619,\n  7620,\n  7621,\n  7622,\n  7623,\n  7624,\n  7625,\n  7626,\n  7627,\n  7628,\n  7629,\n  7630,\n  7631,\n  7632,\n  7633,\n  7634,\n  7635,\n  7636,\n  7637,\n  7638,\n  7639,\n  7640,\n  7641,\n  7642,\n  7643,\n  7644,\n  7645,\n  7646,\n  7647,\n  7648,\n  7649,\n  7650,\n  7651,\n  7652,\n  7653,\n  7654,\n  7655,\n  7656,\n  7657,\n  7658,\n  7659,\n  7660,\n  7661,\n  7662,\n  7663,\n  7664,\n  7665,\n  7666,\n  7667,\n  7668,\n  7669,\n  7670,\n  7671,\n  7672,\n  7673,\n  7674,\n  7675,\n  7676,\n  7677,\n  7678,\n  7679,\n  7680,\n  7681,\n  7682,\n  7683,\n  7684,\n  7685,\n  7686,\n  7687,\n  7688,\n  7689,\n  7690,\n  7691,\n  7692,\n  7693,\n  7694,\n  7695,\n  7696,\n  7697,\n  7698,\n  7699,\n  7700,\n  7701,\n  7702,\n  7703,\n  7704,\n  7705,\n  7706,\n  7707,\n  7708,\n  7709,\n  7710,\n  7711,\n  7712,\n  7713,\n  7714,\n  7715,\n  7716,\n  7717,\n  7718,\n  7719,\n  7720,\n  7721,\n  7722,\n  7723,\n  7724,\n  7725,\n  7726,\n  7727,\n  7728,\n  7729,\n  7730,\n  7731,\n  7732,\n  7733,\n  7734,\n  7735,\n  7736,\n  7737,\n  7738,\n  7739,\n  7740,\n  7741,\n  7742,\n  7743,\n  7744,\n  7745,\n  7746,\n  7747,\n  7748,\n  7749,\n  7750,\n  7751,\n  7752,\n  7753,\n  7754,\n  7755,\n  7756,\n  7757,\n  7758,\n  7759,\n  7760,\n  7761,\n  7762,\n  7763,\n  7764,\n  7765,\n  7766,\n  7767,\n  7768,\n  7769,\n  7770,\n  7771,\n  7772,\n  7773,\n  7774,\n  7775,\n  7776,\n  7777,\n  7778,\n  7779,\n  7780,\n  7781,\n  7782,\n  7783,\n  7784,\n  7785,\n  7786,\n  7787,\n  7788,\n  7789,\n  7790,\n  7791,\n  7792,\n  7793,\n  7794,\n  7795,\n  7796,\n  7797,\n  7798,\n  7799,\n  7800,\n  7801,\n  7802,\n  7803,\n  7804,\n  7805,\n  7806,\n  7807,\n  7808,\n  7809,\n  7810,\n  7811,\n  7812,\n  7813,\n  7814,\n  7815,\n  7816,\n  7817,\n  7818,\n  7819,\n  7820,\n  7821,\n  7822,\n  7823,\n  7824,\n  7825,\n  7826,\n  7827,\n  7828,\n  7829,\n  7830,\n  7831,\n  7832,\n  7833,\n  7834,\n  7835,\n  7836,\n  7837,\n  7838,\n  7839,\n  7840,\n  7841,\n  7842,\n  7843,\n  7844,\n  7845,\n  7846,\n  7847,\n  7848,\n  7849,\n  7850,\n  7851,\n  7852,\n  7853,\n  7854,\n  7855,\n  7856,\n  7857,\n  7858,\n  7859,\n  7860,\n  7861,\n  7862,\n  7863,\n  7864,\n  7865,\n  7866,\n  7867,\n  7868,\n  7869,\n  7870,\n  7871,\n  7872,\n  7873,\n  7874,\n  7875,\n  7876,\n  7877,\n  7878,\n  7879,\n  7880,\n  7881,\n  7882,\n  7883,\n  7884,\n  7885,\n  7886,\n  7887,\n  7888,\n  7889,\n  7890,\n  7891,\n  7892,\n  7893,\n  7894,\n  7895,\n  7896,\n  7897,\n  7898,\n  7899,\n  7900,\n  7901,\n  7902,\n  7903,\n  7904,\n  7905,\n  7906,\n  7907,\n  7908,\n  7909,\n  7910,\n  7911,\n  7912,\n  7913,\n  7914,\n  7915,\n  7916,\n  7917,\n  7918,\n  7919,\n  7920,\n  7921,\n  7922,\n  7923,\n  7924,\n  7925,\n  7926,\n  7927,\n  7928,\n  7929,\n  7930,\n  7931,\n  7932,\n  7933,\n  7934,\n  7935,\n  7936,\n  7937,\n  7938,\n  7939,\n  7940,\n  7941,\n  7942,\n  7943,\n  7944,\n  7945,\n  7946,\n  7947,\n  7948,\n  7949,\n  7950,\n  7951,\n  7952,\n  7953,\n  7954,\n  7955,\n  7956,\n  7957,\n  7958,\n  7959,\n  7960,\n  7961,\n  7962,\n  7963,\n  7964,\n  7965,\n  7966,\n  7967,\n  7968,\n  7969,\n  7970,\n  7971,\n  7972,\n  7973,\n  7974,\n  7975,\n  7976,\n  7977,\n  7978,\n  7979,\n  7980,\n  7981,\n  7982,\n  7983,\n  7984,\n  7985,\n  7986,\n  7987,\n  7988,\n  7989,\n  7990,\n  7991,\n  7992,\n  7993,\n  7994,\n  7995,\n  7996,\n  7997,\n  7998,\n  7999,\n  8000,\n  8001,\n  8002,\n  8003,\n  8004,\n  8005,\n  8006,\n  8007,\n  8008,\n  8009,\n  8010,\n  8011,\n  8012,\n  8013,\n  8014,\n  8015,\n  8016,\n  8017,\n  8018,\n  8019,\n  8020,\n  8021,\n  8022,\n  8023,\n  8024,\n  8025,\n  8026,\n  8027,\n  8028,\n  8029,\n  8030,\n  8031,\n  8032,\n  8033,\n  8034,\n  8035,\n  8036,\n  8037,\n  8038,\n  8039,\n  8040,\n  8041,\n  8042,\n  8043,\n  8044,\n  8045,\n  8046,\n  8047,\n  8048,\n  8049,\n  8050,\n  8051,\n  8052,\n  8053,\n  8054,\n  8055,\n  8056,\n  8057,\n  8058,\n  8059,\n  8060,\n  8061,\n  8062,\n  8063,\n  8064,\n  8065,\n  8066,\n  8067,\n  8068,\n  8069,\n  8070,\n  8071,\n  8072,\n  8073,\n  8074,\n  8075,\n  8076,\n  8077,\n  8078,\n  8079,\n  8080,\n  8081,\n  8082,\n  8083,\n  8084,\n  8085,\n  8086,\n  8087,\n  8088,\n  8089,\n  8090,\n  8091,\n  8092,\n  8093,\n  8094,\n  8095,\n  8096,\n  8097,\n  8098,\n  8099,\n  8100,\n  8101,\n  8102,\n  8103,\n  8104,\n  8105,\n  8106,\n  8107,\n  8108,\n  8109,\n  8110,\n  8111,\n  8112,\n  8113,\n  8114,\n  8115,\n  8116,\n  8117,\n  8118,\n  8119,\n  8120,\n  8121,\n  8122,\n  8123,\n  8124,\n  8125,\n  8126,\n  8127,\n  8128,\n  8129,\n  8130,\n  8131,\n  8132,\n  8133,\n  8134,\n  8135,\n  8136,\n  8137,\n  8138,\n  8139,\n  8140,\n  8141,\n  8142,\n  8143,\n  8144,\n  8145,\n  8146,\n  8147,\n  8148,\n  8149,\n  8150,\n  8151,\n  8152,\n  8153,\n  8154,\n  8155,\n  8156,\n  8157,\n  8158,\n  8159,\n  8160,\n  8161,\n  8162,\n  8163,\n  8164,\n  8165,\n  8166,\n  8167,\n  8168,\n  8169,\n  8170,\n  8171,\n  8172,\n  8173,\n  8174,\n  8175,\n  8176,\n  8177,\n  8178,\n  8179,\n  8180,\n  8181,\n  8182,\n  8183,\n  8184,\n  8185,\n  8186,\n  8187,\n  8188,\n  8189,\n  8190,\n  8191,\n  8192,\n  8193,\n  8194,\n  8195,\n  8196,\n  8197,\n  8198,\n  8199,\n  8200,\n  8201,\n  8202,\n  8203,\n  8204,\n  8205,\n  8206,\n  8207,\n  8208,\n  8209,\n  8210,\n  8211,\n  8212,\n  8213,\n  8214,\n  8215,\n  8216,\n  8217,\n  8218,\n  8219,\n  8220,\n  8221,\n  8222,\n  8223,\n  8224,\n  8225,\n  8226,\n  8227,\n  8228,\n  8229,\n  8230,\n  8231,\n  8232,\n  8233,\n  8234,\n  8235,\n  8236,\n  8237,\n  8238,\n  8239,\n  8240,\n  8241,\n  8242,\n  8243,\n  8244,\n  8245,\n  8246,\n  8247,\n  8248,\n  8249,\n  8250,\n  8251,\n  8252,\n  8253,\n  8254,\n  8255,\n  8256,\n  8257,\n  8258,\n  8259,\n  8260,\n  8261,\n  8262,\n  8263,\n  8264,\n  8265,\n  8266,\n  8267,\n  8268,\n  8269,\n  8270,\n  8271,\n  8272,\n  8273,\n  8274,\n  8275,\n  8276,\n  8277,\n  8278,\n  8279,\n  8280,\n  8281,\n  8282,\n  8283,\n  8284,\n  8285,\n  8286,\n  8287,\n  8288,\n  8289,\n  8290,\n  8291,\n  8292,\n  8293,\n  8294,\n  8295,\n  8296,\n  8297,\n  8298,\n  8299,\n  8300,\n  8301,\n  8302,\n  8303,\n  8304,\n  8305,\n  8306,\n  8307,\n  8308,\n  8309,\n  8310,\n  8311,\n  8312,\n  8313,\n  8314,\n  8315,\n  8316,\n  8317,\n  8318,\n  8319,\n  8320,\n  8321,\n  8322,\n  8323,\n  8324,\n  8325,\n  8326,\n  8327,\n  8328,\n  8329,\n  8330,\n  8331,\n  8332,\n  8333,\n  8334,\n  8335,\n  8336,\n  8337,\n  8338,\n  8339,\n  8340,\n  8341,\n  8342,\n  8343,\n  8344,\n  8345,\n  8346,\n  8347,\n  8348,\n  8349,\n  8350,\n  8351,\n  8352,\n  8353,\n  8354,\n  8355,\n  8356,\n  8357,\n  8358,\n  8359,\n  8360,\n  8361,\n  8362,\n  8363,\n  8364,\n  8365,\n  8366,\n  8367,\n  8368,\n  8369,\n  8370,\n  8371,\n  8372,\n  8373,\n  8374,\n  8375,\n  8376,\n  8377,\n  8378,\n  8379,\n  8380,\n  8381,\n  8382,\n  8383,\n  8384,\n  8385,\n  8386,\n  8387,\n  8388,\n  8389,\n  8390,\n  8391,\n  8392,\n  8393,\n  8394,\n  8395,\n  8396,\n  8397,\n  8398,\n  8399,\n  8400,\n  8401,\n  8402,\n  8403,\n  8404,\n  8405,\n  8406,\n  8407,\n  8408,\n  8409,\n  8410,\n  8411,\n  8412,\n  8413,\n  8414,\n  8415,\n  8416,\n  8417,\n  8418,\n  8419,\n  8420,\n  8421,\n  8422,\n  8423,\n  8424,\n  8425,\n  8426,\n  8427,\n  8428,\n  8429,\n  8430,\n  8431,\n  8432,\n  8433,\n  8434,\n  8435,\n  8436,\n  8437,\n  8438,\n  8439,\n  8440,\n  8441,\n  8442,\n  8443,\n  8444,\n  8445,\n  8446,\n  8447,\n  8448,\n  8449,\n  8450,\n  8451,\n  8452,\n  8453,\n  8454,\n  8455,\n  8456,\n  8457,\n  8458,\n  8459,\n  8460,\n  8461,\n  8462,\n  8463,\n  8464,\n  8465,\n  8466,\n  8467,\n  8468,\n  8469,\n  8470,\n  8471,\n  8472,\n  8473,\n  8474,\n  8475,\n  8476,\n  8477,\n  8478,\n  8479,\n  8480,\n  8481,\n  8482,\n  8483,\n  8484,\n  8485,\n  8486,\n  8487,\n  8488,\n  8489,\n  8490,\n  8491,\n  8492,\n  8493,\n  8494,\n  8495,\n  8496,\n  8497,\n  8498,\n  8499,\n  8500,\n  8501,\n  8502,\n  8503,\n  8504,\n  8505,\n  8506,\n  8507,\n  8508,\n  8509,\n  8510,\n  8511,\n  8512,\n  8513,\n  8514,\n  8515,\n  8516,\n  8517,\n  8518,\n  8519,\n  8520,\n  8521,\n  8522,\n  8523,\n  8524,\n  8525,\n  8526,\n  8527,\n  8528,\n  8529,\n  8530,\n  8531,\n  8532,\n  8533,\n  8534,\n  8535,\n  8536,\n  8537,\n  8538,\n  8539,\n  8540,\n  8541,\n  8542,\n  8543,\n  8544,\n  8545,\n  8546,\n  8547,\n  8548,\n  8549,\n  8550,\n  8551,\n  8552,\n  8553,\n  8554,\n  8555,\n  8556,\n  8557,\n  8558,\n  8559,\n  8560,\n  8561,\n  8562,\n  8563,\n  8564,\n  8565,\n  8566,\n  8567,\n  8568,\n  8569,\n  8570,\n  8571,\n  8572,\n  8573,\n  8574,\n  8575,\n  8576,\n  8577,\n  8578,\n  8579,\n  8580,\n  8581,\n  8582,\n  8583,\n  8584,\n  8585,\n  8586,\n  8587,\n  8588,\n  8589,\n  8590,\n  8591,\n  8592,\n  8593,\n  8594,\n  8595,\n  8596,\n  8597,\n  8598,\n  8599,\n  8600,\n  8601,\n  8602,\n  8603,\n  8604,\n  8605,\n  8606,\n  8607,\n  8608,\n  8609,\n  8610,\n  8611,\n  8612,\n  8613,\n  8614,\n  8615,\n  8616,\n  8617,\n  8618,\n  8619,\n  8620,\n  8621,\n  8622,\n  8623,\n  8624,\n  8625,\n  8626,\n  8627,\n  8628,\n  8629,\n  8630,\n  8631,\n  8632,\n  8633,\n  8634,\n  8635,\n  8636,\n  8637,\n  8638,\n  8639,\n  8640,\n  8641,\n  8642,\n  8643,\n  8644,\n  8645,\n  8646,\n  8647,\n  8648,\n  8649,\n  8650,\n  8651,\n  8652,\n  8653,\n  8654,\n  8655,\n  8656,\n  8657,\n  8658,\n  8659,\n  8660,\n  8661,\n  8662,\n  8663,\n  8664,\n  8665,\n  8666,\n  8667,\n  8668,\n  8669,\n  8670,\n  8671,\n  8672,\n  8673,\n  8674,\n  8675,\n  8676,\n  8677,\n  8678,\n  8679,\n  8680,\n  8681,\n  8682,\n  8683,\n  8684,\n  8685,\n  8686,\n  8687,\n  8688,\n  8689,\n  8690,\n  8691,\n  8692,\n  8693,\n  8694,\n  8695,\n  8696,\n  8697,\n  8698,\n  8699,\n  8700,\n  8701,\n  8702,\n  8703,\n  8704,\n  8705,\n  8706,\n  8707,\n  8708,\n  8709,\n  8710,\n  8711,\n  8712,\n  8713,\n  8714,\n  8715,\n  8716,\n  8717,\n  8718,\n  8719,\n  8720,\n  8721,\n  8722,\n  8723,\n  8724,\n  8725,\n  8726,\n  8727,\n  8728,\n  8729,\n  8730,\n  8731,\n  8732,\n  8733,\n  8734,\n  8735,\n  8736,\n  8737,\n  8738,\n  8739,\n  8740,\n  8741,\n  8742,\n  8743,\n  8744,\n  8745,\n  8746,\n  8747,\n  8748,\n  8749,\n  8750,\n  8751,\n  8752,\n  8753,\n  8754,\n  8755,\n  8756,\n  8757,\n  8758,\n  8759,\n  8760,\n  8761,\n  8762,\n  8763,\n  8764,\n  8765,\n  8766,\n  8767,\n  8768,\n  8769,\n  8770,\n  8771,\n  8772,\n  8773,\n  8774,\n  8775,\n  8776,\n  8777,\n  8778,\n  8779,\n  8780,\n  8781,\n  8782,\n  8783,\n  8784,\n  8785,\n  8786,\n  8787,\n  8788,\n  8789,\n  8790,\n  8791,\n  8792,\n  8793,\n  8794,\n  8795,\n  8796,\n  8797,\n  8798,\n  8799,\n  8800,\n  8801,\n  8802,\n  8803,\n  8804,\n  8805,\n  8806,\n  8807,\n  8808,\n  8809,\n  8810,\n  8811,\n  8812,\n  8813,\n  8814,\n  8815,\n  8816,\n  8817,\n  8818,\n  8819,\n  8820,\n  8821,\n  8822,\n  8823,\n  8824,\n  8825,\n  8826,\n  8827,\n  8828,\n  8829,\n  8830,\n  8831,\n  8832,\n  8833,\n  8834,\n  8835,\n  8836,\n  8837,\n  8838,\n  8839,\n  8840,\n  8841,\n  8842,\n  8843,\n  8844,\n  8845,\n  8846,\n  8847,\n  8848,\n  8849,\n  8850,\n  8851,\n  8852,\n  8853,\n  8854,\n  8855,\n  8856,\n  8857,\n  8858,\n  8859,\n  8860,\n  8861,\n  8862,\n  8863,\n  8864,\n  8865,\n  8866,\n  8867,\n  8868,\n  8869,\n  8870,\n  8871,\n  8872,\n  8873,\n  8874,\n  8875,\n  8876,\n  8877,\n  8878,\n  8879,\n  8880,\n  8881,\n  8882,\n  8883,\n  8884,\n  8885,\n  8886,\n  8887,\n  8888,\n  8889,\n  8890,\n  8891,\n  8892,\n  8893,\n  8894,\n  8895,\n  8896,\n  8897,\n  8898,\n  8899,\n  8900,\n  8901,\n  8902,\n  8903,\n  8904,\n  8905,\n  8906,\n  8907,\n  8908,\n  8909,\n  8910,\n  8911,\n  8912,\n  8913,\n  8914,\n  8915,\n  8916,\n  8917,\n  8918,\n  8919,\n  8920,\n  8921,\n  8922,\n  8923,\n  8924,\n  8925,\n  8926,\n  8927,\n  8928,\n  8929,\n  8930,\n  8931,\n  8932,\n  8933,\n  8934,\n  8935,\n  8936,\n  8937,\n  8938,\n  8939,\n  8940,\n  8941,\n  8942,\n  8943,\n  8944,\n  8945,\n  8946,\n  8947,\n  8948,\n  8949,\n  8950,\n  8951,\n  8952,\n  8953,\n  8954,\n  8955,\n  8956,\n  8957,\n  8958,\n  8959,\n  8960,\n  8961,\n  8962,\n  8963,\n  8964,\n  8965,\n  8966,\n  8967,\n  8968,\n  8969,\n  8970,\n  8971,\n  8972,\n  8973,\n  8974,\n  8975,\n  8976,\n  8977,\n  8978,\n  8979,\n  8980,\n  8981,\n  8982,\n  8983,\n  8984,\n  8985,\n  8986,\n  8987,\n  8988,\n  8989,\n  8990,\n  8991,\n  8992,\n  8993,\n  8994,\n  8995,\n  8996,\n  8997,\n  8998,\n  8999,\n  9000,\n  9001,\n  9002,\n  9003,\n  9004,\n  9005,\n  9006,\n  9007,\n  9008,\n  9009,\n  9010,\n  9011,\n  9012,\n  9013,\n  9014,\n  9015,\n  9016,\n  9017,\n  9018,\n  9019,\n  9020,\n  9021,\n  9022,\n  9023,\n  9024,\n  9025,\n  9026,\n  9027,\n  9028,\n  9029,\n  9030,\n  9031,\n  9032,\n  9033,\n  9034,\n  9035,\n  9036,\n  9037,\n  9038,\n  9039,\n  9040,\n  9041,\n  9042,\n  9043,\n  9044,\n  9045,\n  9046,\n  9047,\n  9048,\n  9049,\n  9050,\n  9051,\n  9052,\n  9053,\n  9054,\n  9055,\n  9056,\n  9057,\n  9058,\n  9059,\n  9060,\n  9061,\n  9062,\n  9063,\n  9064,\n  9065,\n  9066,\n  9067,\n  9068,\n  9069,\n  9070,\n  9071,\n  9072,\n  9073,\n  9074,\n  9075,\n  9076,\n  9077,\n  9078,\n  9079,\n  9080,\n  9081,\n  9082,\n  9083,\n  9084,\n  9085,\n  9086,\n  9087,\n  9088,\n  9089,\n  9090,\n  9091,\n  9092,\n  9093,\n  9094,\n  9095,\n  9096,\n  9097,\n  9098,\n  9099,\n  9100,\n  9101,\n  9102,\n  9103,\n  9104,\n  9105,\n  9106,\n  9107,\n  9108,\n  9109,\n  9110,\n  9111,\n  9112,\n  9113,\n  9114,\n  9115,\n  9116,\n  9117,\n  9118,\n  9119,\n  9120,\n  9121,\n  9122,\n  9123,\n  9124,\n  9125,\n  9126,\n  9127,\n  9128,\n  9129,\n  9130,\n  9131,\n  9132,\n  9133,\n  9134,\n  9135,\n  9136,\n  9137,\n  9138,\n  9139,\n  9140,\n  9141,\n  9142,\n  9143,\n  9144,\n  9145,\n  9146,\n  9147,\n  9148,\n  9149,\n  9150,\n  9151,\n  9152,\n  9153,\n  9154,\n  9155,\n  9156,\n  9157,\n  9158,\n  9159,\n  9160,\n  9161,\n  9162,\n  9163,\n  9164,\n  9165,\n  9166,\n  9167,\n  9168,\n  9169,\n  9170,\n  9171,\n  9172,\n  9173,\n  9174,\n  9175,\n  9176,\n  9177,\n  9178,\n  9179,\n  9180,\n  9181,\n  9182,\n  9183,\n  9184,\n  9185,\n  9186,\n  9187,\n  9188,\n  9189,\n  9190,\n  9191,\n  9192,\n  9193,\n  9194,\n  9195,\n  9196,\n  9197,\n  9198,\n  9199,\n  9200,\n  9201,\n  9202,\n  9203,\n  9204,\n  9205,\n  9206,\n  9207,\n  9208,\n  9209,\n  9210,\n  9211,\n  9212,\n  9213,\n  9214,\n  9215,\n  9216,\n  9217,\n  9218,\n  9219,\n  9220,\n  9221,\n  9222,\n  9223,\n  9224,\n  9225,\n  9226,\n  9227,\n  9228,\n  9229,\n  9230,\n  9231,\n  9232,\n  9233,\n  9234,\n  9235,\n  9236,\n  9237,\n  9238,\n  9239,\n  9240,\n  9241,\n  9242,\n  9243,\n  9244,\n  9245,\n  9246,\n  9247,\n  9248,\n  9249,\n  9250,\n  9251,\n  9252,\n  9253,\n  9254,\n  9255,\n  9256\n]\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/medium-list.json",
    "content": "[ 1,\n  2,\n  3,\n  4,\n  5,\n  6,\n  7,\n  8,\n  9,\n  10,\n  11,\n  12,\n  13,\n  14,\n  15,\n  16,\n  17,\n  18,\n  19,\n  20,\n  21,\n  22,\n  23,\n  24,\n  25,\n  26,\n  27,\n  28,\n  29,\n  30,\n  31,\n  32,\n  33,\n  34,\n  35,\n  36,\n  37,\n  38,\n  39,\n  40,\n  41,\n  42,\n  43,\n  44,\n  45,\n  46,\n  47,\n  48,\n  49,\n  50,\n  51,\n  52,\n  53,\n  54,\n  55,\n  56,\n  57,\n  58,\n  59,\n  60,\n  61,\n  62,\n  63,\n  64,\n  65,\n  66,\n  67,\n  68,\n  69,\n  70,\n  71,\n  72,\n  73,\n  74,\n  75,\n  76,\n  77,\n  78,\n  79,\n  80,\n  81,\n  82,\n  83,\n  84,\n  85,\n  86,\n  87,\n  88,\n  89,\n  90,\n  91,\n  92,\n  93,\n  94,\n  95,\n  96,\n  97,\n  98,\n  99,\n  100\n]\n"
  },
  {
    "path": "test-suite/executable-benchmark/serial-interop/test.sh",
    "content": "#!/usr/bin/env bash\n\nset -e\n\nmorloc make foo.loc\n\n# warmup\nhyperfine \\\n  -w 10 \\\n  -L test pTenBaseline,rTenBaseline,cTenBaseline \\\n  \"./nexus {test}\"\n\nhyperfine \\\n  -w 5 \\\n  -L test cZeroBaseline,pZeroBaseline,pZeroFromForeign,pZeroToForeign,rZeroBaseline,rZeroFromForeign,rZeroToForeign,cTenBaseline,pTenBaseline,pTenFromForeign,pTenToForeign,rTenBaseline,rTenFromForeign,rTenToForeign,rMarginalCost1,rMarginalCost2,rMarginalCost3,rMarginalCost4,pMarginalCost1,pMarginalCost2,pMarginalCost3,pMarginalCost4 \\\n  --export-markdown stats.markdown \\\n  --export-csv stats.csv \\\n  \"./nexus {test}\"\n\nhyperfine -w 5 -L test memtest \"./nexus {test} medium-list.json\"\n\nhyperfine -w 5 -L test mapManyPCP,mapManyPCR \"./nexus {test} 5 long-list.json\"\n"
  },
  {
    "path": "test-suite/golden-tests/.gitignore",
    "content": "obs.txt\n0-*\n*.out\n000*\n__pycache__/\npool-rust\ngood-*\nlog\n*.gdb\n.Rhistory\n.RData\n*.err\nmain\nnexus\n"
  },
  {
    "path": "test-suite/golden-tests/README.md",
    "content": "Many of these tests are created in sets of `*-forms-<id>`, e.g., serial-form-8.\nThese sets are intended to enumerate all possible combinations (or all to a\ncertain depth). How the combinations are enumerated is scrawled in my\nReMarkable tablet somewhere, but I'll try to also copy them here.\n\n# `serial-form-*`\n\nThese are all combinations to depth 2 of simple serial forms, record serial\nforms, and constructed serial forms.\n\n 1. (S) simple - a data type that maps immediately to JSON. Example: `[(Str, Int)]`\n\n 2. (C) constructed - a parameterized data type where the parameters do not\n    fully describe the constructor inputs and a constructor must be provided by\n    the programmer. Example: `Map a b`\n\n 3. (R) record - a data type with parameters that are fully described\n    (corresponds to \"data\" constructors in Haskell). These can automatically be\n    serialized/deserialized using existing constructors in the target language\n    and record accessors. Example: `data Person a = Person {age :: Int, info : a}`\n\nThe combinations are:\n\n 1.  S - (Str,Int,Bool)\n 2.  C - Map Str Int\n 3.  R - Person Str\n 4.  S(S) - `[(Str,Int)]`\n 5.  S(C) - `[Map Str Int]`\n 6.  S(R) - `[Person Str]\n 7.  C(S) - `Map Str Int`\n 8.  C(C) - `Map Str (Map Str Int)`\n 9.  C(R) - `Map Str (Person Str)`\n 10. R(S) - `Person Str`\n 11. R(C) - `Person (Map Str Int)` \n 12. R(R) - `Person (Person Str)`\n\nThere are infinitely more complex forms, but all of these can be systematically\nreduced/expanded in (de)serialization. So long as these base cases work,\neverything should be awesome. I might add a 10th case for valid recursive\nstructures (trees) and maybe an 11th really deep structure just for good feels.\n\n# Experimental tests\n\nThe `x-` tests cover unimplemented features with syntactic support. They are\njust tests of the parser.\nimport pybase (map, add)\nexport foo\n\n# Eta reduction\n\n```\nfoo = map (add 1.0)\n```\n\n 1. Synthesize map type |- (a -> b) -> [a] -> [b]\n 2. (type args map) == 2\n 3. (expr args map) == 1\n 4. So add an expr arg:\n       `map (add 1.0)  -->  `\\xs -> map (add 1.0) xs`\n 5. And do the same for add\n       `\\xs -> map (\\x -> add 1.0 x) xs`\n\n\nIf an expression has too many arguments:\n\n```\nbar x = add (mul 2 x)\nfoo y = bar 4 y \n```\n\nThis is not really an eta-reduction problem, but a currying problem. The type of\nbar is `Real -> (Real -> Real)`. This should be equivalent to `Real -> Real ->\nReal`, but due to the how functions are encoded the number of expected arguments\nis set to 1, rather than 2. I need to normalize the function definitions.\nBetter, I should rewrite them to naturally curry and avoid all this non-sense\nfrom the start.\n\n# (un)packers\n\nPacking and unpacking functions are special in morloc. The programmer does not\nexplicitly evoke them, rather they are used as needed in the compiled code. So\nwhere should they be defined and how should they be imported? Types matter,\nalready we import type aliases and they are required for general type\ninference. So packers should be imported explicitly as types. If they are not\nimported, then an informative error should be raised. I had previously thought\nof making them silently available everywhere, but that would cause hidden state.\n\nA function is tagged as a (un)packer in the type signature:\n\n```\npackMap :: pack => [(a,b)] -> Map\nunpackMap :: unpack => Map -> [(a,b)]\n```\n\nTo this point, I've been treating packers as existing in their own\nuniverses. But why must this be the case? They should just be normal morloc\nfunctions. And yet they are special. It is necessary that all input/output pairs\nhave compatible serialization forms. But this can be typechecked. Also, it is\nnot necessary that all serial forms for all types are the same across the\nprogram. This may be useful though for the morloc environment; if a given type\nalways has the same serialization form serialized data can be more easily passed\nbetween systems. But this is not something morloc can enforce, rather this is a\ndecision the community must make. So the best morloc can do is typecheck the\npackers.\n\nPerhaps the best way to handle these packers is to make them a typeclass. Of\ncourse, I'll need to add support for general typeclasses first, and that is a\nlarge job. If packing functions are written just like any other typeclass, then\nall my current language-specific serialization code could be be extracted from\nthe compiler. Instead, they could be standard morloc modules. And then you would\nsimply install them as you would install any other morloc modules. The same\ncould go for a fair bit of code generation, in fact. Such as hooks in the\nmanifolds, caching functions, etc.\n\nBut I should leave this for later. First I need to fix the current packing\nparadigm and write the goddamn paper. Adding typeclasses can be the next\nstep. It will probably be a 6 month rewrite. One thing I should guarantee is\nthat code using the packable types should not change when I convert to\ntypeclasses. So the following will be backward compatible:\n\n```\nimport map (Map)\n\nfoo :: Map a b -> b\nfoo = ...\n```\n\nOnly the modules that actually define `Map` will need to change. Usually, I\nshould have one module for each of these types. They should encapsulate\neverything that needs to be known about the type. Maybe. Actually, it might make\nsense to have one general module defining a type, then many language-specific\nmodules that import the general module and add the language-specific instance,\nand then maybe one base module that imports many language-specific modules. This\nis what I am currently doing with `conventions`, `pybase`/`cppbase`/`rbase`, and\n`base`.\n\nThe next paper, then would add typeclasses, replace (un)packers with a Packable\ntypeclass, and add other manifold functions such as debuggers (at various\nplaces) and cachers, diagnostics etc. That is, in the next paper, I would loop\nback to the original ideas of morloc. Also I'd extend the prelude library and\nreach a rough useable version.\n\n# Data packet tests\n\nThe purpose of these tests are to ensure that data is passed correctly between\nthe nexus and the pool and between pools at each of the different size\ncategories.\n\nCurrently, the nexus can read data as either raw JSON or files of JSON. These\nare passed as MESG and FILE packets, respectively. MESG packets are limited\n(currently) in size to 2^16 characters. This reduces the amount of memory stored\npotentially in multiple pools. When pools transmit data, either to each other or\nback to the nexus, they pack data into MESG packets if they are small or FILE\npackets if they are large.\n\nAnother consideration is buffer size. Data is transferred over the sockets in\nbuffers of (currently) 4096 characters. Packets larger than this need to be\nstreamed. This streaming needs to be tested across all languages.\n\nSo there are three size partitions. 0-4096, 4097-65536, and 65537+. Special\nsizes of data are 0, 1, 4096, 4097, 65536, 65537.\n\nFor CALL packets, multiple DATA packets may be stored after the header. I need\nto test cases where the sum of packets passes size partition.\n\nFrom a pool's perspective, a call directly from the nexus is no different from a\ncall from another pool. However, a return is different from a foreign call. So\nwe need to ensure that every language is tested for foreign calls. \n\nSo the dimensions for the nexus-to-pool call are:\n\n 1. file/mesg nexus input [file,mesg]\n 2. size [0,1,4096,4097,65536, 65537]\n 3. number of arguments [0,1,n]\n\nAnd the dimensions for the pool-to-pool call are\n\n 1. size [0,1,4096,4097,65536, 65537]\n 2. number of arguments [0,1,n]\n 3. direction [receive, foreign call, return]\n\n\nnexus file -> pool file -> pool file -> nexus file\n\nnexus mesg -> pool mesg -> pool mesg -> nexus mesg\n\nnexus () -> pool () -> nexus ()\n"
  },
  {
    "path": "test-suite/golden-tests/alias-array-monoid/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testArrayMonoidPy ---\" > obs.txt\n\t./nexus testArrayMonoidPy >> obs.txt 2>> obs.err\n\techo \"--- testArrayMonoidCpp ---\" >> obs.txt\n\t./nexus testArrayMonoidCpp >> obs.txt 2>> obs.err\n\techo \"--- testArrayMonoidR ---\" >> obs.txt\n\t./nexus testArrayMonoidR >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/alias-array-monoid/exp.txt",
    "content": "--- testArrayMonoidPy ---\ntrue\n--- testArrayMonoidCpp ---\ntrue\n--- testArrayMonoidR ---\ntrue\n"
  },
  {
    "path": "test-suite/golden-tests/alias-array-monoid/main.loc",
    "content": "-- Test that Array has Semigroup and Monoid instances.\n-- Array is a List alias, same as Deque, so it should support the same\n-- Semigroup/Monoid operations. Types are resolved via annotations at\n-- call sites rather than type-casting wrapper functions.\n\nmodule main (testArrayMonoidPy, testArrayMonoidCpp, testArrayMonoidR)\n\nimport root-py\nimport root-cpp\nimport root-r\n\ntestArrayMonoidPy :: Bool\ntestArrayMonoidPy =\n  let t1 = (([1,2] :: Array Int) <> [3,4]) == [1,2,3,4]\n      t2 = (mempty :: Array Int) == []\n      t3 = concat ([[1,2],[3],[4,5]] :: Array (Array Int)) == [1,2,3,4,5]\n  in t1 && t2 && t3\n\ntestArrayMonoidCpp :: Bool\ntestArrayMonoidCpp =\n  let t1 = (([1,2] :: Array Int) <> [3,4]) == [1,2,3,4]\n      t2 = (mempty :: Array Int) == []\n      t3 = concat ([[1,2],[3],[4,5]] :: Array (Array Int)) == [1,2,3,4,5]\n  in t1 && t2 && t3\n\ntestArrayMonoidR :: Bool\ntestArrayMonoidR =\n  let t1 = (([1,2] :: Array Int) <> [3,4]) == [1,2,3,4]\n      t2 = (mempty :: Array Int) == []\n      t3 = concat ([[1,2],[3],[4,5]] :: Array (Array Int)) == [1,2,3,4,5]\n  in t1 && t2 && t3\n"
  },
  {
    "path": "test-suite/golden-tests/alias-concrete-bugs/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- bug1_nested_alias_py ---\" > obs.txt\n\t./nexus bug1_nested_alias_py >> obs.txt 2>> obs.err\n\techo \"--- bug1_nested_alias_cpp ---\" >> obs.txt\n\t./nexus bug1_nested_alias_cpp >> obs.txt 2>> obs.err\n\techo \"--- bug2_int64_eq_cpp ---\" >> obs.txt\n\t./nexus bug2_int64_eq_cpp >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/alias-concrete-bugs/exp.txt",
    "content": "--- bug1_nested_alias_py ---\ntrue\n--- bug1_nested_alias_cpp ---\ntrue\n--- bug2_int64_eq_cpp ---\ntrue\n"
  },
  {
    "path": "test-suite/golden-tests/alias-concrete-bugs/main.loc",
    "content": "-- Minimal reproducers for two concrete-type bugs with type aliases.\n--\n-- BUG 1: Nested containers with mixed concrete types.\n-- [Deque Int] = List (Deque Int) = std::vector<std::deque<int>> in C++.\n-- The code generator produces brace initialization {n1, n2} where n1/n2\n-- are std::vector<int> (inner list literal default), not std::deque<int>.\n-- C++ cannot implicitly convert vector to deque in an initializer list.\n--\n-- BUG 2: Integer alias literal type mismatch.\n-- Int64 = Int at the general level, but int64_t vs int in C++.\n-- When comparing `mul64 x y == 20000`, the literal 20000 is Int (int)\n-- but the LHS is Int64 (int64_t). morloc_eq<A>(A, A) fails template\n-- deduction because int64_t != int.\n\nmodule main (bug1_nested_alias_py, bug1_nested_alias_cpp, bug2_int64_eq_cpp)\n\nimport root-py\nimport root-cpp\n\n-- BUG 1: Minimal repro - nested container with alias inner type\nnestedDeque :: [Deque Int] -> [Deque Int]\nnestedDeque = map (map (\\x -> x + 1))\n\nbug1_nested_alias_py :: Bool\nbug1_nested_alias_py = nestedDeque [[1,2],[3]] == [[2,3],[4]]\n\nbug1_nested_alias_cpp :: Bool\nbug1_nested_alias_cpp = nestedDeque [[1,2],[3]] == [[2,3],[4]]\n\n-- BUG 2: Minimal repro - Int64 compared to Int literal\nmul64 :: Int64 -> Int64 -> Int64\nmul64 x y = x * y\n\nbug2_int64_eq_cpp :: Bool\nbug2_int64_eq_cpp = mul64 3 4 == 12\n"
  },
  {
    "path": "test-suite/golden-tests/alias-constructor-equiv/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testConcatPy ---\" > obs.txt\n\t./nexus testConcatPy >> obs.txt 2>> obs.err\n\techo \"--- testConcatCpp ---\" >> obs.txt\n\t./nexus testConcatCpp >> obs.txt 2>> obs.err\n\techo \"--- testConcatR ---\" >> obs.txt\n\t./nexus testConcatR >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/alias-constructor-equiv/exp.txt",
    "content": "--- testConcatPy ---\ntrue\n--- testConcatCpp ---\ntrue\n--- testConcatR ---\ntrue\n"
  },
  {
    "path": "test-suite/golden-tests/alias-constructor-equiv/main.loc",
    "content": "-- Test alias constructor equivalence for Monoid/Semigroup/Foldable interactions.\n--\n-- This tests the fix for \"Cannot compare types Array and Deque\" which occurred\n-- when resolveInstances solved an existential to one alias name (e.g. Array)\n-- and then tried to check it against another (e.g. Deque). Both are aliases\n-- for List, so the typechecker must recognize them as equivalent constructors.\n--\n-- Concrete alias types (List, Deque, Array) are selected via type annotations\n-- at call sites, letting typeclass resolution pick the correct implementation.\n\nmodule main (testConcatPy, testConcatCpp, testConcatR)\n\nimport root-py\nimport root-cpp\nimport root-r\n\ntestConcatPy :: Bool\ntestConcatPy =\n  let t1 = idpy $ concat ([[1,2],[3],[4,5]] :: List (List Int)) == [1,2,3,4,5]\n      t2 = idpy $ concat ([[1,2],[3],[4,5]] :: Deque (Deque Int)) == [1,2,3,4,5]\n      t3 = idpy $ concat ([[1,2],[3],[4,5]] :: Array (Array Int)) == [1,2,3,4,5]\n      t4 = idpy $ concatMap (\\x -> [x, x * 2]) ([1,2,3] :: [Int]) == [1,2,2,4,3,6]\n      t5 = idpy $ concatMap (\\x -> [x, x * 2]) ([1,2,3] :: Deque Int) == [1,2,2,4,3,6]\n      t6 = idpy $ intercalate [0] [[1,2],[3,4],[5]] == [1,2,0,3,4,0,5]\n      t7 = idpy $ ([1,2] :: [Int]) <> [3,4] == [1,2,3,4]\n      t8 = idpy $ ([1,2] :: Deque Int) <> [3,4] == [1,2,3,4]\n      t9 = idpy $ ([1,2] :: Array Int) <> [3,4] == [1,2,3,4]\n      t10 = idpy $ (mempty :: [Int]) == []\n      t11 = idpy $ (mempty :: Deque Int) == []\n      t12 = idpy $ (mempty :: Array Int) == []\n      t13 = idpy $ fold (+) 0 ([1,2,3,4] :: Deque Int) == 10\n      t14 = idpy $ fold (+) 0 ([1,2,3,4] :: Array Int) == 10\n      t15 = idpy $ map (\\x -> x + 1) ([1,2,3] :: Array Int) == [2,3,4]\n      t16 = idpy $ map (\\x -> x + 1) ([1,2,3] :: Deque Int) == [2,3,4]\n      t17 = idpy $ concatMap (\\x -> [x, x + 1]) ([1,2] :: [Int]) == [1,2,2,3]\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10 && t11 && t12 && t13 && t14 && t15 && t16 && t17\n\ntestConcatCpp :: Bool\ntestConcatCpp =\n  let t1 = idcpp $ concat ([[1,2],[3],[4,5]] :: List (List Int)) == [1,2,3,4,5]\n      t2 = idcpp $ concat ([[1,2],[3],[4,5]] :: Deque (Deque Int)) == [1,2,3,4,5]\n      t3 = idcpp $ concat ([[1,2],[3],[4,5]] :: Array (Array Int)) == [1,2,3,4,5]\n      t4 = idcpp $ concatMap (\\x -> [x, x * 2]) ([1,2,3] :: [Int]) == [1,2,2,4,3,6]\n      t5 = idcpp $ concatMap (\\x -> [x, x * 2]) ([1,2,3] :: Deque Int) == [1,2,2,4,3,6]\n      t6 = idcpp $ intercalate [0] [[1,2],[3,4],[5]] == [1,2,0,3,4,0,5]\n      t7 = idcpp $ ([1,2] :: [Int]) <> [3,4] == [1,2,3,4]\n      t8 = idcpp $ ([1,2] :: Deque Int) <> [3,4] == [1,2,3,4]\n      t9 = idcpp $ ([1,2] :: Array Int) <> [3,4] == [1,2,3,4]\n      t10 = idcpp $ (mempty :: [Int]) == []\n      t11 = idcpp $ (mempty :: Deque Int) == []\n      t12 = idcpp $ (mempty :: Array Int) == []\n      t13 = idcpp $ fold (+) 0 ([1,2,3,4] :: Deque Int) == 10\n      t14 = idcpp $ fold (+) 0 ([1,2,3,4] :: Array Int) == 10\n      t15 = idcpp $ map (\\x -> x + 1) ([1,2,3] :: Array Int) == [2,3,4]\n      t16 = idcpp $ map (\\x -> x + 1) ([1,2,3] :: Deque Int) == [2,3,4]\n      t17 = idcpp $ concatMap (\\x -> [x, x + 1]) ([1,2] :: [Int]) == [1,2,2,3]\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10 && t11 && t12 && t13 && t14 && t15 && t16 && t17\n\ntestConcatR :: Bool\ntestConcatR =\n  let t1 = idpy $ concat ([[1,2],[3],[4,5]] :: List (List Int)) == [1,2,3,4,5]\n      t2 = idpy $ concat ([[1,2],[3],[4,5]] :: Deque (Deque Int)) == [1,2,3,4,5]\n      t3 = idpy $ concat ([[1,2],[3],[4,5]] :: Array (Array Int)) == [1,2,3,4,5]\n      t4 = idpy $ concatMap (\\x -> [x, x * 2]) ([1,2,3] :: [Int]) == [1,2,2,4,3,6]\n      t5 = idpy $ concatMap (\\x -> [x, x * 2]) ([1,2,3] :: Deque Int) == [1,2,2,4,3,6]\n      t6 = idpy $ intercalate [0] [[1,2],[3,4],[5]] == [1,2,0,3,4,0,5]\n      t7 = idpy $ ([1,2] :: [Int]) <> [3,4] == [1,2,3,4]\n      t8 = idpy $ ([1,2] :: Deque Int) <> [3,4] == [1,2,3,4]\n      t9 = idpy $ ([1,2] :: Array Int) <> [3,4] == [1,2,3,4]\n      t10 = idpy $ (mempty :: [Int]) == []\n      t11 = idpy $ (mempty :: Deque Int) == []\n      t12 = idpy $ (mempty :: Array Int) == []\n      t13 = idpy $ fold (+) 0 ([1,2,3,4] :: Deque Int) == 10\n      t14 = idpy $ fold (+) 0 ([1,2,3,4] :: Array Int) == 10\n      t15 = idpy $ map (\\x -> x + 1) ([1,2,3] :: Array Int) == [2,3,4]\n      t16 = idpy $ map (\\x -> x + 1) ([1,2,3] :: Deque Int) == [2,3,4]\n      t17 = idpy $ concatMap (\\x -> [x, x + 1]) ([1,2] :: [Int]) == [1,2,2,3]\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10 && t11 && t12 && t13 && t14 && t15 && t16 && t17\n"
  },
  {
    "path": "test-suite/golden-tests/alias-dedup-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testAliasPy ---\" > obs.txt\n\t./nexus testAliasPy >> obs.txt 2>> obs.err\n\techo \"--- testAliasCpp ---\" >> obs.txt\n\t./nexus testAliasCpp >> obs.txt 2>> obs.err\n\techo \"--- testInt32Cpp ---\" >> obs.txt\n\t./nexus testInt32Cpp >> obs.txt 2>> obs.err\n\techo \"--- testDeepChainPy ---\" >> obs.txt\n\t./nexus testDeepChainPy >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/alias-dedup-1/exp.txt",
    "content": "--- testAliasPy ---\ntrue\n--- testAliasCpp ---\ntrue\n--- testInt32Cpp ---\ntrue\n--- testDeepChainPy ---\n194\n"
  },
  {
    "path": "test-suite/golden-tests/alias-dedup-1/main.loc",
    "content": "-- Test alias deduplication correctness and performance.\n-- Verifies that type aliases (Deque/Array = List, Int32/Int64 = Int)\n-- are handled correctly by the typechecker's instance resolution dedup.\n\nmodule main (testAliasPy, testAliasCpp, testInt32Cpp, testDeepChainPy)\n\nimport root-py\nimport root-cpp\n\n-- ===== EXPLICIT ALIAS ANNOTATIONS =====\n-- Each alias supports different typeclasses:\n--   List:   Functor, Foldable, Indexed, Stack, Queue\n--   Deque:  Functor, Foldable, Stack, Queue\n--   Array:  Functor, Foldable, Indexed\n\n-- Functor (all aliases have it)\nmapDeque :: Deque Int -> Deque Int\nmapDeque xs = map (\\x -> x + 1) xs\n\nmapArray :: Array Int -> Array Int\nmapArray xs = map (\\x -> x + 1) xs\n\n-- Foldable (all aliases have it)\nfoldDeque :: Deque Int -> Int\nfoldDeque xs = fold (\\a x -> a + x) 0 xs\n\nfoldArray :: Array Int -> Int\nfoldArray xs = fold (\\a x -> a + x) 0 xs\n\n-- Indexed (List, Array -- NOT Deque)\nindexList :: [Int] -> Int\nindexList xs = at 0 xs\n\nindexArray :: Array Int -> Int\nindexArray xs = at 0 xs\n\n-- Stack (List, Deque -- NOT Array)\nconsDeque :: Deque Int -> Deque Int\nconsDeque xs = cons 0 xs\n\n-- Queue (List, Deque -- NOT Array)\nsnocDeque :: Deque Int -> Deque Int\nsnocDeque xs = snoc xs 99\n\nsnocList :: [Int] -> [Int]\nsnocList xs = snoc xs 99\n\n-- ===== MULTI-TYPECLASS CHAINS ON SPECIFIC ALIASES =====\n-- Deque: map (Functor) + cons (Stack) + fold (Foldable)\nchainDeque :: Deque Int -> Int\nchainDeque xs =\n  let ys = map (\\x -> x + 1) xs\n      zs = cons 0 ys\n  in fold (\\a x -> a + x) 0 zs\n\n-- List: map (Functor) + snoc (Queue) + at (Indexed)\nchainList :: [Int] -> Int\nchainList xs =\n  let ys = map (\\x -> x + 1) xs\n      zs = snoc ys 100\n  in at 0 zs\n\n-- Array: map (Functor) + at (Indexed) + fold (Foldable)\nchainArray :: Array Int -> Int\nchainArray xs =\n  let ys = map (\\x -> x * 2) xs\n  in fold (\\a x -> a + x) 0 ys\n\n-- ===== INT32/INT64 ARITHMETIC (C++ only) =====\n-- Int32 = Int at the general level, but \"int32_t\" in C++.\nadd32 :: Int32 -> Int32 -> Int32\nadd32 x y = x + y\n\nmul64 :: Int64 -> Int64 -> Int64\nmul64 x y = x * y\n\n-- ===== DEEP CHAIN (performance stress) =====\n-- 8 chained operations. Without alias dedup this would cause\n-- exponential branching in instance resolution.\ndeepChain :: [Int] -> Int\ndeepChain xs =\n  let a = map (\\x -> x + 1) xs\n      b = map (\\x -> x * 2) a\n      c = cons 0 b\n      d = snoc c 100\n      e = map (\\x -> x - 1) d\n      f = filter (\\x -> x > 0) e\n      g = map (\\x -> x + 10) f\n  in fold (\\acc x -> acc + x) 0 g\n\n-- ===== NESTED ALIAS CONTAINERS =====\nnestedAliasMap :: [Deque Int] -> [Deque Int]\nnestedAliasMap = map (map (\\x -> x + 1))\n\n-- ===== Eq on aliases =====\neqDeque :: Bool\neqDeque = mapDeque [1,2,3] == [2,3,4]\n\neqArray :: Bool\neqArray = mapArray [1,2,3] == [2,3,4]\n\n-- ===== TEST RUNNERS =====\n\ntestAliasPy :: Bool\ntestAliasPy =\n  let t1 = mapDeque [1,2,3] == [2,3,4]\n      t2 = mapArray [1,2,3] == [2,3,4]\n      t3 = foldDeque [1,2,3] == 6\n      t4 = foldArray [1,2,3] == 6\n      t5 = indexList [10,20,30] == 10\n      t6 = indexArray [10,20,30] == 10\n      t7 = consDeque [1,2] == [0,1,2]\n      t8 = snocDeque [1,2] == [1,2,99]\n      t9 = snocList [1,2] == [1,2,99]\n      t10 = chainDeque [1,2,3] == 9\n      t11 = chainList [1,2,3] == 2\n      t12 = chainArray [1,2,3] == 12\n      t13 = nestedAliasMap [[1,2],[3]] == [[2,3],[4]]\n      t14 = eqDeque && eqArray\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10 && t11 && t12 && t13 && t14\n\ntestAliasCpp :: Bool\ntestAliasCpp =\n  let t1 = mapDeque [1,2,3] == [2,3,4]\n      t2 = mapArray [1,2,3] == [2,3,4]\n      t3 = foldDeque [1,2,3] == 6\n      t4 = foldArray [1,2,3] == 6\n      t5 = indexList [10,20,30] == 10\n      t6 = indexArray [10,20,30] == 10\n      t7 = consDeque [1,2] == [0,1,2]\n      t8 = snocDeque [1,2] == [1,2,99]\n      t9 = snocList [1,2] == [1,2,99]\n      t10 = chainDeque [1,2,3] == 9\n      t11 = chainList [1,2,3] == 2\n      t12 = chainArray [1,2,3] == 12\n      t13 = nestedAliasMap [[1,2],[3]] == [[2,3],[4]]\n      t14 = eqDeque && eqArray\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10 && t11 && t12 && t13 && t14\n\ntestInt32Cpp :: Bool\ntestInt32Cpp =\n  let t1 = add32 10 20 == 30\n      t2 = mul64 100 200 == 20000\n  in t1 && t2\n\ntestDeepChainPy :: Int\ntestDeepChainPy = deepChain [1,2,3,4,5]\n"
  },
  {
    "path": "test-suite/golden-tests/alias-no-cross-instance/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err && echo \"UNEXPECTED_SUCCESS\" > obs.txt || echo \"build_rejected\" > obs.txt\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/alias-no-cross-instance/exp.txt",
    "content": "build_rejected\n"
  },
  {
    "path": "test-suite/golden-tests/alias-no-cross-instance/main.loc",
    "content": "-- Negative test: aliases do NOT inherit typeclass instances from\n-- the underlying type. Vector has no Stack instance (no cons),\n-- Array has no Queue instance (no snoc), Deque has no Indexed (no at).\n-- Each subcommand tests one such case; all should fail at build time.\n\nmodule main (consVector, snocArray, indexDeque)\n\nimport root-py\n\n-- Vector has Functor, Foldable, Indexed, Queue -- but NOT Stack\nconsVector :: Vector Int -> Vector Int\nconsVector xs = cons 0 xs\n\n-- Array has Functor, Foldable, Indexed -- but NOT Queue\nsnocArray :: Array Int -> Array Int\nsnocArray xs = snoc xs 99\n\n-- Deque has Functor, Foldable, Stack, Queue -- but NOT Indexed\nindexDeque :: Deque Int -> Int\nindexDeque xs = at 0 xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-c/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\n-- full application with constants\n-- can potentially optimize out this manifold\nfoo :: Real -> Real\nfoo x = x + 2.0 * 20.0\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-py/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- full application with constants\n-- can potentially optimize out this manifold\nfoo x = x + 2.0 * 20.0\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-r/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-1-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- full application with constants\n-- can potentially optimize out this manifold\nfoo x = x + 2.0 * 20.0\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-c/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\n-- full application with variable passing\n-- cannot optimize out this manifold\nfoo x = x + 20.0 * x\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-py/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- full application with variable passing\n-- cannot optimize out this manifold\nfoo x = x + 20.0 * x\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-r/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-2-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- full application with variable passing\n-- cannot optimize out this manifold\nfoo x = x + 20.0 * x\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-c/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\ny = 40.0\n\n-- external constant\n-- y will be turned into a manifold, making this the same as the f2 case\n-- can optimize out the manifold\nfoo x = x + y\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-py/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\ny = 40.0\n\n-- external constant\n-- y will be turned into a manifold, making this the same as the f2 case\n-- can optimize out the manifold\nfoo x = x + y\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-r/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-3-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\ny = 40.0\n\n-- external constant\n-- y will be turned into a manifold, making this the same as the f2 case\n-- can optimize out the manifold\nfoo x = x + y\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-c/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\nf = 20.0 * 2.0\n\n-- external 0 input argument\n-- cannot optimize out the manifold\n-- a better example would be a random number generator\nfoo x = x + f\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-py/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nf = 20.0 * 2.0\n\n-- external 0 input argument\n-- cannot optimize out the manifold\n-- a better example would be a random number generator\nfoo x = x + f\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-r/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-4-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\nf = 20.0 * 2.0\n\n-- external 0 input argument\n-- cannot optimize out the manifold\n-- a better example would be a random number generator\nfoo x = x + f\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-c/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\n-- literal function with constant application\nfoo :: [Real] -> [Real]\nfoo xs = map ((*) 2.0) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-py/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- literal function with constant application\nfoo :: [Real] -> [Real]\nfoo xs = map ((*) 2.0) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-r/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-5-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- literal function with constant application\nfoo :: [Real] -> [Real]\nfoo xs = map ((*) 2.0) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] [2,3,4] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-c/exp.txt",
    "content": "[2,6,12]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\n-- literal function with no application\nfoo :: [Real] -> [Real] -> [Real]\nfoo xs ys = zipWith (*) xs ys\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] [2,3,4] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-py/exp.txt",
    "content": "[2,6,12]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- literal function with no application\nfoo :: [Real] -> [Real] -> [Real]\nfoo xs ys = zipWith (*) xs ys\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] [2,3,4] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-r/exp.txt",
    "content": "[2,6,12]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-6-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- literal function with no application\nfoo :: [Real] -> [Real] -> [Real]\nfoo xs ys = zipWith (*) xs ys\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-c/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\n-- literal partially-applied function passed as argument\nfoo :: [Real] -> [Real]\nfoo xs = map ((*) 2.0) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-py/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- literal partially-applied function passed as argument\nfoo :: [Real] -> [Real]\nfoo xs = map ((*) 2.0) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-r/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-7-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- literal partially-applied function passed as argument\nfoo :: [Real] -> [Real]\nfoo xs = map ((*) 2.0) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-c/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\n-- variable partially-applied function passed as argument\nfoo :: Real -> [Real] -> [Real]\nfoo x xs = map ((*) x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-py/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- variable partially-applied function passed as argument\nfoo :: Real -> [Real] -> [Real]\nfoo x xs = map ((*) x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-r/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/argument-form-8-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- variable partially-applied function passed as argument\nfoo :: Real -> [Real] -> [Real]\nfoo x xs = map ((*) x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-cp/exp.txt",
    "content": "249750\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-cp/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-cpp\nimport root-py\n\ntable Stats = Stats {idx :: Int, value :: Real}\ntable Cpp => Stats = \"arrow\"\ntable Py  => Stats = \"arrow\"\n\nsource Cpp from \"src.hpp\" (\"makeLargeTable\", \"makeIndices\", \"sumReals\")\nsource Py from \"src.py\" (\"lookupValue\")\n\nmakeLargeTable :: Int -> Stats\nlookupValue :: Stats -> Int -> Real\nmakeIndices :: Int -> [Int]\nsumReals :: [Real] -> Real\n\nresult ::\n  --' number of rows in table\n  --' arg: --nrows\n  --' default: 100000\n  Int ->\n  --' number of foreign calls\n  --' arg: --ncalls\n  --' default: 1000\n  Int ->\n  Real\nresult nrows ncalls =\n  let tbl = makeLargeTable nrows\n  in sumReals (map (lookupValue tbl) (makeIndices ncalls))\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-cp/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n\n#include \"mlc_arrow.hpp\"\n#include <nanoarrow/nanoarrow.h>\n\nmlc::ArrowTable makeLargeTable(int n) {\n    struct ArrowSchema schema;\n    struct ArrowArray array;\n\n    ArrowSchemaInit(&schema);\n    ArrowSchemaSetTypeStruct(&schema, 2);\n\n    ArrowSchemaSetFormat(schema.children[0], \"i\");\n    ArrowSchemaSetName(schema.children[0], \"idx\");\n\n    ArrowSchemaSetFormat(schema.children[1], \"g\");\n    ArrowSchemaSetName(schema.children[1], \"value\");\n\n    ArrowArrayInitFromSchema(&array, &schema, nullptr);\n    ArrowArrayStartAppending(&array);\n\n    for (int i = 0; i < n; i++) {\n        ArrowArrayAppendInt(array.children[0], i);\n        ArrowArrayAppendDouble(array.children[1], (double)i * 0.5);\n        ArrowArrayFinishElement(&array);\n    }\n\n    ArrowArrayFinishBuildingDefault(&array, nullptr);\n    return mlc::ArrowTable(std::move(schema), std::move(array));\n}\n\nstd::vector<int> makeIndices(int n) {\n    std::vector<int> v(n);\n    for (int i = 0; i < n; i++) v[i] = i;\n    return v;\n}\n\ndouble sumReals(const std::vector<double>& xs) {\n    double s = 0.0;\n    for (double x : xs) s += x;\n    return s;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-cp/src.py",
    "content": "def lookupValue(table, i):\n    return table.column(\"value\")[i].as_py()\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pc/exp.txt",
    "content": "249750\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pc/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-cpp\nimport root-py\n\ntable Stats = Stats {idx :: Int, value :: Real}\ntable Cpp => Stats = \"arrow\"\ntable Py  => Stats = \"arrow\"\n\nsource Py from \"src.py\" (\"makeLargeTable\", \"makeIndices\", \"sumReals\")\nsource Cpp from \"src.hpp\" (\"lookupValue\")\n\nmakeLargeTable :: Int -> Stats\nlookupValue :: Stats -> Int -> Real\nmakeIndices :: Int -> [Int]\nsumReals :: [Real] -> Real\n\nresult ::\n  --' number of rows in table\n  --' arg: --nrows\n  --' default: 100000\n  Int ->\n  --' number of foreign calls\n  --' arg: --ncalls\n  --' default: 1000\n  Int ->\n  Real\nresult nrows ncalls =\n  let tbl = makeLargeTable nrows\n  in sumReals (map (lookupValue tbl) (makeIndices ncalls))\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pc/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n\n#include \"mlc_arrow.hpp\"\n#include <nanoarrow/nanoarrow.h>\n\ndouble lookupValue(const mlc::ArrowTable& table, int i) {\n    struct ArrowArrayView view;\n    ArrowArrayViewInitFromSchema(&view, table.schema(), nullptr);\n    ArrowArrayViewSetArray(&view, table.array(), nullptr);\n\n    double val = ArrowArrayViewGetDoubleUnsafe(view.children[1], (int64_t)i);\n\n    ArrowArrayViewReset(&view);\n    return val;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pc/src.py",
    "content": "import pyarrow as pa\n\ndef makeLargeTable(n):\n    return pa.RecordBatch.from_pydict({\n        \"idx\": list(range(n)),\n        \"value\": [float(i) * 0.5 for i in range(n)]\n    })\n\ndef makeIndices(n):\n    return list(range(n))\n\ndef sumReals(xs):\n    return sum(xs)\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pr/exp.txt",
    "content": "249750\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pr/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-py\nimport root-r (Int, Real)\n\ntable Stats = Stats {idx :: Int, value :: Real}\ntable Py => Stats = \"arrow\"\ntable R  => Stats = \"arrow\"\n\nsource Py from \"src.py\" (\"makeLargeTable\", \"makeIndices\", \"sumReals\")\nsource R from \"src.R\" (\"lookupValue\")\n\nmakeLargeTable :: Int -> Stats\nlookupValue :: Stats -> Int -> Real\nmakeIndices :: Int -> [Int]\nsumReals :: [Real] -> Real\n\nresult ::\n  --' number of rows in table\n  --' arg: --nrows\n  --' default: 100000\n  Int ->\n  --' number of foreign calls\n  --' arg: --ncalls\n  --' default: 1000\n  Int ->\n  Real\nresult nrows ncalls =\n  let tbl = makeLargeTable nrows\n  in sumReals (map (lookupValue tbl) (makeIndices ncalls))\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pr/src.R",
    "content": "lookupValue <- function(table, i) {\n  as.double(as.vector(table$value)[i + 1L])\n}\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-pr/src.py",
    "content": "import pyarrow as pa\n\ndef makeLargeTable(n):\n    return pa.RecordBatch.from_pydict({\n        \"idx\": list(range(n)),\n        \"value\": [float(i) * 0.5 for i in range(n)]\n    })\n\ndef makeIndices(n):\n    return list(range(n))\n\ndef sumReals(xs):\n    return sum(xs)\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-rp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-rp/exp.txt",
    "content": "249750\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-rp/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-py\nimport root-r (Int, Real)\n\ntable Stats = Stats {idx :: Int, value :: Real}\ntable Py => Stats = \"arrow\"\ntable R  => Stats = \"arrow\"\n\nsource R from \"src.R\" (\"makeLargeTable\", \"makeIndices\", \"sumReals\")\nsource Py from \"src.py\" (\"lookupValue\")\n\nmakeLargeTable :: Int -> Stats\nlookupValue :: Stats -> Int -> Real\nmakeIndices :: Int -> [Int]\nsumReals :: [Real] -> Real\n\nresult ::\n  --' number of rows in table\n  --' arg: --nrows\n  --' default: 100000\n  Int ->\n  --' number of foreign calls\n  --' arg: --ncalls\n  --' default: 1000\n  Int ->\n  Real\nresult nrows ncalls =\n  let tbl = makeLargeTable nrows\n  in sumReals (map (lookupValue tbl) (makeIndices ncalls))\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-rp/src.R",
    "content": "makeLargeTable <- function(n) {\n  arrow::record_batch(\n    idx = 0L:(n - 1L),\n    value = as.double(0:(n - 1)) * 0.5\n  )\n}\n\nmakeIndices <- function(n) {\n  as.integer(0:(n - 1))\n}\n\nsumReals <- function(xs) {\n  sum(xs)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-immutable-rp/src.py",
    "content": "def lookupValue(table, i):\n    return table.column(\"value\")[i].as_py()\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus makePeople > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-cpp/exp.txt",
    "content": "[{\"name\":\"Alice\",\"age\":30},{\"name\":\"Bob\",\"age\":25}]\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-cpp/main.loc",
    "content": "module main (makePeople)\n\nimport root\nimport root-cpp\n\ntable Person = Person {name :: Str, age :: Int}\ntable Cpp => Person = \"arrow\"\n\nsource Cpp from \"src.hpp\" (\"makePeople\")\n\nmakePeople :: Person\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-cpp/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n\n#include \"mlc_arrow.hpp\"\n#include <nanoarrow/nanoarrow.h>\n\nmlc::ArrowTable makePeople() {\n    struct ArrowSchema schema;\n    struct ArrowArray array;\n\n    ArrowSchemaInit(&schema);\n    ArrowSchemaSetTypeStruct(&schema, 2);\n\n    ArrowSchemaSetFormat(schema.children[0], \"u\");\n    ArrowSchemaSetName(schema.children[0], \"name\");\n\n    ArrowSchemaSetFormat(schema.children[1], \"i\");\n    ArrowSchemaSetName(schema.children[1], \"age\");\n\n    ArrowArrayInitFromSchema(&array, &schema, nullptr);\n    ArrowArrayStartAppending(&array);\n\n    ArrowArrayAppendString(array.children[0], ArrowCharView(\"Alice\"));\n    ArrowArrayAppendInt(array.children[1], 30);\n    ArrowArrayFinishElement(&array);\n\n    ArrowArrayAppendString(array.children[0], ArrowCharView(\"Bob\"));\n    ArrowArrayAppendInt(array.children[1], 25);\n    ArrowArrayFinishElement(&array);\n\n    ArrowArrayFinishBuildingDefault(&array, nullptr);\n\n    return mlc::ArrowTable(std::move(schema), std::move(array));\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus makePeople > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-py/exp.txt",
    "content": "[{\"name\":\"Alice\",\"age\":30},{\"name\":\"Bob\",\"age\":25}]\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-py/main.loc",
    "content": "module main (makePeople)\n\nimport root\nimport root-py\n\ntable Person = Person {name :: Str, age :: Int}\ntable Py => Person = \"arrow\"\n\nsource Py from \"src.py\" (\"makePeople\")\n\nmakePeople :: Person\n"
  },
  {
    "path": "test-suite/golden-tests/arrow-nexus-py/src.py",
    "content": "import pyarrow as pa\n\ndef makePeople():\n    return pa.RecordBatch.from_pydict({\"name\": [\"Alice\", \"Bob\"], \"age\": [30, 25]})\n"
  },
  {
    "path": "test-suite/golden-tests/bare-selector-args/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus starlingTest '[1,2]' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/bare-selector-args/exp.txt",
    "content": "1\n"
  },
  {
    "path": "test-suite/golden-tests/bare-selector-args/main.loc",
    "content": "module test (starlingTest)\n\nimport root\nimport root-py\n\nstarlingTest :: (Int, Int) -> Int\nstarlingTest = starling const .0 .1\n"
  },
  {
    "path": "test-suite/golden-tests/bare-selector-chain/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus getNestedFirst '[[1,2],3]' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/bare-selector-chain/exp.txt",
    "content": "1\n"
  },
  {
    "path": "test-suite/golden-tests/bare-selector-chain/main.loc",
    "content": "module test (getNestedFirst)\n\nimport root-py\n\ngetNestedFirst :: ((Int, Int), Int) -> Int\ngetNestedFirst = .0.0\n"
  },
  {
    "path": "test-suite/golden-tests/bug-intrinsic-schema-crash/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus schemaStable 42 > obs.txt 2> obs.err\n\t./nexus typeofStable 42 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n"
  },
  {
    "path": "test-suite/golden-tests/bug-intrinsic-schema-crash/exp.txt",
    "content": "true\ntrue\n"
  },
  {
    "path": "test-suite/golden-tests/bug-intrinsic-schema-crash/helper.py",
    "content": "def identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/bug-intrinsic-schema-crash/main.loc",
    "content": "-- BUG-006: @schema and @typeof crash the compiler\n--\n-- In v0.74.0, using @schema or @typeof causes a compiler crash at codegen:\n--     morloc: Runtime intrinsic @IntrSchema reached code generation without schema\n--     CallStack (from HasCallStack):\n--       error, called at library/Morloc/CodeGenerator/Grammars/Translator/Imperative.hs:513:3\n--\n-- Root cause: Serialize.hs:293-313 (intrinsicSchema) has explicit cases for\n-- IntrHash/IntrSave/IntrSaveM/IntrSaveJ/IntrShow/IntrLoad/IntrRead but is\n-- MISSING cases for IntrSchema and IntrTypeof. They fall through to the\n-- catch-all that returns Nothing, and Imperative.hs:512-513 then errors\n-- because the intrinsic reached lowering without a computed schema.\n--\n-- This test asserts: @schema and @typeof compile without crashing AND\n-- produce deterministic output (the same schema/typeof string for the same\n-- type across multiple calls).\nmodule main (schemaStable, typeofStable)\n\nimport root-py\n\nsource Py from \"helper.py\" (\"identity\")\nidentity :: a -> a\n\n-- @schema of the same type should be identical across calls\nschemaStable :: Int -> Bool\nschemaStable x = (@schema (identity x)) == (@schema (identity (identity x)))\n\n-- @typeof of the same type should be identical across calls\ntypeofStable :: Int -> Bool\ntypeofStable x = (@typeof (identity x)) == (@typeof (identity (identity x)))\n"
  },
  {
    "path": "test-suite/golden-tests/bug-load-type-infer/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus roundTrip 42 '\"/tmp/bug-load-type-infer.bin\"' > obs.txt 2> obs.err\n\trm -f /tmp/bug-load-type-infer.bin\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n\trm -f /tmp/bug-load-type-infer.bin\n"
  },
  {
    "path": "test-suite/golden-tests/bug-load-type-infer/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/bug-load-type-infer/helper.py",
    "content": "def identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/bug-load-type-infer/main.loc",
    "content": "-- BUG-003: @load type inference fails in do-blocks\n--\n-- In v0.74.0, a @save/@load round-trip in a do-block fails to typecheck:\n--     Type mismatch:\n--       expected: ?Int\n--       inferred: <IO> ?<a>\n--     Subtype error: Type mismatch fall through\n--       <IO> ?<a> <: ?Int\n--\n-- Root cause: @load is assigned a fresh type variable in\n-- Typecheck.hs:625-627 (intrinsicTypeG IntrLoad returns <IO> ?a), but the\n-- do-block handler does not propagate the enclosing function's declared\n-- return type inward to constrain that fresh variable.\n--\n-- This test asserts: roundTrip 42 path writes 42 to the file and loads it\n-- back, returning 42.\nmodule main (roundTrip)\n\nimport root-py\n\nsource Py from \"helper.py\" (\"identity\")\nidentity :: a -> a\n\nroundTrip :: Int -> Str -> <IO> ?Int\nroundTrip x path = do\n  @save (identity x) path\n  x <- @load path\n  x\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus addTwo 5 > obs.txt 2> obs.err\n\t./nexus doubleList '[1,2,3,4]' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-1/exp.txt",
    "content": "7\n[2,4,6,8]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-1/main.loc",
    "content": "module main (addTwo, doubleList)\n\nimport root-py\n\n-- Simple function that adds 2 to a number\naddTwo :: Int -> Int\naddTwo x = x + 2\n\n-- Function that doubles all elements in a list\ndoubleList :: [Int] -> [Int]\ndoubleList xs = map ((*) 2) xs\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus analyze --sample 1 > obs.txt 2> obs.err\n\t./nexus motifs --sample 1 --kmer-length 3 2>> obs.err  >> obs.txt\n\t./nexus align --seq1 1 --seq2 2 2>> obs.err  >> obs.txt\n\t./nexus gcContent 1 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/README.md",
    "content": "# Claude-Test-10: DNA Sequence Analyzer\n\n## Overview\nDemonstrates bioinformatics workflows with Python for sequence I/O, C++ for alignment algorithms, and R for statistical analysis of nucleotide sequences.\n\n## Features Tested\n- **String algorithms**: DNA sequence analysis, alignment, motif finding\n- **Record field access**: `.nucleotides`, `.seqId` accessor syntax\n- **Lists of strings**: K-mer lists\n- **Records with strings**: Sequence, Alignment, KmerProfile records\n- **String validation**: C++ validates DNA sequences\n- **Cross-language biology pipelines**: Python → C++ → R\n\n## Architecture\n\n### Python (`sequence_io.py`)\n- `getSampleSequence(id)`: Retrieve sample DNA sequences\n- `createSequence(id, desc, nucleotides)`: Create Sequence record\n- `formatAlignment(alignment)`: Format alignment results\n\n### C++ (`sequence_align.hpp`)\n- `alignSmithWaterman(seq1, seq2)`: Local sequence alignment\n- `findKmers(seq, k)`: Extract all k-mers of length k\n- `countKmer(seq, kmer)`: Count k-mer occurrences\n- `validateSequence(seq)`: Validate DNA alphabet (ATGC only)\n\n### R (`sequence_stats.R`)\n- `calculateGC(seq)`: Compute GC content percentage\n- `sequenceComplexity(seq)`: Measure sequence complexity (unique k-mers ratio)\n- `analyzeComposition(seq)`: Full composition analysis\n\n## Biological Background\n\n**GC Content:** Percentage of guanine (G) and cytosine (C) bases. Important for:\n- Gene prediction\n- Species classification\n- Thermal stability\n\n**Smith-Waterman Alignment:** Finds optimal local alignment between sequences\n- Match: +2\n- Mismatch: -1\n- Gap: -1\n\n**K-mers:** Substrings of length k used for:\n- Motif discovery\n- Sequence assembly\n- Pattern recognition\n\n## Data Flow\n\n1. **analyzeSequence**: Python gets sequence → R analyzes composition\n2. **findMotifs**: Python gets sequence → C++ finds all k-mers\n3. **alignSequences**: Python gets 2 sequences → C++ aligns them\n4. **gcContent**: Python gets sequence → R calculates GC%\n\n## Sample Sequences\n\n1. **SEQ001**: Sample E. coli sequence (mixed content)\n2. **SEQ002**: Sample human sequence (mixed content)\n3. **SEQ003**: High GC content (all GC)\n4. **SEQ004**: Low complexity (ATAT repeat)\n5. **SEQ005**: Random sequence\n\n## Example Usage\n\n```bash\n# Analyze sequence composition\n./nexus analyze --sample 1\n\n# Find all 3-mers\n./nexus motifs --sample 1 --kmer-length 3\n\n# Align sequences 1 and 2\n./nexus align --seq1 1 --seq2 2\n\n# Calculate GC content\n./nexus gc-content --sample 3\n```\n\n## What This Tests\n\n1. **String-heavy operations** across languages\n2. **Record field accessors**: `.nucleotides`, `.seqId`\n3. **Lists of strings** serialization\n4. **Algorithm implementation**: Smith-Waterman in C++\n5. **String validation**: DNA alphabet checking\n6. **Statistical string analysis** in R\n7. **Domain-specific types**: DNA sequences as strings\n8. **Pattern matching** and motif finding\n\n## Expected Results\n\n- **GC content (SEQ001)**: ~47.5%\n- **GC content (SEQ003)**: 100%\n- **GC content (SEQ004)**: 0%\n- **Alignment SEQ001 vs SEQ002**: High similarity\n- **3-mers in SEQ001**: Multiple overlapping k-mers\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/exp.txt",
    "content": "{\"gcContent\":0.5,\"atContent\":0.5,\"length\":40,\"complexity\":0.342105263157895}\n[\"ATG\",\"TGC\",\"GCG\",\"CGA\",\"GAT\",\"ATC\",\"TCG\",\"CGA\",\"GAT\",\"ATC\",\"TCG\",\"CGA\",\"GAT\",\"ATC\",\"TCG\",\"CGA\",\"GAT\",\"ATC\",\"TCG\",\"CGT\",\"GTA\",\"TAG\",\"AGC\",\"GCT\",\"CTA\",\"TAG\",\"AGC\",\"GCT\",\"CTA\",\"TAG\",\"AGC\",\"GCT\",\"CTA\",\"TAG\",\"AGC\",\"GCT\",\"CTA\",\"TAG\"]\n{\"score\":56,\"seq1\":\"ATGCGATCGATCGATCGATCGTAGCTAGCTAGCTAGCTAG\",\"seq2\":\"ATGCGATCGTAGCTAGCTAGCTGCATGCTAGCTAGCTAG\",\"alignedSeq1\":\"ATGCGATCGATCGATCGATCGTAGCTAGCTAGCTAGCTA\",\"alignedSeq2\":\"ATGCGATCGTAGCTAGCTAGCTGCATGCTAGCTAGCTAG\",\"identity\":0.282051282051282}\n0.5\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/main.loc",
    "content": "-- desc: DNA Sequence Analyzer - Bioinformatics\n-- author: Claude\n--\n-- Demonstrates bioinformatics workflows with Python for I/O,\n-- C++ for sequence alignment, and R for statistical analysis.\nmodule main (analyzeSequence, findMotifs, alignSequences, gcContent)\n\n-- Type mappings\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Py => Real = \"float\"\ntype R => Real = \"double\"\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\n\n-- DNA sequence record\nrecord Sequence where\n  seqId :: Str\n  description :: Str\n  nucleotides :: Str\n  length :: Int\n\nrecord Py => Sequence = \"dict\"\nrecord Cpp => Sequence = \"struct\"\nrecord R => Sequence = \"list\"\n\n-- Alignment result\nrecord Alignment where\n  score :: Real\n  seq1 :: Str\n  seq2 :: Str\n  alignedSeq1 :: Str\n  alignedSeq2 :: Str\n  identity :: Real\n\nrecord Py => Alignment = \"dict\"\nrecord Cpp => Alignment = \"Alignment\"\nrecord R => Alignment = \"list\"\n\n-- K-mer profile\nrecord KmerProfile where\n  kmer :: Str\n  frequency :: Int\n  positions :: [Int]\n\nrecord Py => KmerProfile = \"dict\"\nrecord Cpp => KmerProfile = \"struct\"\nrecord R => KmerProfile = \"list\"\n\n-- Sequence statistics\nrecord SeqStats where\n  gcContent :: Real\n  atContent :: Real\n  length :: Int\n  complexity :: Real\n\nrecord Py => SeqStats = \"dict\"\nrecord Cpp => SeqStats = \"struct\"\nrecord R => SeqStats = \"list\"\n\n-- Python I/O\nsource Py from \"sequence_io.py\" (\"createSequence\", \"getSampleSequence\", \"formatAlignment\")\ncreateSequence :: Str -> Str -> Str -> Sequence\ngetSampleSequence :: Int -> Sequence\nformatAlignment :: Alignment -> Str\n\n-- C++ sequence algorithms\nsource Cpp from \"sequence_align.hpp\" (\"alignSmithWaterman\", \"findKmers\", \"countKmer\", \"validateSequence\")\nalignSmithWaterman :: Str -> Str -> Alignment\nfindKmers :: Str -> Int -> [Str]\ncountKmer :: Str -> Str -> Int\nvalidateSequence :: Str -> Bool\n\n-- R statistics\nsource R from \"sequence_stats.R\" (\"calculateGC\", \"sequenceComplexity\", \"analyzeComposition\")\ncalculateGC :: Str -> Real\nsequenceComplexity :: Str -> Real\nanalyzeComposition :: Str -> SeqStats\n\n--' Analyze a DNA sequence\n--'\n--' Computes GC content and complexity statistics using R.\n--'\n--' name: analyze\nanalyzeSequence ::\n  --' Sample sequence ID (1-5)\n  --' arg: --sample\n  --' metavar: ID\n  --' default: 1\n  Int ->\n  --' return: Sequence statistics\n  SeqStats\nanalyzeSequence sampleId = analyzeComposition (.nucleotides seq) where\n  seq = getSampleSequence sampleId\n\n--' Find motifs (k-mers) in sequence\n--'\n--' Finds all k-mers of specified length in the sequence.\n--'\n--' name: motifs\nfindMotifs ::\n  --' Sample sequence ID\n  --' arg: --sample\n  --' metavar: ID\n  --' default: 1\n  Int ->\n  --' K-mer length\n  --' arg: -k/--kmer-length\n  --' metavar: K\n  --' default: 3\n  Int ->\n  --' return: List of k-mers\n  [Str]\nfindMotifs sampleId k = findKmers (.nucleotides seq) k where\n  seq = getSampleSequence sampleId\n\n--' Align two sequences\n--'\n--' Performs Smith-Waterman local alignment on two sample sequences.\n--'\n--' name: align\nalignSequences ::\n  --' First sequence ID\n  --' arg: --seq1\n  --' metavar: ID1\n  --' default: 1\n  Int ->\n  --' Second sequence ID\n  --' arg: --seq2\n  --' metavar: ID2\n  --' default: 2\n  Int ->\n  --' return: Alignment result\n  Alignment\nalignSequences id1 id2 = alignSmithWaterman (.nucleotides seq1) (.nucleotides seq2) where\n  seq1 = getSampleSequence id1\n  seq2 = getSampleSequence id2\n\n--' Calculate GC content\n--'\n--' Computes the GC content percentage of a sequence.\n--'\n--' name: gcContent\ngcContent ::\n  --' Sample sequence ID\n  --' metavar: ID\n  Int ->\n  --' return: GC content (0-1)\n  Real\ngcContent sampleId = calculateGC (.nucleotides seq) where\n  seq = getSampleSequence sampleId\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/sequence_align.hpp",
    "content": "#include <vector>\n#include <string>\n#include <algorithm>\n#include <cctype>\n\nstruct Alignment {\n    double score;\n    std::string seq1;\n    std::string seq2;\n    std::string alignedSeq1;\n    std::string alignedSeq2;\n    double identity;\n};\n\n// Simple Smith-Waterman local alignment\nAlignment alignSmithWaterman(std::string seq1, std::string seq2) {\n    int m = seq1.length();\n    int n = seq2.length();\n\n    // Scoring scheme\n    const int match = 2;\n    const int mismatch = -1;\n    const int gap = -1;\n\n    // Initialize matrix\n    std::vector<std::vector<int>> H(m + 1, std::vector<int>(n + 1, 0));\n\n    int maxScore = 0;\n    int maxI = 0, maxJ = 0;\n\n    // Fill matrix\n    for (int i = 1; i <= m; i++) {\n        for (int j = 1; j <= n; j++) {\n            int matchScore = (seq1[i-1] == seq2[j-1]) ? match : mismatch;\n            int score = std::max({\n                0,\n                H[i-1][j-1] + matchScore,\n                H[i-1][j] + gap,\n                H[i][j-1] + gap\n            });\n            H[i][j] = score;\n\n            if (score > maxScore) {\n                maxScore = score;\n                maxI = i;\n                maxJ = j;\n            }\n        }\n    }\n\n    // Traceback (simplified - just return simple alignment)\n    std::string aligned1 = seq1.substr(0, std::min(m, n));\n    std::string aligned2 = seq2.substr(0, std::min(m, n));\n\n    // Calculate identity\n    int matches = 0;\n    int len = std::min(aligned1.length(), aligned2.length());\n    for (int i = 0; i < len; i++) {\n        if (aligned1[i] == aligned2[i]) matches++;\n    }\n    double identity = (len > 0) ? static_cast<double>(matches) / len : 0.0;\n\n    Alignment result;\n    result.score = maxScore;\n    result.seq1 = seq1;\n    result.seq2 = seq2;\n    result.alignedSeq1 = aligned1;\n    result.alignedSeq2 = aligned2;\n    result.identity = identity;\n\n    return result;\n}\n\n// Find all k-mers in a sequence\nstd::vector<std::string> findKmers(std::string seq, int k) {\n    std::vector<std::string> kmers;\n\n    for (size_t i = 0; i + k <= seq.length(); i++) {\n        kmers.push_back(seq.substr(i, k));\n    }\n\n    return kmers;\n}\n\n// Count occurrences of a k-mer in sequence\nint countKmer(std::string seq, std::string kmer) {\n    int count = 0;\n    size_t pos = 0;\n\n    while ((pos = seq.find(kmer, pos)) != std::string::npos) {\n        count++;\n        pos++;\n    }\n\n    return count;\n}\n\n// Validate DNA sequence (only A, T, G, C)\nbool validateSequence(std::string seq) {\n    for (char c : seq) {\n        char upper = std::toupper(c);\n        if (upper != 'A' && upper != 'T' && upper != 'G' && upper != 'C') {\n            return false;\n        }\n    }\n    return true;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/sequence_io.py",
    "content": "SAMPLE_SEQUENCES = {\n    1: (\"SEQ001\", \"Sample E. coli sequence\", \"ATGCGATCGATCGATCGATCGTAGCTAGCTAGCTAGCTAG\"),\n    2: (\"SEQ002\", \"Sample human sequence\", \"ATGCGATCGTAGCTAGCTAGCTGCATGCTAGCTAGCTAG\"),\n    3: (\"SEQ003\", \"High GC content\", \"GCGCGCGCGCGCGCGCGCGCGCGCGCGCGCGCGCGC\"),\n    4: (\"SEQ004\", \"Low complexity\", \"ATATATATATATATATATATATATATATATATAT\"),\n    5: (\"SEQ005\", \"Random sequence\", \"ACGTACGTTAGCTAGCTAGCTAGCTACGTACGTACGT\")\n}\n\ndef getSampleSequence(sampleId):\n    \"\"\"Get a sample DNA sequence by ID\"\"\"\n    if sampleId in SAMPLE_SEQUENCES:\n        seqId, desc, nucs = SAMPLE_SEQUENCES[sampleId]\n    else:\n        seqId, desc, nucs = SAMPLE_SEQUENCES[1]\n\n    return {\n        \"seqId\": seqId,\n        \"description\": desc,\n        \"nucleotides\": nucs,\n        \"length\": len(nucs)\n    }\n\ndef createSequence(seqId, description, nucleotides):\n    \"\"\"Create a Sequence record\"\"\"\n    return {\n        \"seqId\": seqId,\n        \"description\": description,\n        \"nucleotides\": nucleotides,\n        \"length\": len(nucleotides)\n    }\n\ndef formatAlignment(alignment):\n    \"\"\"Format alignment result as a string\"\"\"\n    lines = [\n        f\"Alignment Score: {alignment['score']:.2f}\",\n        f\"Identity: {alignment['identity']*100:.1f}%\",\n        \"\",\n        alignment['alignedSeq1'],\n        alignment['alignedSeq2']\n    ]\n    return \"\\n\".join(lines)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-10/sequence_stats.R",
    "content": "calculateGC <- function(seq) {\n  # Convert to uppercase\n  seq <- toupper(seq)\n\n  # Count G and C\n  chars <- strsplit(seq, \"\")[[1]]\n  gc_count <- sum(chars == \"G\" | chars == \"C\")\n  total <- length(chars)\n\n  gc_count / total\n}\n\nsequenceComplexity <- function(seq) {\n  # Simple complexity measure: unique k-mers / total k-mers\n  k <- 3\n  chars <- strsplit(seq, \"\")[[1]]\n\n  if (length(chars) < k) return(1.0)\n\n  kmers <- character(length(chars) - k + 1)\n  for (i in 1:(length(chars) - k + 1)) {\n    kmers[i] <- paste(chars[i:(i+k-1)], collapse=\"\")\n  }\n\n  unique_count <- length(unique(kmers))\n  total_count <- length(kmers)\n\n  unique_count / total_count\n}\n\nanalyzeComposition <- function(seq) {\n  seq <- toupper(seq)\n  chars <- strsplit(seq, \"\")[[1]]\n\n  gc <- sum(chars == \"G\" | chars == \"C\") / length(chars)\n  at <- sum(chars == \"A\" | chars == \"T\") / length(chars)\n\n  list(\n    gcContent = gc,\n    atContent = at,\n    length = length(chars),\n    complexity = sequenceComplexity(seq)\n  )\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-11/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- sysReport ---\" > obs.txt\n\t./nexus sysReport >> obs.txt 2>> obs.err\n\techo \"--- envCal ---\" >> obs.txt\n\t./nexus envCal >> obs.txt 2>> obs.err\n\techo \"--- cpuLoad ---\" >> obs.txt\n\t./nexus cpuLoad >> obs.txt 2>> obs.err\n\techo \"--- multiQuery ---\" >> obs.txt\n\t./nexus multiQuery >> obs.txt 2>> obs.err\n\techo \"--- sharedQuery ---\" >> obs.txt\n\t./nexus sharedQuery >> obs.txt 2>> obs.err\n\techo \"--- pipeline ---\" >> obs.txt\n\t./nexus pipeline >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-11/exp.txt",
    "content": "--- sysReport ---\nCLOCK_QUERY\nPID_QUERY\nUPTIME_QUERY\n\"[Jan-86400] code=524550 pid=1234\"\n--- envCal ---\nPID_QUERY\n\"Oct-2025\"\n--- cpuLoad ---\nCPU_QUERY\nUPTIME_QUERY\n904710\n--- multiQuery ---\nCPU_QUERY\nCPU_QUERY\n476269\n--- sharedQuery ---\nCPU_QUERY\n476269\n--- pipeline ---\nENV_QUERY\nCLOCK_QUERY\nUPTIME_QUERY\nPID_QUERY\n\"[\\/home\\/user] code=904707 pid=1234\"\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-11/main.loc",
    "content": "-- Test effects, do-notation, cross-language composition, and let-sharing\n-- with OS-introspection-style functions returning deterministic fake values.\n-- Each effectful call prints a trace message to stdout for evaluation verification.\n\nmodule main (sysReport, envCal, cpuLoad, multiQuery, sharedQuery, pipeline)\n\nimport root-cpp\nimport root-py\nimport root-r\n\nsource Cpp from \"sysinfo.hpp\" (\"clockResNs\", \"cpuCount\", \"hashStr\")\nsource Py from \"sysinfo.py\" (\"procId\", \"envHome\", \"calMonth\")\nsource R from \"sysinfo.R\" (\"uptimeSec\", \"fmtReport\")\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\n\ntype R => Int = \"integer\"\ntype R => Str = \"character\"\n\nclockResNs :: <IO> Int\ncpuCount :: <IO> Int\nhashStr :: Str -> Int\n\nprocId :: <IO> Int\nenvHome :: <IO> Str\ncalMonth :: Int -> Int -> Str\n\nuptimeSec :: <IO> Int\nfmtReport :: Str -> Int -> Int -> Str\n\n-- Cross-language do-block: C++, Python, R effects; pure functions on results\nsysReport :: <IO> Str\nsysReport = do\n    clk <- clockResNs\n    pid <- procId\n    up <- uptimeSec\n    fmtReport (calMonth up clk) (hashStr \"/home/user\") pid\n\n-- Do-block with effect evaluation as argument to pure function\nenvCal :: <IO> Str\nenvCal = do\n    pid <- procId\n    calMonth 2025 pid\n\n-- Effectful results feeding into pure functions across languages\ncpuLoad :: <IO> Int\ncpuLoad = do\n    c <- cpuCount\n    u <- uptimeSec\n    hashStr (calMonth c u)\n\n-- Two independent evaluations of same effect (no sharing, 2x trace)\nmultiQuery :: <IO> Int\nmultiQuery = do\n    c1 <- cpuCount\n    c2 <- cpuCount\n    hashStr (calMonth c1 c2)\n\n-- Shared binding: evaluates once, result shared\nsharedQuery :: <IO> Int\nsharedQuery = do\n    c <- cpuCount\n    hashStr (calMonth c c)\n\n-- Long cross-language chain: 4-step do-block, 3 languages\npipeline :: <IO> Str\npipeline = do\n    home <- envHome\n    clk <- clockResNs\n    up <- uptimeSec\n    pid <- procId\n    fmtReport home (hashStr (calMonth clk up)) pid\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-11/sysinfo.R",
    "content": "uptimeSec <- function() {\n    cat(\"UPTIME_QUERY\\n\")\n    flush(stdout())\n    86400L\n}\n\nfmtReport <- function(label, code, id) {\n    paste0(\"[\", label, \"] code=\", code, \" pid=\", id)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-11/sysinfo.hpp",
    "content": "#ifndef __SYSINFO_HPP__\n#define __SYSINFO_HPP__\n\n#include <string>\n#include <iostream>\n\nint clockResNs() {\n    std::cout << \"CLOCK_QUERY\" << std::endl;\n    return 1;\n}\n\nint cpuCount() {\n    std::cout << \"CPU_QUERY\" << std::endl;\n    return 4;\n}\n\nint hashStr(std::string s) {\n    int hash = 0;\n    for (char c : s) {\n        hash = (hash * 31 + c) % 1000003;\n    }\n    return hash;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-11/sysinfo.py",
    "content": "import sys\n\ndef procId():\n    print(\"PID_QUERY\")\n    sys.stdout.flush()\n    return 1234\n\ndef envHome():\n    print(\"ENV_QUERY\")\n    sys.stdout.flush()\n    return \"/home/user\"\n\ndef calMonth(year, month):\n    months = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\"]\n    idx = (month - 1) % 12\n    return months[idx] + \"-\" + str(year)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-12/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- foo ---\" > obs.txt\n\t./nexus foo >> obs.txt 2>> obs.err\n\techo \"--- bar ---\" >> obs.txt\n\t./nexus bar >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-12/exp.txt",
    "content": "--- foo ---\n42\n--- bar ---\n3.14\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-12/helper.py",
    "content": "def my_id(x):\n    return x\n\ndef my_val():\n    return 3.14\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-12/main.loc",
    "content": "-- Test type annotations on standalone polymorphic functions (not typeclass methods).\n-- This exercises the fix for annotation lookup using the concrete index (ci)\n-- rather than the general index (gi) in checkG/synthG.\n\nmodule main (foo, bar)\n\nimport root-py\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\n\nmyId :: a -> a\nsource Py from \"helper.py\" (\"my_id\" as myId)\n\nmyVal :: a\nsource Py from \"helper.py\" (\"my_val\" as myVal)\n\nfoo :: Int\nfoo = (myId :: Int -> Int) 42\n\nbar :: Real\nbar = myVal :: Real\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-13/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- manyCallsSmall ---\" > obs.txt\n\t./nexus manyCallsSmall >> obs.txt 2> obs.err\n\techo \"--- manyCallsLarge ---\" >> obs.txt\n\t./nexus manyCallsLarge >> obs.txt 2>> obs.err\n\techo \"--- chainCppPy ---\" >> obs.txt\n\t./nexus chainCppPy >> obs.txt 2>> obs.err\n\techo \"--- chainPyCpp ---\" >> obs.txt\n\t./nexus chainPyCpp >> obs.txt 2>> obs.err\n\techo \"--- diamondStr ---\" >> obs.txt\n\t./nexus diamondStr >> obs.txt 2>> obs.err\n\techo \"--- diamondList ---\" >> obs.txt\n\t./nexus diamondList >> obs.txt 2>> obs.err\n\techo \"--- letSharedAcrossLangs ---\" >> obs.txt\n\t./nexus letSharedAcrossLangs >> obs.txt 2>> obs.err\n\techo \"--- deepDoChain ---\" >> obs.txt\n\t./nexus deepDoChain >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-13/exp.txt",
    "content": "--- manyCallsSmall ---\n10\n--- manyCallsLarge ---\n1000\n--- chainCppPy ---\n5350\n--- chainPyCpp ---\n5350\n--- diamondStr ---\n100\n--- diamondList ---\n420\n--- letSharedAcrossLangs ---\n300\n--- deepDoChain ---\n245\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-13/main.loc",
    "content": "-- SHM stress tests: exercises reference counting and deferred cleanup\n-- across many sequential cross-language calls with various data sizes.\n\nmodule main\n  ( manyCallsSmall\n  , manyCallsLarge\n  , chainCppPy\n  , chainPyCpp\n  , diamondStr\n  , diamondList\n  , letSharedAcrossLangs\n  , deepDoChain\n  )\n\nimport root-cpp\nimport root-py\n\nsource Cpp from \"stress.hpp\"\n  ( \"makeStr\" as cppMakeStr\n  , \"strLen\" as cppStrLen\n  , \"sumList\" as cppSumList\n  , \"makeRange\" as cppMakeRange\n  , \"incAll\" as cppIncAll\n  , \"idStr\" as cppIdStr\n  , \"idList\" as cppIdList\n  )\n\nsource Py from \"stress.py\"\n  ( \"makeStr\" as pyMakeStr\n  , \"strLen\" as pyStrLen\n  , \"sumList\" as pySumList\n  , \"makeRange\" as pyMakeRange\n  , \"incAll\" as pyIncAll\n  , \"idStr\" as pyIdStr\n  , \"idList\" as pyIdList\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\ncppMakeStr :: Int -> Str\ncppStrLen :: Str -> Int\ncppSumList :: [Int] -> Int\ncppMakeRange :: Int -> [Int]\ncppIncAll :: [Int] -> [Int]\ncppIdStr :: Str -> Str\ncppIdList :: [Int] -> [Int]\n\npyMakeStr :: Int -> Str\npyStrLen :: Str -> Int\npySumList :: [Int] -> Int\npyMakeRange :: Int -> [Int]\npyIncAll :: [Int] -> [Int]\npyIdStr :: Str -> Str\npyIdList :: [Int] -> [Int]\n\n-- Test 1: 30 sequential small cross-language calls.\n-- Each call crosses Cpp->Py or Py->Cpp boundary, allocating and freeing SHM.\n-- Without proper cleanup, this would exhaust the 32-volume SHM limit.\nmanyCallsSmall :: Int\nmanyCallsSmall =\n    let s1 = cppMakeStr 10\n        n1 = pyStrLen s1\n        s2 = pyMakeStr n1\n        n2 = cppStrLen s2\n        s3 = cppMakeStr n2\n        n3 = pyStrLen s3\n        s4 = pyMakeStr n3\n        n4 = cppStrLen s4\n        s5 = cppMakeStr n4\n        n5 = pyStrLen s5\n        s6 = pyMakeStr n5\n        n6 = cppStrLen s6\n        s7 = cppMakeStr n6\n        n7 = pyStrLen s7\n        s8 = pyMakeStr n7\n        n8 = cppStrLen s8\n        s9 = cppMakeStr n8\n        n9 = pyStrLen s9\n        s10 = pyMakeStr n9\n        n10 = cppStrLen s10\n        s11 = cppMakeStr n10\n        n11 = pyStrLen s11\n        s12 = pyMakeStr n11\n        n12 = cppStrLen s12\n        s13 = cppMakeStr n12\n        n13 = pyStrLen s13\n        s14 = pyMakeStr n13\n        n14 = cppStrLen s14\n        s15 = cppMakeStr n14\n    in pyStrLen s15\n\n-- Test 2: Cross-language calls with large strings (1000 chars each).\n-- Large strings have sub-allocations in SHM that need shfree_by_schema.\nmanyCallsLarge :: Int\nmanyCallsLarge =\n    let s1 = cppMakeStr 1000\n        s2 = pyIdStr s1\n        s3 = cppIdStr s2\n        s4 = pyIdStr s3\n        s5 = cppIdStr s4\n        s6 = pyIdStr s5\n        s7 = cppIdStr s6\n        s8 = pyIdStr s7\n        s9 = cppIdStr s8\n        s10 = pyIdStr s9\n    in cppStrLen s10\n\n-- Test 3: Chain from C++ to Python - result forwarded through multiple hops.\n-- Tests that forwarded result packets have proper refcounting.\nchainCppPy :: Int\nchainCppPy =\n    let xs = cppMakeRange 100\n        ys = pyIncAll xs\n        zs = cppIncAll ys\n        ws = pyIncAll zs\n    in cppSumList ws\n\n-- Test 4: Chain from Python to C++ - opposite direction.\nchainPyCpp :: Int\nchainPyCpp =\n    let xs = pyMakeRange 100\n        ys = cppIncAll xs\n        zs = pyIncAll ys\n        ws = cppIncAll zs\n    in pySumList ws\n\n-- Test 5: Diamond pattern - same string value sent to two different\n-- cross-language calls. Tests that refcount handles fan-out correctly.\ndiamondStr :: Int\ndiamondStr =\n    let s = cppMakeStr 50\n        a = pyStrLen s\n        b = cppStrLen (pyIdStr s)\n    in a + b\n\n-- Test 6: Diamond with list data - same list value sent to two paths.\ndiamondList :: Int\ndiamondList =\n    let xs = pyMakeRange 20\n        a = cppSumList xs\n        b = pySumList (cppIdList xs)\n    in a + b\n\n-- Test 7: Let-bound value from one language used multiple times in another.\n-- The let-bound value crosses the language boundary each time it is used.\nletSharedAcrossLangs :: Int\nletSharedAcrossLangs =\n    let s = cppMakeStr 100\n    in pyStrLen s + pyStrLen s + pyStrLen s\n\n-- Test 8: Deep chain with 20 sequential cross-language list transformations.\n-- Each step creates a new SHM allocation. Without cleanup, exhausts volumes.\ndeepDoChain :: Int\ndeepDoChain =\n    let xs = cppMakeRange 10\n        xs2 = pyIncAll xs\n        xs3 = cppIncAll xs2\n        xs4 = pyIncAll xs3\n        xs5 = cppIncAll xs4\n        xs6 = pyIncAll xs5\n        xs7 = cppIncAll xs6\n        xs8 = pyIncAll xs7\n        xs9 = cppIncAll xs8\n        xs10 = pyIncAll xs9\n        xs11 = cppIncAll xs10\n        xs12 = pyIncAll xs11\n        xs13 = cppIncAll xs12\n        xs14 = pyIncAll xs13\n        xs15 = cppIncAll xs14\n        xs16 = pyIncAll xs15\n        xs17 = cppIncAll xs16\n        xs18 = pyIncAll xs17\n        xs19 = cppIncAll xs18\n        xs20 = pyIncAll xs19\n    in cppSumList xs20\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-13/stress.hpp",
    "content": "#ifndef __STRESS_HPP__\n#define __STRESS_HPP__\n\n#include <string>\n#include <vector>\n\nstd::string makeStr(int n) {\n    return std::string(n, 'x');\n}\n\nint strLen(const std::string& s) {\n    return (int)s.size();\n}\n\nint sumList(const std::vector<int>& xs) {\n    int total = 0;\n    for (int x : xs) total += x;\n    return total;\n}\n\nstd::vector<int> makeRange(int n) {\n    std::vector<int> result(n);\n    for (int i = 0; i < n; i++) result[i] = i + 1;\n    return result;\n}\n\nstd::vector<int> incAll(const std::vector<int>& xs) {\n    std::vector<int> result(xs.size());\n    for (size_t i = 0; i < xs.size(); i++) result[i] = xs[i] + 1;\n    return result;\n}\n\nstd::string idStr(const std::string& s) {\n    return s;\n}\n\nstd::vector<int> idList(const std::vector<int>& xs) {\n    return xs;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-13/stress.py",
    "content": "def makeStr(n):\n    return \"y\" * n\n\ndef strLen(s):\n    return len(s)\n\ndef sumList(xs):\n    return sum(xs)\n\ndef makeRange(n):\n    return list(range(1, n + 1))\n\ndef incAll(xs):\n    return [x + 1 for x in xs]\n\ndef idStr(s):\n    return s\n\ndef idList(xs):\n    return list(xs)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-14/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- effectChain ---\" > obs.txt\n\t./nexus effectChain >> obs.txt 2> obs.err\n\techo \"--- effectFanOut ---\" >> obs.txt\n\t./nexus effectFanOut >> obs.txt 2>> obs.err\n\techo \"--- letForwardedTwice ---\" >> obs.txt\n\t./nexus letForwardedTwice >> obs.txt 2>> obs.err\n\techo \"--- nestedLetCross ---\" >> obs.txt\n\t./nexus nestedLetCross >> obs.txt 2>> obs.err\n\techo \"--- listAppendCross ---\" >> obs.txt\n\t./nexus listAppendCross >> obs.txt 2>> obs.err\n\techo \"--- resultForwardChain ---\" >> obs.txt\n\t./nexus resultForwardChain >> obs.txt 2>> obs.err\n\techo \"--- effectInLetRhs ---\" >> obs.txt\n\t./nexus effectInLetRhs >> obs.txt 2>> obs.err\n\techo \"--- tripleForward ---\" >> obs.txt\n\t./nexus tripleForward >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-14/edge.hpp",
    "content": "#ifndef __EDGE_HPP__\n#define __EDGE_HPP__\n\n#include <string>\n#include <vector>\n\nint counter() {\n    static int n = 0;\n    return ++n;\n}\n\nint addTwo(int a, int b) {\n    return a + b;\n}\n\nint doubleIt(int x) {\n    return x * 2;\n}\n\nstd::vector<int> makePair(int a, int b) {\n    return {a, b};\n}\n\nstd::string replicateStr(int n, const std::string& s) {\n    std::string result;\n    for (int i = 0; i < n; i++) result += s;\n    return result;\n}\n\nint lenStr(const std::string& s) {\n    return (int)s.size();\n}\n\nint sumVec(const std::vector<int>& xs) {\n    int s = 0;\n    for (int x : xs) s += x;\n    return s;\n}\n\nstd::vector<int> appendVec(const std::vector<int>& a, const std::vector<int>& b) {\n    std::vector<int> result = a;\n    result.insert(result.end(), b.begin(), b.end());\n    return result;\n}\n\nstd::vector<int> incVec(const std::vector<int>& xs) {\n    std::vector<int> result(xs.size());\n    for (size_t i = 0; i < xs.size(); i++) result[i] = xs[i] + 1;\n    return result;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-14/edge.py",
    "content": "_counter = 0\n\ndef counter():\n    global _counter\n    _counter += 1\n    return _counter\n\ndef addTwo(a, b):\n    return a + b\n\ndef doubleIt(x):\n    return x * 2\n\ndef makePair(a, b):\n    return [a, b]\n\ndef replicateStr(n, s):\n    return s * n\n\ndef lenStr(s):\n    return len(s)\n\ndef sumVec(xs):\n    return sum(xs)\n\ndef appendVec(a, b):\n    return list(a) + list(b)\n\ndef incVec(xs):\n    return [x + 1 for x in xs]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-14/exp.txt",
    "content": "--- effectChain ---\n4\n--- effectFanOut ---\n4\n--- letForwardedTwice ---\n24\n--- nestedLetCross ---\n12\n--- listAppendCross ---\n10\n--- resultForwardChain ---\n18\n--- effectInLetRhs ---\n4\n--- tripleForward ---\n49\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-14/main.loc",
    "content": "-- SHM reference counting edge cases: tests patterns that exercise\n-- the refcount increment/decrement logic at language boundaries.\n\nmodule main\n  ( effectChain\n  , effectFanOut\n  , letForwardedTwice\n  , nestedLetCross\n  , listAppendCross\n  , resultForwardChain\n  , effectInLetRhs\n  , tripleForward\n  )\n\nimport root-cpp\nimport root-py\n\nsource Cpp from \"edge.hpp\"\n  ( \"counter\" as cppCounter\n  , \"addTwo\" as cppAdd\n  , \"doubleIt\" as cppDouble\n  , \"makePair\" as cppMakePair\n  , \"replicateStr\" as cppReplicateStr\n  , \"lenStr\" as cppLenStr\n  , \"sumVec\" as cppSumVec\n  , \"appendVec\" as cppAppendVec\n  , \"incVec\" as cppIncVec\n  )\n\nsource Py from \"edge.py\"\n  ( \"counter\" as pyCounter\n  , \"addTwo\" as pyAdd\n  , \"doubleIt\" as pyDouble\n  , \"makePair\" as pyMakePair\n  , \"replicateStr\" as pyReplicateStr\n  , \"lenStr\" as pyLenStr\n  , \"sumVec\" as pySumVec\n  , \"appendVec\" as pyAppendVec\n  , \"incVec\" as pyIncVec\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\ncppCounter :: <IO> Int\ncppAdd :: Int -> Int -> Int\ncppDouble :: Int -> Int\ncppMakePair :: Int -> Int -> [Int]\ncppReplicateStr :: Int -> Str -> Str\ncppLenStr :: Str -> Int\ncppSumVec :: [Int] -> Int\ncppAppendVec :: [Int] -> [Int] -> [Int]\ncppIncVec :: [Int] -> [Int]\n\npyCounter :: <IO> Int\npyAdd :: Int -> Int -> Int\npyDouble :: Int -> Int\npyMakePair :: Int -> Int -> [Int]\npyReplicateStr :: Int -> Str -> Str\npyLenStr :: Str -> Int\npySumVec :: [Int] -> Int\npyAppendVec :: [Int] -> [Int] -> [Int]\npyIncVec :: [Int] -> [Int]\n\n\n-- Test 1: Chain of effects across languages.\n-- cppCounter returns 1, pyDouble(1) = 2, cppCounter returns 2, pyAdd(2,2) = 4\neffectChain :: <IO> Int\neffectChain = do\n    a <- cppCounter\n    let b = pyDouble a\n    c <- cppCounter\n    pyAdd b c\n\n-- Test 2: Fan-out of effectful result to multiple cross-language calls.\n-- pyCounter returns 1, cppDouble(1) = 2, cppAdd(1,1) = 2, pyAdd(2,2) = 4\neffectFanOut :: <IO> Int\neffectFanOut = do\n    x <- pyCounter\n    let a = cppDouble x\n        b = cppAdd x x\n    pyAdd a b\n\n-- Test 3: Let-bound cross-language result forwarded to two foreign calls.\n-- cppMakePair(3,7) = [3,7], cppIncVec = [4,8], pySumVec = 12, twice = 24\nletForwardedTwice :: Int\nletForwardedTwice =\n    let xs = cppMakePair 3 7\n        a = pySumVec (cppIncVec xs)\n        b = pySumVec (cppIncVec xs)\n    in a + b\n\n-- Test 4: Nested let where each binding crosses a language boundary.\n-- cppReplicateStr(3, \"ab\") = \"ababab\" (len 6)\n-- pyReplicateStr(2, \"ababab\") = \"abababababab\" (len 12)\n-- cppLenStr(\"abababababab\") = 12\nnestedLetCross :: Int\nnestedLetCross =\n    let s1 = cppReplicateStr 3 \"ab\"\n        s2 = pyReplicateStr 2 s1\n    in cppLenStr s2\n\n-- Test 5: Lists crossing boundaries with append operations.\n-- pyMakePair(1,2) = [1,2], cppMakePair(3,4) = [3,4]\n-- pyAppendVec([1,2], [3,4]) = [1,2,3,4], cppSumVec = 10\nlistAppendCross :: Int\nlistAppendCross =\n    let a = pyMakePair 1 2\n        b = cppMakePair 3 4\n        c = pyAppendVec a b\n    in cppSumVec c\n\n-- Test 6: Long chain where each result is forwarded.\n-- cppMakePair(1,1) = [1,1], 8 rounds of incVec: [9,9], sum = 18\nresultForwardChain :: Int\nresultForwardChain =\n    let xs = cppMakePair 1 1\n        xs2 = pyIncVec xs\n        xs3 = cppIncVec xs2\n        xs4 = pyIncVec xs3\n        xs5 = cppIncVec xs4\n        xs6 = pyIncVec xs5\n        xs7 = cppIncVec xs6\n        xs8 = pyIncVec xs7\n        xs9 = cppIncVec xs8\n    in pySumVec xs9\n\n-- Test 7: Effect evaluation in let-binding RHS crossing languages.\n-- cppCounter returns 1, pyDouble(1) = 2, cppAdd(2,2) = 4\neffectInLetRhs :: <IO> Int\neffectInLetRhs = do\n    x <- cppCounter\n    let y = pyDouble x\n    cppAdd y y\n\n-- Test 8: Value forwarded to three different cross-language calls.\n-- pyMakePair(5,10) = [5,10]\n-- cppSumVec([5,10]) = 15\n-- cppIncVec([5,10]) = [6,11], pySumVec([6,11]) = 17\n-- pyIncVec([5,10]) = [6,11], cppSumVec([6,11]) = 17\n-- Total = 15 + 17 + 17 = 49\ntripleForward :: Int\ntripleForward =\n    let xs = pyMakePair 5 10\n        a = cppSumVec xs\n        b = pySumVec (cppIncVec xs)\n        c = cppSumVec (pyIncVec xs)\n    in a + b + c\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-15/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- bigStrRoundTrip ---\" > obs.txt\n\t./nexus bigStrRoundTrip >> obs.txt 2> obs.err\n\techo \"--- bigVecRoundTrip ---\" >> obs.txt\n\t./nexus bigVecRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- vecOfStrsRoundTrip ---\" >> obs.txt\n\t./nexus vecOfStrsRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- nestedVecRoundTrip ---\" >> obs.txt\n\t./nexus nestedVecRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- bigStrChain ---\" >> obs.txt\n\t./nexus bigStrChain >> obs.txt 2>> obs.err\n\techo \"--- bigVecChain ---\" >> obs.txt\n\t./nexus bigVecChain >> obs.txt 2>> obs.err\n\techo \"--- manyBigStrings ---\" >> obs.txt\n\t./nexus manyBigStrings >> obs.txt 2>> obs.err\n\techo \"--- manyNestedVecs ---\" >> obs.txt\n\t./nexus manyNestedVecs >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-15/bigdata.hpp",
    "content": "#ifndef __BIGDATA_HPP__\n#define __BIGDATA_HPP__\n\n#include <string>\n#include <vector>\n\nstd::string bigStr(int n) {\n    return std::string(n, 'A');\n}\n\nint bigStrLen(const std::string& s) {\n    return (int)s.size();\n}\n\nstd::vector<int> bigVec(int n) {\n    std::vector<int> v(n);\n    for (int i = 0; i < n; i++) v[i] = i;\n    return v;\n}\n\nint bigVecSum(const std::vector<int>& v) {\n    long long s = 0;\n    for (int x : v) s += x;\n    return (int)(s % 1000000007);\n}\n\nstd::vector<std::string> vecOfStrs(int n, int m) {\n    std::string s(m, 'B');\n    return std::vector<std::string>(n, s);\n}\n\nint sumStrLens(const std::vector<std::string>& vs) {\n    int total = 0;\n    for (const auto& s : vs) total += (int)s.size();\n    return total;\n}\n\nstd::vector<std::vector<int>> nestedVec(int outer, int inner) {\n    std::vector<int> row(inner);\n    for (int i = 0; i < inner; i++) row[i] = i + 1;\n    return std::vector<std::vector<int>>(outer, row);\n}\n\nint nestedSum(const std::vector<std::vector<int>>& vv) {\n    int s = 0;\n    for (const auto& v : vv)\n        for (int x : v)\n            s += x;\n    return s;\n}\n\nstd::vector<std::string> idVecStr(const std::vector<std::string>& vs) {\n    return vs;\n}\n\nstd::vector<std::vector<int>> idNestedVec(const std::vector<std::vector<int>>& vv) {\n    return vv;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-15/bigdata.py",
    "content": "def bigStr(n):\n    return \"A\" * n\n\ndef bigStrLen(s):\n    return len(s)\n\ndef bigVec(n):\n    return list(range(n))\n\ndef bigVecSum(v):\n    return sum(v) % 1000000007\n\ndef vecOfStrs(n, m):\n    return [\"B\" * m] * n\n\ndef sumStrLens(vs):\n    return sum(len(s) for s in vs)\n\ndef nestedVec(outer, inner):\n    row = list(range(1, inner + 1))\n    return [list(row) for _ in range(outer)]\n\ndef nestedSum(vv):\n    return sum(x for v in vv for x in v)\n\ndef idVecStr(vs):\n    return list(vs)\n\ndef idNestedVec(vv):\n    return [list(v) for v in vv]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-15/exp.txt",
    "content": "--- bigStrRoundTrip ---\n10000\n--- bigVecRoundTrip ---\n12497500\n--- vecOfStrsRoundTrip ---\n5000\n--- nestedVecRoundTrip ---\n2100\n--- bigStrChain ---\n5000\n--- bigVecChain ---\n999000\n--- manyBigStrings ---\n20000\n--- manyNestedVecs ---\n2200\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-15/main.loc",
    "content": "-- SHM large data tests: exercises sub-allocation freeing with big objects\n-- crossing language boundaries. Large strings and arrays have sub-allocations\n-- in SHM that require shfree_by_schema for proper recursive cleanup.\n\nmodule main\n  ( bigStrRoundTrip\n  , bigVecRoundTrip\n  , vecOfStrsRoundTrip\n  , nestedVecRoundTrip\n  , bigStrChain\n  , bigVecChain\n  , manyBigStrings\n  , manyNestedVecs\n  )\n\nimport root-cpp\nimport root-py\n\nsource Cpp from \"bigdata.hpp\"\n  ( \"bigStr\" as cppBigStr\n  , \"bigStrLen\" as cppBigStrLen\n  , \"bigVec\" as cppBigVec\n  , \"bigVecSum\" as cppBigVecSum\n  , \"vecOfStrs\" as cppVecOfStrs\n  , \"sumStrLens\" as cppSumStrLens\n  , \"nestedVec\" as cppNestedVec\n  , \"nestedSum\" as cppNestedSum\n  , \"idVecStr\" as cppIdVecStr\n  , \"idNestedVec\" as cppIdNestedVec\n  )\n\nsource Py from \"bigdata.py\"\n  ( \"bigStr\" as pyBigStr\n  , \"bigStrLen\" as pyBigStrLen\n  , \"bigVec\" as pyBigVec\n  , \"bigVecSum\" as pyBigVecSum\n  , \"vecOfStrs\" as pyVecOfStrs\n  , \"sumStrLens\" as pySumStrLens\n  , \"nestedVec\" as pyNestedVec\n  , \"nestedSum\" as pyNestedSum\n  , \"idVecStr\" as pyIdVecStr\n  , \"idNestedVec\" as pyIdNestedVec\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\ncppBigStr :: Int -> Str\ncppBigStrLen :: Str -> Int\ncppBigVec :: Int -> [Int]\ncppBigVecSum :: [Int] -> Int\ncppVecOfStrs :: Int -> Int -> [Str]\ncppSumStrLens :: [Str] -> Int\ncppNestedVec :: Int -> Int -> [[Int]]\ncppNestedSum :: [[Int]] -> Int\ncppIdVecStr :: [Str] -> [Str]\ncppIdNestedVec :: [[Int]] -> [[Int]]\n\npyBigStr :: Int -> Str\npyBigStrLen :: Str -> Int\npyBigVec :: Int -> [Int]\npyBigVecSum :: [Int] -> Int\npyVecOfStrs :: Int -> Int -> [Str]\npySumStrLens :: [Str] -> Int\npyNestedVec :: Int -> Int -> [[Int]]\npyNestedSum :: [[Int]] -> Int\npyIdVecStr :: [Str] -> [Str]\npyIdNestedVec :: [[Int]] -> [[Int]]\n\n\n-- Test 1: Big string (10000 chars) created in C++, measured in Python.\nbigStrRoundTrip :: Int\nbigStrRoundTrip = pyBigStrLen (cppBigStr 10000)\n\n-- Test 2: Big vector (5000 elements) created in Python, summed in C++.\n-- sum(0..4999) = 12497500\nbigVecRoundTrip :: Int\nbigVecRoundTrip = cppBigVecSum (pyBigVec 5000)\n\n-- Test 3: Vector of 50 strings (each 100 chars) round-tripped.\n-- 50 * 100 = 5000\nvecOfStrsRoundTrip :: Int\nvecOfStrsRoundTrip = pySumStrLens (cppVecOfStrs 50 100)\n\n-- Test 4: Nested vector (10 outer x 20 inner) round-tripped.\n-- 10 * sum(1..20) = 10 * 210 = 2100\nnestedVecRoundTrip :: Int\nnestedVecRoundTrip = pyNestedSum (cppNestedVec 10 20)\n\n-- Test 5: Large string bounced between languages.\nbigStrChain :: Int\nbigStrChain =\n    let s1 = pyBigStr 5000\n        s2 = cppBigStrLen s1\n        s3 = pyBigStr s2\n        s4 = cppBigStrLen s3\n        s5 = pyBigStr s4\n    in cppBigStrLen s5\n\n-- Test 6: Large vector created and summed in alternating languages.\nbigVecChain :: Int\nbigVecChain =\n    let xs = cppBigVec 1000\n        s1 = pyBigVecSum xs\n        xs2 = cppBigVec 1000\n        s2 = pyBigVecSum xs2\n    in s1 + s2\n\n-- Test 7: Many sequential big string allocations (10 rounds).\n-- Without SHM cleanup, these large allocations would exhaust volumes fast.\nmanyBigStrings :: Int\nmanyBigStrings =\n    let a1 = pyBigStrLen (cppBigStr 2000)\n        a2 = cppBigStrLen (pyBigStr 2000)\n        a3 = pyBigStrLen (cppBigStr 2000)\n        a4 = cppBigStrLen (pyBigStr 2000)\n        a5 = pyBigStrLen (cppBigStr 2000)\n        a6 = cppBigStrLen (pyBigStr 2000)\n        a7 = pyBigStrLen (cppBigStr 2000)\n        a8 = cppBigStrLen (pyBigStr 2000)\n        a9 = pyBigStrLen (cppBigStr 2000)\n        a10 = cppBigStrLen (pyBigStr 2000)\n    in a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9 + a10\n\n-- Test 8: Many sequential nested vector allocations (8 rounds).\n-- 5 * sum(1..10) = 5 * 55 = 275\nmanyNestedVecs :: Int\nmanyNestedVecs =\n    let n1 = pyNestedSum (cppNestedVec 5 10)\n        n2 = cppNestedSum (pyNestedVec 5 10)\n        n3 = pyNestedSum (cppNestedVec 5 10)\n        n4 = cppNestedSum (pyNestedVec 5 10)\n        n5 = pyNestedSum (cppNestedVec 5 10)\n        n6 = cppNestedSum (pyNestedVec 5 10)\n        n7 = pyNestedSum (cppNestedVec 5 10)\n        n8 = cppNestedSum (pyNestedVec 5 10)\n    in n1 + n2 + n3 + n4 + n5 + n6 + n7 + n8\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-16/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- manyCallsSmall ---\" > obs.txt\n\t./nexus manyCallsSmall >> obs.txt 2> obs.err\n\techo \"--- manyCallsLarge ---\" >> obs.txt\n\t./nexus manyCallsLarge >> obs.txt 2>> obs.err\n\techo \"--- chainCppPy ---\" >> obs.txt\n\t./nexus chainCppPy >> obs.txt 2>> obs.err\n\techo \"--- chainPyCpp ---\" >> obs.txt\n\t./nexus chainPyCpp >> obs.txt 2>> obs.err\n\techo \"--- deepDoChain ---\" >> obs.txt\n\t./nexus deepDoChain >> obs.txt 2>> obs.err\n\techo \"--- nestedLetCross ---\" >> obs.txt\n\t./nexus nestedLetCross >> obs.txt 2>> obs.err\n\techo \"--- listAppendCross ---\" >> obs.txt\n\t./nexus listAppendCross >> obs.txt 2>> obs.err\n\techo \"--- resultForwardChain ---\" >> obs.txt\n\t./nexus resultForwardChain >> obs.txt 2>> obs.err\n\techo \"--- bigStrRoundTrip ---\" >> obs.txt\n\t./nexus bigStrRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- bigVecRoundTrip ---\" >> obs.txt\n\t./nexus bigVecRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- vecOfStrsRoundTrip ---\" >> obs.txt\n\t./nexus vecOfStrsRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- nestedVecRoundTrip ---\" >> obs.txt\n\t./nexus nestedVecRoundTrip >> obs.txt 2>> obs.err\n\techo \"--- bigStrChain ---\" >> obs.txt\n\t./nexus bigStrChain >> obs.txt 2>> obs.err\n\techo \"--- manyBigStrings ---\" >> obs.txt\n\t./nexus manyBigStrings >> obs.txt 2>> obs.err\n\techo \"--- manyNestedVecs ---\" >> obs.txt\n\t./nexus manyNestedVecs >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-16/cstress.hpp",
    "content": "#ifndef __CSTRESS_HPP__\n#define __CSTRESS_HPP__\n\n#include <string>\n#include <vector>\n\nstd::string cmakeStr(int n) {\n    return std::string(n, 'x');\n}\n\nint cstrLen(const std::string& s) {\n    return (int)s.size();\n}\n\nint csumList(const std::vector<int>& xs) {\n    int total = 0;\n    for (int x : xs) total += x;\n    return total;\n}\n\nstd::vector<int> cmakeRange(int n) {\n    std::vector<int> result(n);\n    for (int i = 0; i < n; i++) result[i] = i + 1;\n    return result;\n}\n\nstd::vector<int> cincAll(const std::vector<int>& xs) {\n    std::vector<int> result(xs.size());\n    for (size_t i = 0; i < xs.size(); i++) result[i] = xs[i] + 1;\n    return result;\n}\n\nstd::string cidStr(const std::string& s) {\n    return s;\n}\n\nstd::vector<int> cidList(const std::vector<int>& xs) {\n    return xs;\n}\n\nint cdoubleIt(int x) {\n    return x * 2;\n}\n\nint caddTwo(int a, int b) {\n    return a + b;\n}\n\nstd::vector<int> cmakePair(int a, int b) {\n    return {a, b};\n}\n\nstd::string creplicateStr(int n, const std::string& s) {\n    std::string result;\n    for (int i = 0; i < n; i++) result += s;\n    return result;\n}\n\nstd::vector<int> cappendVec(const std::vector<int>& a, const std::vector<int>& b) {\n    std::vector<int> result = a;\n    result.insert(result.end(), b.begin(), b.end());\n    return result;\n}\n\nstd::vector<std::string> cvecOfStrs(int n, int m) {\n    std::string s(m, 'B');\n    return std::vector<std::string>(n, s);\n}\n\nint csumStrLens(const std::vector<std::string>& vs) {\n    int total = 0;\n    for (const auto& s : vs) total += (int)s.size();\n    return total;\n}\n\nstd::vector<std::vector<int>> cnestedVec(int outer, int inner) {\n    std::vector<int> row(inner);\n    for (int i = 0; i < inner; i++) row[i] = i + 1;\n    return std::vector<std::vector<int>>(outer, row);\n}\n\nint cnestedSum(const std::vector<std::vector<int>>& vv) {\n    int s = 0;\n    for (const auto& v : vv)\n        for (int x : v)\n            s += x;\n    return s;\n}\n\nint ccounter() {\n    static int n = 0;\n    return ++n;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-16/exp.txt",
    "content": "--- manyCallsSmall ---\n10\n--- manyCallsLarge ---\n1000\n--- chainCppPy ---\n5350\n--- chainPyCpp ---\n5350\n--- deepDoChain ---\n255\n--- nestedLetCross ---\n12\n--- listAppendCross ---\n10\n--- resultForwardChain ---\n18\n--- bigStrRoundTrip ---\n10000\n--- bigVecRoundTrip ---\n12502500\n--- vecOfStrsRoundTrip ---\n5000\n--- nestedVecRoundTrip ---\n2100\n--- bigStrChain ---\n5000\n--- manyBigStrings ---\n20000\n--- manyNestedVecs ---\n2200\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-16/main.loc",
    "content": "-- SHM reference counting comprehensive test.\n-- Tests stress, edge cases, and large data patterns.\n--\n-- NOTE: consecutive let-bindings with foreign calls to the same pool trigger\n-- a pre-existing codegen bug (documented in claude-test-17). Tests here\n-- use nested function application to avoid that bug and focus on SHM testing.\n\nmodule main\n  ( manyCallsSmall\n  , manyCallsLarge\n  , chainCppPy\n  , chainPyCpp\n  , deepDoChain\n  , nestedLetCross\n  , listAppendCross\n  , resultForwardChain\n  , bigStrRoundTrip\n  , bigVecRoundTrip\n  , vecOfStrsRoundTrip\n  , nestedVecRoundTrip\n  , bigStrChain\n  , manyBigStrings\n  , manyNestedVecs\n  )\n\nimport root-cpp\nimport root-py\n\nsource Cpp from \"cstress.hpp\"\n  ( \"cmakeStr\" as cppMakeStr\n  , \"cstrLen\" as cppStrLen\n  , \"csumList\" as cppSumList\n  , \"cmakeRange\" as cppMakeRange\n  , \"cincAll\" as cppIncAll\n  , \"cidStr\" as cppIdStr\n  , \"cidList\" as cppIdList\n  , \"cdoubleIt\" as cppDouble\n  , \"caddTwo\" as cppAdd\n  , \"cmakePair\" as cppMakePair\n  , \"creplicateStr\" as cppReplicateStr\n  , \"cappendVec\" as cppAppendVec\n  , \"cvecOfStrs\" as cppVecOfStrs\n  , \"csumStrLens\" as cppSumStrLens\n  , \"cnestedVec\" as cppNestedVec\n  , \"cnestedSum\" as cppNestedSum\n  )\n\nsource Py from \"pstress.py\"\n  ( \"pmakeStr\" as pyMakeStr\n  , \"pstrLen\" as pyStrLen\n  , \"psumList\" as pySumList\n  , \"pmakeRange\" as pyMakeRange\n  , \"pincAll\" as pyIncAll\n  , \"pidStr\" as pyIdStr\n  , \"pidList\" as pyIdList\n  , \"pdoubleIt\" as pyDouble\n  , \"paddTwo\" as pyAdd\n  , \"pmakePair\" as pyMakePair\n  , \"preplicateStr\" as pyReplicateStr\n  , \"pappendVec\" as pyAppendVec\n  , \"pvecOfStrs\" as pyVecOfStrs\n  , \"psumStrLens\" as pySumStrLens\n  , \"pnestedVec\" as pyNestedVec\n  , \"pnestedSum\" as pyNestedSum\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\ncppMakeStr :: Int -> Str\ncppStrLen :: Str -> Int\ncppSumList :: [Int] -> Int\ncppMakeRange :: Int -> [Int]\ncppIncAll :: [Int] -> [Int]\ncppIdStr :: Str -> Str\ncppIdList :: [Int] -> [Int]\ncppDouble :: Int -> Int\ncppAdd :: Int -> Int -> Int\ncppMakePair :: Int -> Int -> [Int]\ncppReplicateStr :: Int -> Str -> Str\ncppAppendVec :: [Int] -> [Int] -> [Int]\ncppVecOfStrs :: Int -> Int -> [Str]\ncppSumStrLens :: [Str] -> Int\ncppNestedVec :: Int -> Int -> [[Int]]\ncppNestedSum :: [[Int]] -> Int\n\npyMakeStr :: Int -> Str\npyStrLen :: Str -> Int\npySumList :: [Int] -> Int\npyMakeRange :: Int -> [Int]\npyIncAll :: [Int] -> [Int]\npyIdStr :: Str -> Str\npyIdList :: [Int] -> [Int]\npyDouble :: Int -> Int\npyAdd :: Int -> Int -> Int\npyMakePair :: Int -> Int -> [Int]\npyReplicateStr :: Int -> Str -> Str\npyAppendVec :: [Int] -> [Int] -> [Int]\npyVecOfStrs :: Int -> Int -> [Str]\npySumStrLens :: [Str] -> Int\npyNestedVec :: Int -> Int -> [[Int]]\npyNestedSum :: [[Int]] -> Int\n\n------------------------------------------------------------\n-- STRESS TESTS: many sequential cross-language calls\n------------------------------------------------------------\n\n-- Test 1: 15 alternating cross-language calls using nested application.\n-- cppMk -> pyLen -> cppMk -> pyLen -> ... -> pyLen\n-- Each call allocates SHM. Without cleanup, would exhaust volumes.\nmanyCallsSmall :: Int\nmanyCallsSmall =\n    pyStrLen (cppMakeStr (pyStrLen (cppMakeStr (pyStrLen (cppMakeStr (pyStrLen (cppMakeStr (pyStrLen (cppMakeStr (pyStrLen (cppMakeStr (pyStrLen (cppMakeStr (pyStrLen (cppMakeStr 10)))))))))))))))\n\n-- Test 2: Large strings (1000 chars) bounced between languages 10 times.\nmanyCallsLarge :: Int\nmanyCallsLarge =\n    cppStrLen (pyIdStr (cppIdStr (pyIdStr (cppIdStr (pyIdStr (cppIdStr (pyIdStr (cppIdStr (pyIdStr (cppMakeStr 1000))))))))))\n\n-- Test 3: List chain C++->Py->C++->Py->C++ with incrementing.\n-- [1..100] -> +1 -> +1 -> +1 -> sum = sum(4..103) = 5350\nchainCppPy :: Int\nchainCppPy = cppSumList (pyIncAll (cppIncAll (pyIncAll (cppMakeRange 100))))\n\n-- Test 4: Opposite direction.\nchainPyCpp :: Int\nchainPyCpp = pySumList (cppIncAll (pyIncAll (cppIncAll (pyMakeRange 100))))\n\n-- Test 5: Deep chain with 20 cross-language list transformations.\n-- [1..10] + 20*1 = [21..30], sum = sum(21..30) = 255\ndeepDoChain :: Int\ndeepDoChain = cppSumList (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (cppMakeRange 10)))))))))))))))))))))\n\n------------------------------------------------------------\n-- EDGE CASES: reference counting patterns (no let bindings)\n------------------------------------------------------------\n\n-- Test 6: Nested lets crossing boundaries (pure nesting, no let).\nnestedLetCross :: Int\nnestedLetCross = cppStrLen (pyReplicateStr 2 (cppReplicateStr 3 \"ab\"))\n\n-- Test 7: List append across languages.\nlistAppendCross :: Int\nlistAppendCross = cppSumList (pyAppendVec (pyMakePair 1 2) (cppMakePair 3 4))\n\n-- Test 8: 8 rounds of alternating incVec.\n-- [1,1] + 8*1 = [9,9], sum = 18\nresultForwardChain :: Int\nresultForwardChain = pySumList (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppIncAll (pyIncAll (cppMakePair 1 1)))))))))\n\n------------------------------------------------------------\n-- LARGE DATA: sub-allocation freeing\n------------------------------------------------------------\n\n-- Test 9: Big string (10000 chars) round-tripped.\nbigStrRoundTrip :: Int\nbigStrRoundTrip = pyStrLen (cppMakeStr 10000)\n\n-- Test 10: Big vector round-tripped. pyMakeRange 5000 = [1..5000]\n-- cppSumList [1..5000] = 12502500\nbigVecRoundTrip :: Int\nbigVecRoundTrip = cppSumList (pyMakeRange 5000)\n\n-- Test 11: Vector of 50 strings (100 chars each) round-tripped.\nvecOfStrsRoundTrip :: Int\nvecOfStrsRoundTrip = pySumStrLens (cppVecOfStrs 50 100)\n\n-- Test 12: Nested vector (10x20) round-tripped.\nnestedVecRoundTrip :: Int\nnestedVecRoundTrip = pyNestedSum (cppNestedVec 10 20)\n\n-- Test 13: Large string bounced between languages via nested calls.\nbigStrChain :: Int\nbigStrChain = cppStrLen (pyMakeStr (cppStrLen (pyMakeStr (cppStrLen (pyMakeStr 5000)))))\n\n-- Test 14: Many big string allocations (10 rounds, 2000 chars each).\nmanyBigStrings :: Int\nmanyBigStrings =\n    pyStrLen (cppMakeStr 2000) + cppStrLen (pyMakeStr 2000)\n    + pyStrLen (cppMakeStr 2000) + cppStrLen (pyMakeStr 2000)\n    + pyStrLen (cppMakeStr 2000) + cppStrLen (pyMakeStr 2000)\n    + pyStrLen (cppMakeStr 2000) + cppStrLen (pyMakeStr 2000)\n    + pyStrLen (cppMakeStr 2000) + cppStrLen (pyMakeStr 2000)\n\n-- Test 15: Many nested vector allocations (8 rounds).\nmanyNestedVecs :: Int\nmanyNestedVecs =\n    pyNestedSum (cppNestedVec 5 10) + cppNestedSum (pyNestedVec 5 10)\n    + pyNestedSum (cppNestedVec 5 10) + cppNestedSum (pyNestedVec 5 10)\n    + pyNestedSum (cppNestedVec 5 10) + cppNestedSum (pyNestedVec 5 10)\n    + pyNestedSum (cppNestedVec 5 10) + cppNestedSum (pyNestedVec 5 10)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-16/pstress.py",
    "content": "def pmakeStr(n):\n    return \"y\" * n\n\ndef pstrLen(s):\n    return len(s)\n\ndef psumList(xs):\n    return sum(xs)\n\ndef pmakeRange(n):\n    return list(range(1, n + 1))\n\ndef pincAll(xs):\n    return [x + 1 for x in xs]\n\ndef pidStr(s):\n    return s\n\ndef pidList(xs):\n    return list(xs)\n\ndef pdoubleIt(x):\n    return x * 2\n\ndef paddTwo(a, b):\n    return a + b\n\ndef pmakePair(a, b):\n    return [a, b]\n\ndef preplicateStr(n, s):\n    return s * n\n\ndef pappendVec(a, b):\n    return list(a) + list(b)\n\ndef pvecOfStrs(n, m):\n    return [\"B\" * m] * n\n\ndef psumStrLens(vs):\n    return sum(len(s) for s in vs)\n\ndef pnestedVec(outer, inner):\n    row = list(range(1, inner + 1))\n    return [list(row) for _ in range(outer)]\n\ndef pnestedSum(vv):\n    return sum(x for v in vv for x in v)\n\n_counter = 0\ndef pcounter():\n    global _counter\n    _counter += 1\n    return _counter\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-17/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- foo ---\" > obs.txt\n\t./nexus foo >> obs.txt 2>> obs.err\n\techo \"--- bar ---\" >> obs.txt\n\t./nexus bar >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-17/exp.txt",
    "content": "--- foo ---\n3\n--- bar ---\n[1,2,3]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-17/helper.py",
    "content": "def my_len(xs):\n    return len(xs)\n\ndef make_three():\n    return [1, 2, 3]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-17/main.loc",
    "content": "-- Test type-level Nat literals in type signatures.\n-- Verifies that integer literals parse correctly in type positions\n-- and that types parameterized by Nat values typecheck.\n\nmodule main (foo, bar)\n\nimport root-py\n\ntype Py => Int = \"int\"\n\ntype SizedList n a = [a]\ntype Py => (SizedList n a) = \"list\" a\n\n-- A function that takes a \"SizedList 3 Int\" and returns its length\nmyLen :: SizedList 3 Int -> Int\nsource Py from \"helper.py\" (\"my_len\" as myLen)\n\n-- A function that creates a SizedList 3 Int\nmakeThree :: SizedList 3 Int\nsource Py from \"helper.py\" (\"make_three\" as makeThree)\n\nfoo :: Int\nfoo = myLen makeThree\n\nbar :: SizedList 3 Int\nbar = makeThree\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-18/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- foo ---\" > obs.txt\n\t./nexus foo >> obs.txt 2>> obs.err\n\techo \"--- bar ---\" >> obs.txt\n\t./nexus bar >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-18/exp.txt",
    "content": "--- foo ---\n[30,40,50]\n--- bar ---\n[10,20,30,40,50]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-18/helper.py",
    "content": "def my_id(xs):\n    return xs\n\ndef my_append(xs, ys):\n    return xs + ys\n\ndef make_two():\n    return [10, 20]\n\ndef make_three():\n    return [30, 40, 50]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-18/main.loc",
    "content": "-- Test type-level Nat literals and arithmetic in type signatures.\n-- Verifies:\n--   1. Nat literals unify with forall-bound type variables\n--   2. NatAddU (m + n) in return types works through codegen\n\nmodule main (foo, bar)\n\nimport root-py\n\ntype Py => Int = \"int\"\n\ntype SizedList n a = [a]\ntype Py => (SizedList n a) = \"list\" a\n\n-- Polymorphic function with Nat parameter\nmyId :: SizedList n a -> SizedList n a\nsource Py from \"helper.py\" (\"my_id\" as myId)\n\n-- Function with Nat arithmetic in return type\nmyAppend :: SizedList m a -> SizedList n a -> SizedList (m + n) a\nsource Py from \"helper.py\" (\"my_append\" as myAppend)\n\nmakeTwo :: SizedList 2 Int\nsource Py from \"helper.py\" (\"make_two\" as makeTwo)\n\nmakeThree :: SizedList 3 Int\nsource Py from \"helper.py\" (\"make_three\" as makeThree)\n\nfoo :: SizedList 3 Int\nfoo = myId makeThree\n\nbar :: SizedList (2 + 3) Int\nbar = myAppend makeTwo makeThree\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-19/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- foo ---\" > obs.txt\n\t./nexus foo >> obs.txt 2>> obs.err\n\techo \"--- bar ---\" >> obs.txt\n\t./nexus bar >> obs.txt 2>> obs.err\n\techo \"--- baz ---\" >> obs.txt\n\t./nexus baz >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-19/exp.txt",
    "content": "--- foo ---\n[10,20,30,40,50]\n--- bar ---\n[30,40,50,10,20]\n--- baz ---\n[10,20,30,40,50]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-19/helper.py",
    "content": "def my_append(xs, ys):\n    return xs + ys\n\ndef make_two():\n    return [10, 20]\n\ndef make_three():\n    return [30, 40, 50]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-19/main.loc",
    "content": "-- Test SOP-based Nat constraint solving.\n-- Verifies:\n--   1. Cross-form equality: (2 + 3) ~ 5 via SOP normalization\n--   2. Commutativity: (3 + 2) ~ (2 + 3)\n--   3. Nat variable solving: ?m + 3 = 5 => ?m = 2\n\nmodule main (foo, bar, baz)\n\nimport root-py\n\ntype Py => Int = \"int\"\n\ntype SizedList n a = [a]\ntype Py => (SizedList n a) = \"list\" a\n\nmyAppend :: SizedList m a -> SizedList n a -> SizedList (m + n) a\nsource Py from \"helper.py\" (\"my_append\" as myAppend)\n\nmakeTwo :: SizedList 2 Int\nsource Py from \"helper.py\" (\"make_two\" as makeTwo)\n\nmakeThree :: SizedList 3 Int\nsource Py from \"helper.py\" (\"make_three\" as makeThree)\n\n-- Cross-form: return type is SizedList (2+3) Int, expected SizedList 5 Int\nfoo :: SizedList 5 Int\nfoo = myAppend makeTwo makeThree\n\n-- Commutativity: myAppend returns SizedList (3+2) Int, expected SizedList (2+3) Int\nbar :: SizedList (2 + 3) Int\nbar = myAppend makeThree makeTwo\n\n-- Mixed: ensure Nat solving through variable instantiation still works\nbaz :: SizedList (2 + 3) Int\nbaz = myAppend makeTwo makeThree\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus pythonToCpp 3 2 > obs.txt 2> obs.err\n\t./nexus cppToPython 4 7 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-2/exp.txt",
    "content": "26\n27\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-2/main.loc",
    "content": "module main (pythonToCpp, cppToPython)\n\nimport root-py\nimport root-cpp\n\n-- These helper functions force language selection through identity functions\n-- idpy and idcpp come from root-py and root-cpp respectively\npyAdd x = idpy . (+) x\ncppMul x = idcpp . (*) x\n\n-- Compose Python add followed by C++ multiply\npythonToCpp x y = cppMul y (pyAdd x (10.0 :: Real))\n\n-- Compose C++ multiply followed by Python add\ncppToPython x y = pyAdd y (cppMul x (5.0 :: Real))\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-20/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err && echo \"UNEXPECTED_SUCCESS\" > obs.txt || echo \"build_rejected\" > obs.txt\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-20/exp.txt",
    "content": "build_rejected\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-20/helper.py",
    "content": "def my_append(xs, ys):\n    return xs + ys\n\ndef make_two():\n    return [10, 20]\n\ndef make_three():\n    return [30, 40, 50]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-20/main.loc",
    "content": "-- Negative test: Nat constraint mismatch should be rejected at compile time.\n-- SizedList (2 + 3) Int has 5 elements, but foo claims 4.\n\nmodule main (foo)\n\nimport root-py\n\ntype Py => Int = \"int\"\n\ntype SizedList n a = [a]\ntype Py => (SizedList n a) = \"list\" a\n\nmyAppend :: SizedList m a -> SizedList n a -> SizedList (m + n) a\nsource Py from \"helper.py\" (\"my_append\" as myAppend)\n\nmakeTwo :: SizedList 2 Int\nsource Py from \"helper.py\" (\"make_two\" as makeTwo)\n\nmakeThree :: SizedList 3 Int\nsource Py from \"helper.py\" (\"make_three\" as makeThree)\n\n-- ERROR: 2 + 3 = 5, not 4\nfoo :: SizedList 4 Int\nfoo = myAppend makeTwo makeThree\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus threeLangChain 5 > obs.txt 2> obs.err\n\t./nexus reverseChain 25 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus poolsgc\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-3/exp.txt",
    "content": "-11\n25\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-3/main.loc",
    "content": "module main (threeLangChain, reverseChain)\n\nimport root-py\nimport root-cpp\nimport root-r\n\n-- Force language selection with identity functions\npyAdd x = idpy . (+) x\ncppMul x = idcpp . (*) x\nrSub x = idr . (-) x\n\n-- Chain: Python add -> C++ multiply -> R subtract\n-- Input goes through all three languages\nthreeLangChain x = rSub 5.0 (cppMul 2.0 (pyAdd 3.0 (x :: Real)))\n\n-- Reverse chain: R subtract -> C++ multiply -> Python add\nreverseChain x = pyAdd 10.0 (cppMul 3.0 (rSub x (20.0 :: Real)))\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc > /dev/null 2> build.err\n\t./nexus processNumbers '[1,2,3]' > obs.txt 2> obs.err\n\t./nexus composeMaps '[5.0,10.0]' 2>> obs.err  >> obs.txt\n\t./nexus nestedLambda 10 '[1,2,3]' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-4/exp.txt",
    "content": "[12,14,16]\n[18,33]\n[12,14,16]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-4/main.loc",
    "content": "module main (processNumbers, composeMaps, nestedLambda)\n\nimport root-py\nimport root-cpp\n\n-- Use map with lambda across languages\n-- Python map, C++ operations\nprocessNumbers :: [Int] -> [Int]\nprocessNumbers xs = map (\\x -> idcpp (2 * (x + 5))) xs\n\n-- Compose two map operations\n-- First map in Python, second map in C++\ncomposeMaps :: [Real] -> [Real]\ncomposeMaps xs = map (\\x -> idcpp (x * 3.0)) (map (\\y -> idpy (y + 1.0)) xs)\n\n-- Nested lambda with multiple arguments captured\n-- Tests closure and cross-language lambda evaluation\nnestedLambda :: Int -> [Int] -> [Int]\nnestedLambda offset xs = map (\\x -> offset + x * 2) xs\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testStep --seed 42 > obs.txt 2> obs.err\n\t./nexus play --seed 12345 --time 0.0 --steps 1 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-5/exp.txt",
    "content": "1\n43\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-5/game.hpp",
    "content": "#include <vector>\n#include <random>\n\n// Initialize a random Game of Life board\nstd::vector<std::vector<int>> initGame(int seed, int width, int height) {\n    std::mt19937 gen(seed);\n    std::uniform_int_distribution<> dis(0, 1);\n\n    std::vector<std::vector<int>> board(height, std::vector<int>(width));\n    for (int i = 0; i < height; i++) {\n        for (int j = 0; j < width; j++) {\n            board[i][j] = dis(gen);\n        }\n    }\n    return board;\n}\n\n// Apply one step of Conway's Game of Life\nstd::vector<std::vector<int>> updateGame(std::vector<std::vector<int>> board) {\n    int height = board.size();\n    if (height == 0) return board;\n    int width = board[0].size();\n\n    std::vector<std::vector<int>> newBoard(height, std::vector<int>(width));\n\n    for (int i = 0; i < height; i++) {\n        for (int j = 0; j < width; j++) {\n            int neighbors = 0;\n\n            // Count neighbors (wrap around edges)\n            for (int di = -1; di <= 1; di++) {\n                for (int dj = -1; dj <= 1; dj++) {\n                    if (di == 0 && dj == 0) continue;\n                    int ni = (i + di + height) % height;\n                    int nj = (j + dj + width) % width;\n                    neighbors += board[ni][nj];\n                }\n            }\n\n            // Apply Game of Life rules\n            if (board[i][j] == 1) {\n                // Live cell survives with 2-3 neighbors\n                newBoard[i][j] = (neighbors == 2 || neighbors == 3) ? 1 : 0;\n            } else {\n                // Dead cell becomes alive with exactly 3 neighbors\n                newBoard[i][j] = (neighbors == 3) ? 1 : 0;\n            }\n        }\n    }\n    return newBoard;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-5/game.py",
    "content": "import time\nimport os\n\ndef drawGame(board):\n    \"\"\"Draw the game board and return it (for composition)\"\"\"\n    # Clear screen (works on Unix-like systems)\n    print(\"\\033[2J\\033[H\", end=\"\")  # ANSI escape codes\n\n    # Draw board\n    for row in board:\n        print(''.join(['█' if cell else '·' for cell in row]))\n    print()  # Extra newline\n\n    return board\n\ndef doSleep(board, duration):\n    \"\"\"Sleep for duration seconds and return board (for composition)\"\"\"\n    time.sleep(duration)\n    return board\n\ndef gameLoop(initBoard, updateFn, sleepTime, steps):\n    \"\"\"Main game loop - applies draw, sleep, update for N steps\"\"\"\n    board = initBoard\n\n    for i in range(steps):\n        # Draw current state\n        print(\"\\033[2J\\033[H\", end=\"\")\n        for row in board:\n            print(''.join(['█' if cell else '·' for cell in row]))\n        print(f\"Step: {i+1}/{steps}\")\n\n        # Sleep\n        time.sleep(sleepTime)\n\n        # Update\n        board = updateFn(board)\n\n    # Draw final state\n    print(\"\\033[2J\\033[H\", end=\"\")\n    for row in board:\n        print(''.join(['█' if cell else '·' for cell in row]))\n    print(f\"Step: {steps}/{steps} (Final)\")\n\n    return board\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-5/main.loc",
    "content": "-- desc: Conway's Game of Life - Polyglot Implementation\n-- author: Claude\n--\n-- This module main cross-language composition by implementing\n-- Conway's Game of Life with C++ for computation and Python for display.\nmodule main (playGame, testStep)\n\n-- Type mappings\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Py => List a = \"list\" a\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Py => Real = \"float\"\n\n-- C++ game logic functions\nsource Cpp from \"game.hpp\" (\"initGame\", \"updateGame\")\ninitGame :: Int -> Int -> Int -> [[Int]]\nupdateGame :: [[Int]] -> [[Int]]\n\n-- Python display and timing functions\nsource Py from \"game.py\" (\"drawGame\", \"doSleep\", \"gameLoop\")\ndrawGame :: [[Int]] -> [[Int]]\ndoSleep :: [[Int]] -> Real -> [[Int]]\ngameLoop :: [[Int]] -> ([[Int]] -> [[Int]]) -> Real -> Int -> [[Int]]\n\n-- Compose one game step: draw -> sleep -> update\n-- This demonstrates the composition pattern requested\noneStep :: Real -> [[Int]] -> [[Int]]\noneStep sleepTime board = updateGame (doSleep (drawGame board) sleepTime)\n\n--' Test a single step of the game\n--'\n--' This function creates a small 20x10 board and runs one update cycle.\n--' Returns just the board state without drawing (for testing).\n--'\n--' name: testStep\ntestStep ::\n  --' Random seed for board initialization\n  --' arg: --seed\n  --' metavar: SEED\n  --' default: 42\n  Int ->\n  --' return: The board state after one update\n  [[Int]]\ntestStep seed = updateGame (initGame seed 20 10)\n\n--' Play Conway's Game of Life\n--'\n--' Runs an animated Game of Life simulation in the terminal.\n--' The board is initialized with random cells based on the seed.\n--' C++ handles the game logic while Python manages display and timing.\n--'\n--' name: play\nplayGame ::\n  --' Random seed for initial board state\n  --' arg: -s/--seed\n  --' metavar: SEED\n  --' default: 12345\n  Int ->\n  --' Sleep duration in seconds between frames\n  --' arg: -t/--time\n  --' metavar: SECONDS\n  --' default: 0.5\n  Real ->\n  --' Number of simulation steps to run\n  --' arg: -n/--steps\n  --' metavar: STEPS\n  --' default: 50\n  Int ->\n  --' return: Final board state\n  [[Int]]\nplayGame seed sleepTime steps = gameLoop (initGame seed 40 20) updateGame sleepTime steps\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-5/package.yaml",
    "content": "name: conway\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\n# Files to include when installing with `morloc make --install`\ninclude: [\"game.hpp\", \"game.py\"]\n\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus blur --width 10 --height 10 > obs.txt 2> obs.err\n\t./nexus edges --width 10 --height 10 2>> obs.err  >> obs.txt\n\t./nexus analyze --width 8 --height 8 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/README.md",
    "content": "# Claude-Test-6: Image Filter Pipeline\n\n## Overview\n\nDemonstrates cross-language image processing with Python for I/O, C++ for\ncomputationally intensive filtering, and R for statistical analysis.\n\n## Features Tested\n\n- **Records**: Multiple nested records (ImageMetadata, ColorStats, FilteredResult)\n- **Cross-language composition**: Python → C++ → R\n- **2D arrays**: Image pixel data as `[[Real]]`\n- **Where bindings**: Multi-step computations with intermediate variables\n- **Struct/dict mapping**: Records map to different native types per language\n\n## Architecture\n\n### Python (`image_io.py`)\n- `createTestImage(width, height)`: Generate random test images\n- `pixelsToDict(pixels, metadata, stats)`: Combine results into FilteredResult record\n\n### C++ (`filters.hpp`)\n- `blurFilter(pixels)`: Apply 3x3 box blur\n- `edgeDetect(pixels)`: Sobel-like edge detection\n- `getMetadata(pixels)`: Extract image dimensions\n\n### R (`stats.R`)\n- `computeColorStats(pixels)`: Calculate mean and standard deviation per channel\n\n## Data Flow\n\n1. **analyzeImage**: Python creates image → R computes statistics\n2. **applyBlur**: Python creates image → C++ blurs → R stats → Python assembles result\n3. **detectEdges**: Python creates image → C++ edge detection → R stats → Python assembles result\n\n## Example Usage\n\n```bash\n# Analyze 8x8 test image\n./nexus analyzeImage 8 8\n\n# Apply blur to 10x10 image\n./nexus blur --width 10 --height 10\n\n# Detect edges\n./nexus edges --width 12 --height 12\n```\n\n## What This Tests\n\n1. **Record serialization** across 3 languages\n2. **Nested records** (FilteredResult contains ImageMetadata and ColorStats)\n3. **2D array serialization** (pixel matrices)\n4. **Let bindings** for multi-step pipelines\n5. **Language-specific type mappings** (struct in C++, dict in Python, list in R)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/exp.txt",
    "content": "{\"pixels\":[[88.5124645574542,71.8248344472212,53.2941381091155,89.6736964753596,128.776912874218,158.126214412896,127.944791156772,128.357521761093,82.6137428093435,80.8125762868008],[122.926552194863,100.135815926087,69.3557253934204,100.949185158775,126.914410234863,144.703157661752,100.201469122092,114.952117793293,98.9340371517504,115.551917760727],[160.004508712088,132.255085508328,117.970762254393,119.455582414322,134.488174662077,129.250990759298,109.952305325868,140.6955322057,141.970130060156,157.513868034606],[161.126634383042,138.708434624715,113.221577496731,111.573820225094,112.056880300236,100.869993740308,88.9039857427642,123.56867577441,138.380164300018,165.358832826946],[121.845131803404,110.444706561671,119.434228324492,122.871020241791,133.152006375924,102.052955128652,116.765276859958,124.325497452641,130.561523136145,126.753428183668],[125.78331145061,113.657273142207,109.304724845062,128.439587907295,144.478973500098,104.544028339343,89.4854669464691,75.2229821176182,89.6967919530109,90.3791605185259],[142.931905817769,143.462967660245,153.811988474807,163.839423338309,166.119339407671,148.518321135725,122.274653974189,92.8922191921351,80.9664221319985,69.5962801842219],[153.319829971672,159.387148527789,163.218967992923,164.693434883792,146.92982920377,153.843679700023,128.422653187069,108.030871411127,63.2741636531421,52.3909138975773],[115.391642588102,140.763696188744,162.089080509925,141.838383350666,106.238654044517,142.322452993978,153.064345412625,162.422732350186,112.735047601714,105.459140519644],[99.5268133146645,124.859486446369,152.440523846624,134.281997691943,101.313672062729,129.944907856884,154.412686024922,174.007215150174,127.862291514445,125.580096549724]],\"metadata\":{\"width\":10,\"height\":10,\"channels\":1},\"stats\":{\"meanRed\":122.255651751921,\"meanGreen\":122.255651751921,\"meanBlue\":122.255651751921,\"stddevRed\":27.6812835801793,\"stddevGreen\":27.6812835801793,\"stddevBlue\":27.6812835801793}}\n{\"pixels\":[[0,0,0,0,0,0,0,0,0,0],[0,507.692513165403,307.933721942629,594.141621461922,338.44569802573,512.692652559272,526.984480328925,383.718032084751,408.873732729833,0],[0,531.026988905135,592.095841396817,628.851777469493,95.2364029085228,444.56055298287,333.392183606607,573.51196986859,205.383834052314,0],[0,541.381513725793,195.44292737003,188.21241260997,476.3598728794,274.981760802021,28.6700473681393,347.650379419914,179.095309652623,0],[0,559.427270170928,464.926072531052,26.2391203789429,262.027116536394,70.1436486822546,301.338533676107,402.037359166487,382.423199322948,0],[0,540.525517020466,611.494264524542,532.08223114212,648.058960431852,640.142399373039,183.300939929458,232.949529192443,268.555146219915,0],[0,494.901449881851,342.578556802197,245.788245834935,209.902059373713,377.712047181093,419.630782916545,145.919686133976,56.0246151237916,0],[0,376.771444273009,149.668633232663,87.0330144302201,331.952882860641,238.395178774245,380.972075867401,661.965551924579,173.148314464784,0],[0,412.019392675303,147.030124032927,356.521188629432,435.914283073864,624.217253470402,209.351073727266,677.154128402466,572.771772527998,0],[0,0,0,0,0,0,0,0,0,0]],\"metadata\":{\"width\":10,\"height\":10,\"channels\":1},\"stats\":{\"meanRed\":234.69351291231,\"meanGreen\":234.69351291231,\"meanBlue\":234.69351291231,\"stddevRed\":227.8315944865,\"stddevGreen\":227.8315944865,\"stddevBlue\":227.8315944865}}\n{\"meanRed\":119.088624280306,\"meanGreen\":119.088624280306,\"meanBlue\":119.088624280306,\"stddevRed\":73.4853614921213,\"stddevGreen\":73.4853614921213,\"stddevBlue\":73.4853614921213}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/filters.hpp",
    "content": "#include <vector>\n#include <cmath>\n#include <algorithm>\n\n// Simple box blur filter\nstd::vector<std::vector<double>> blurFilter(std::vector<std::vector<double>> pixels) {\n    int height = pixels.size();\n    if (height == 0) return pixels;\n    int width = pixels[0].size();\n\n    std::vector<std::vector<double>> result(height, std::vector<double>(width));\n\n    // Simple 3x3 box blur\n    for (int i = 0; i < height; i++) {\n        for (int j = 0; j < width; j++) {\n            double sum = 0;\n            int count = 0;\n\n            for (int di = -1; di <= 1; di++) {\n                for (int dj = -1; dj <= 1; dj++) {\n                    int ni = i + di;\n                    int nj = j + dj;\n                    if (ni >= 0 && ni < height && nj >= 0 && nj < width) {\n                        sum += pixels[ni][nj];\n                        count++;\n                    }\n                }\n            }\n            result[i][j] = sum / count;\n        }\n    }\n    return result;\n}\n\n// Simple edge detection (Sobel-like)\nstd::vector<std::vector<double>> edgeDetect(std::vector<std::vector<double>> pixels) {\n    int height = pixels.size();\n    if (height == 0) return pixels;\n    int width = pixels[0].size();\n\n    std::vector<std::vector<double>> result(height, std::vector<double>(width));\n\n    for (int i = 1; i < height - 1; i++) {\n        for (int j = 1; j < width - 1; j++) {\n            // Horizontal gradient\n            double gx = pixels[i-1][j+1] + 2*pixels[i][j+1] + pixels[i+1][j+1]\n                      - pixels[i-1][j-1] - 2*pixels[i][j-1] - pixels[i+1][j-1];\n\n            // Vertical gradient\n            double gy = pixels[i+1][j-1] + 2*pixels[i+1][j] + pixels[i+1][j+1]\n                      - pixels[i-1][j-1] - 2*pixels[i-1][j] - pixels[i-1][j+1];\n\n            result[i][j] = std::sqrt(gx*gx + gy*gy);\n        }\n    }\n    return result;\n}\n\n// Get image metadata\nstruct ImageMetadata {\n    int width;\n    int height;\n    int channels;\n};\n\nImageMetadata getMetadata(std::vector<std::vector<double>> pixels) {\n    ImageMetadata meta;\n    meta.height = pixels.size();\n    meta.width = (meta.height > 0) ? pixels[0].size() : 0;\n    meta.channels = 1;  // Grayscale\n    return meta;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/image_io.py",
    "content": "import random\n\ndef createTestImage(width, height):\n    \"\"\"Create a test image with random grayscale values\"\"\"\n    random.seed(42)  # Deterministic for testing\n    # Create 2D array of random values between 0 and 255\n    pixels = [[random.uniform(0, 255) for _ in range(width)] for _ in range(height)]\n    return pixels\n\ndef pixelsToDict(pixels, metadata, stats):\n    \"\"\"Combine pixels, metadata, and stats into a result dictionary\"\"\"\n    return {\n        \"pixels\": pixels,\n        \"metadata\": metadata,\n        \"stats\": stats\n    }\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/main.loc",
    "content": "-- desc: Image Filter Pipeline - Polyglot Image Processing\n-- author: Claude\n--\n-- Demonstrates cross-language image processing with Python for I/O,\n-- C++ for intensive filtering, and R for statistical analysis.\nmodule main (applyBlur, detectEdges, analyzeImage)\n\n-- Type mappings\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Py => Real = \"float\"\ntype R => Real = \"double\"\n\n-- Image metadata record\nrecord ImageMetadata where\n  width :: Int\n  height :: Int\n  channels :: Int\n\nrecord Py => ImageMetadata = \"dict\"\nrecord Cpp => ImageMetadata = \"ImageMetadata\"\nrecord R => ImageMetadata = \"list\"\n\n-- Color statistics record\nrecord ColorStats where\n  meanRed :: Real\n  meanGreen :: Real\n  meanBlue :: Real\n  stddevRed :: Real\n  stddevGreen :: Real\n  stddevBlue :: Real\n\nrecord Py => ColorStats = \"dict\"\nrecord Cpp => ColorStats = \"struct\"\nrecord R => ColorStats = \"list\"\n\n-- Filtered result with metadata\nrecord FilteredResult where\n  pixels :: [[Real]]\n  metadata :: ImageMetadata\n  stats :: ColorStats\n\nrecord Py => FilteredResult = \"dict\"\nrecord Cpp => FilteredResult = \"struct\"\nrecord R => FilteredResult = \"list\"\n\n-- Python I/O functions\nsource Py from \"image_io.py\" (\"createTestImage\", \"pixelsToDict\")\ncreateTestImage :: Int -> Int -> [[Real]]\npixelsToDict :: [[Real]] -> ImageMetadata -> ColorStats -> FilteredResult\n\n-- C++ filter functions\nsource Cpp from \"filters.hpp\" (\"blurFilter\", \"edgeDetect\", \"getMetadata\")\nblurFilter :: [[Real]] -> [[Real]]\nedgeDetect :: [[Real]] -> [[Real]]\ngetMetadata :: [[Real]] -> ImageMetadata\n\n-- R statistics functions\nsource R from \"stats.R\" (\"computeColorStats\")\ncomputeColorStats :: [[Real]] -> ColorStats\n\n--' Apply blur filter to an image\n--'\n--' Creates a test image and applies Gaussian blur using C++.\n--' Returns the filtered result with metadata and statistics.\n--'\n--' name: blur\napplyBlur ::\n  --' Image width in pixels\n  --' arg: --width\n  --' metavar: WIDTH\n  --' default: 10\n  Int ->\n  --' Image height in pixels\n  --' arg: --height\n  --' metavar: HEIGHT\n  --' default: 10\n  Int ->\n  --' return: Filtered image with metadata\n  FilteredResult\napplyBlur width height = pixelsToDict blurred metadata stats where\n  pixels = createTestImage width height\n  blurred = blurFilter pixels\n  metadata = getMetadata blurred\n  stats = computeColorStats blurred\n\n--' Detect edges in an image\n--'\n--' Creates a test image and applies edge detection using C++.\n--'\n--' name: edges\ndetectEdges ::\n  --' Image width in pixels\n  --' arg: --width\n  --' metavar: WIDTH\n  --' default: 10\n  Int ->\n  --' Image height in pixels\n  --' arg: --height\n  --' metavar: HEIGHT\n  --' default: 10\n  Int ->\n  --' return: Edge-detected image with metadata\n  FilteredResult\ndetectEdges width height = pixelsToDict edges metadata stats where\n  pixels = createTestImage width height\n  edges = edgeDetect pixels\n  metadata = getMetadata edges\n  stats = computeColorStats edges\n\n--' Analyze image statistics\n--'\n--' Creates a test image and computes color channel statistics using R.\n--'\n--' name: analyze\nanalyzeImage ::\n  --' Image width in pixels\n  --' arg: --width\n  --' metavar: WIDTH\n  --' default: 8\n  Int ->\n  --' Image height in pixels\n  --' arg: --height\n  --' metavar: HEIGHT\n  --' default: 8\n  Int ->\n  --' return: Image statistics\n  ColorStats\nanalyzeImage width height = computeColorStats pixels where\n  pixels = createTestImage width height\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-6/stats.R",
    "content": "computeColorStats <- function(pixels) {\n  # Flatten the matrix\n  flat <- unlist(pixels)\n\n  # For grayscale, all channels are the same\n  mean_val <- mean(flat)\n  sd_val <- sd(flat)\n\n  # Return as list (morloc record)\n  list(\n    meanRed = mean_val,\n    meanGreen = mean_val,\n    meanBlue = mean_val,\n    stddevRed = sd_val,\n    stddevGreen = sd_val,\n    stddevBlue = sd_val\n  )\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus analyze -n 50 > obs.txt 2> obs.err\n\t./nexus summary 2>> obs.err  >> obs.txt\n\t./nexus groupBy -n 50 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/README.md",
    "content": "# Claude-Test-7: CSV Data Analysis Dashboard\n\n## Overview\nDemonstrates data science workflows with Python for I/O, R for statistics, and C++ for fast aggregations. Tests records, lists of records, and field accessors.\n\n## Features Tested\n- **Records**: SalesRecord, Summary, CategorySummary\n- **Lists of records**: `[SalesRecord]`, `[CategorySummary]`\n- **Field accessors**: `.price`, `.category` to extract record fields\n- **Map with lambdas**: `map (\\s -> .price s) sales`\n- **Cross-language data pipeline**: Python → C++ → Python\n- **String generation**: Formatted reports from Python\n\n## Architecture\n\n### Python (`data_io.py`)\n- `generateSalesData(n)`: Create random sales records (deterministic seed)\n- `formatReport(summaries)`: Format category summaries as text\n\n### C++ (`aggregations.hpp`)\n- `calculateRevenue(sales)`: Fast sum of all revenues\n- `fastGroupBy(sales)`: Group by category and aggregate\n\n### R (`statistics.R`)\n- `computeSummary(values)`: Compute count, mean, stddev, min, max\n- `correlationAnalysis(x, y)`: Pearson correlation coefficient\n\n## Data Flow\n\n1. **analyzeSales**: Python generates data → C++ calculates total revenue\n2. **summarizeStats**: Python generates → Extract prices with map → R computes statistics\n3. **groupByCategory**: Python generates → C++ groups and aggregates → Python formats report\n\n## Example Usage\n\n```bash\n# Calculate total revenue from 100 sales\n./nexus analyze --records 100\n\n# Get summary statistics\n./nexus summary --records 50\n\n# Group by category and format report\n./nexus group-by --records 200\n```\n\n## What This Tests\n\n1. **Records with multiple fields** (strings, ints, reals)\n2. **Lists of records** serialization\n3. **Field accessor syntax** (`.price`, `.category`)\n4. **Map with lambda and field access**: `map (\\s -> .price s) sales`\n5. **Cross-language aggregation** patterns\n6. **String formatting** and text generation\n7. **Deterministic random data** for reproducible tests\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/aggregations.hpp",
    "content": "#include <vector>\n#include <string>\n#include <map>\n#include <algorithm>\n\nstruct SalesRecord {\n    std::string product;\n    std::string category;\n    int quantity;\n    double price;\n    std::string region;\n};\n\nstruct CategorySummary {\n    std::string category;\n    double totalSales;\n    int itemCount;\n    double avgPrice;\n};\n\n// Calculate total revenue from all sales\ndouble calculateRevenue(std::vector<SalesRecord> sales) {\n    double total = 0.0;\n    for (const auto& sale : sales) {\n        total += sale.quantity * sale.price;\n    }\n    return total;\n}\n\n// Group sales by category and compute aggregates\nstd::vector<CategorySummary> fastGroupBy(std::vector<SalesRecord> sales) {\n    std::map<std::string, std::pair<double, int>> categoryData;\n\n    // Aggregate by category\n    for (const auto& sale : sales) {\n        double revenue = sale.quantity * sale.price;\n        categoryData[sale.category].first += revenue;\n        categoryData[sale.category].second += 1;\n    }\n\n    // Convert to result vector\n    std::vector<CategorySummary> results;\n    for (const auto& entry : categoryData) {\n        CategorySummary summary;\n        summary.category = entry.first;\n        summary.totalSales = entry.second.first;\n        summary.itemCount = entry.second.second;\n        summary.avgPrice = entry.second.first / entry.second.second;\n        results.push_back(summary);\n    }\n\n    return results;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/data_io.py",
    "content": "import random\n\ndef generateSalesData(n):\n    \"\"\"Generate n sales records with deterministic random data\"\"\"\n    random.seed(42)\n\n    products = [\"Widget\", \"Gadget\", \"Doohickey\", \"Thingamajig\", \"Gizmo\"]\n    categories = [\"Electronics\", \"Hardware\", \"Software\", \"Services\"]\n    regions = [\"North\", \"South\", \"East\", \"West\"]\n\n    sales = []\n    for i in range(n):\n        record = {\n            \"product\": random.choice(products),\n            \"category\": random.choice(categories),\n            \"quantity\": random.randint(1, 100),\n            \"price\": round(random.uniform(10.0, 500.0), 2),\n            \"region\": random.choice(regions)\n        }\n        sales.append(record)\n\n    return sales\n\ndef formatReport(summaries):\n    \"\"\"Format category summaries as a text report\"\"\"\n    lines = [\"Category Sales Report\", \"=\" * 50]\n\n    for summary in summaries:\n        line = f\"{summary['category']:20s} | Total: ${summary['totalSales']:10.2f} | Items: {summary['itemCount']:5d} | Avg: ${summary['avgPrice']:8.2f}\"\n        lines.append(line)\n\n    lines.append(\"=\" * 50)\n    return \"\\n\".join(lines)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/exp.txt",
    "content": "624570.87\n{\"count\":50,\"total\":12875.4,\"mean\":257.508,\"stddev\":131.659373462007,\"minVal\":24.6,\"maxVal\":498.1}\n\"Category Sales Report\\n==================================================\\nElectronics          | Total: $ 239557.13 | Items:    17 | Avg: $14091.60\\nHardware             | Total: $  82341.63 | Items:     9 | Avg: $ 9149.07\\nServices             | Total: $ 237477.21 | Items:    16 | Avg: $14842.33\\nSoftware             | Total: $  65194.90 | Items:     8 | Avg: $ 8149.36\\n==================================================\"\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/main.loc",
    "content": "-- desc: CSV Data Analysis Dashboard\n-- author: Claude\n--\n-- Demonstrates table types, statistical aggregation with R,\n-- fast C++ operations, and Python data I/O.\nmodule main (analyzeSales, summarizeStats, groupByCategory)\n\nimport root-py\nimport root-r\nimport root-cpp\n\n-- Sales data record\nrecord SalesRecord where\n  product :: Str\n  category :: Str\n  quantity :: Int\n  price :: Real\n  region :: Str\n\nrecord Py => SalesRecord = \"dict\"\nrecord Cpp => SalesRecord = \"SalesRecord\"\nrecord R => SalesRecord = \"list\"\n\n-- Summary statistics record\nrecord Summary where\n  count :: Int\n  total :: Real\n  mean :: Real\n  stddev :: Real\n  minVal :: Real\n  maxVal :: Real\n\nrecord Py => Summary = \"dict\"\nrecord Cpp => Summary = \"struct\"\nrecord R => Summary = \"list\"\n\n-- Category summary\nrecord CategorySummary where\n  category :: Str\n  totalSales :: Real\n  itemCount :: Int\n  avgPrice :: Real\n\nrecord Py => CategorySummary = \"dict\"\nrecord Cpp => CategorySummary = \"CategorySummary\"\nrecord R => CategorySummary = \"list\"\n\n-- Python data generation and I/O\nsource Py from \"data_io.py\" (\"generateSalesData\", \"formatReport\")\ngenerateSalesData :: Int -> [SalesRecord]\nformatReport :: [CategorySummary] -> Str\n\n-- C++ fast aggregations\nsource Cpp from \"aggregations.hpp\" (\"fastGroupBy\", \"calculateRevenue\")\nfastGroupBy :: [SalesRecord] -> [CategorySummary]\ncalculateRevenue :: [SalesRecord] -> Real\n\n-- R statistical functions\nsource R from \"statistics.R\" (\"computeSummary\", \"correlationAnalysis\")\ncomputeSummary :: [Real] -> Summary\ncorrelationAnalysis :: [Real] -> [Real] -> Real\n\n--' Analyze sales data\n--'\n--' Generates sales data and computes total revenue using C++.\n--'\n--' name: analyze\nanalyzeSales ::\n  --' Number of sales records to generate\n  --' arg: -n/--records\n  --' metavar: COUNT\n  --' default: 100\n  Int ->\n  --' return: Total revenue\n  Real\nanalyzeSales n = calculateRevenue sales where\n  sales = generateSalesData n\n\n--' Compute summary statistics\n--'\n--' Generates sales data, extracts prices, and computes statistics using R.\n--'\n--' name: summary\nsummarizeStats ::\n  --' Number of records to analyze\n  --' arg: -n/--records\n  --' metavar: COUNT\n  --' default: 50\n  Int ->\n  --' return: Statistical summary\n  Summary\nsummarizeStats n = computeSummary prices where\n  sales = generateSalesData n\n  prices = map (\\s -> .price s) sales\n\n--' Group sales by category\n--'\n--' Groups sales data by category using C++ and formats a report with Python.\n--'\n--' name: groupBy\ngroupByCategory ::\n  --' Number of records to analyze\n  --' arg: -n/--records\n  --' metavar: COUNT\n  --' default: 100\n  Int ->\n  --' return: Formatted report\n  Str\ngroupByCategory n = formatReport grouped where\n  sales = generateSalesData n\n  grouped = fastGroupBy sales\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-7/statistics.R",
    "content": "computeSummary <- function(values) {\n  list(\n    count = length(values),\n    total = sum(values),\n    mean = mean(values),\n    stddev = sd(values),\n    minVal = min(values),\n    maxVal = max(values)\n  )\n}\n\ncorrelationAnalysis <- function(x, y) {\n  cor(x, y)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus analyze --sample 1 > obs.txt 2> obs.err\n\t./nexus topWords --sample 1 2>> obs.err  >> obs.txt\n\t./nexus sentiment --sample 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/README.md",
    "content": "# Claude-Test-8: Text Mining Pipeline\n\n## Overview\nDemonstrates text processing and NLP workflows with Python for tokenization, C++ for fast string operations, and R for sentiment analysis.\n\n## Features Tested\n- **String processing**: Tokenization, word counting, n-grams\n- **Lists of strings**: `[Str]` for tokens\n- **Tuples**: `(Str, Int)` for word-count pairs\n- **Helper functions**: `take`, `reverse` for list manipulation\n- **Pattern matching**: Case expressions on lists\n- **Records with strings**: Document, WordFreq, Sentiment records\n- **Cross-language text analysis**: Python → C++ → R\n\n## Architecture\n\n### Python (`text_processing.py`)\n- `getSampleText(id)`: Retrieve sample texts for testing\n- `tokenizeText(text)`: Tokenize into lowercase words\n- `createDocument(id, text)`: Create Document record\n\n### C++ (`string_ops.hpp`)\n- `countWords(tokens)`: Count word frequencies, return sorted list\n- `findNgrams(tokens, n)`: Generate n-grams\n- `calculateTfidf(word, tokens)`: Simple TF-IDF calculation\n\n### R (`text_stats.R`)\n- `computeSentiment(tokens)`: Sentiment analysis with word lists\n- `wordDiversity(tokens)`: Type-token ratio\n\n## Data Flow\n\n1. **analyzeText**: Python gets text → tokenizes → creates Document record\n2. **findTopWords**: Python tokenizes → C++ counts and sorts → morloc takes top N\n3. **computeSentiment**: Python tokenizes → R analyzes sentiment\n\n## Example Usage\n\n```bash\n# Analyze sample text 1\n./nexus analyze --sample 1\n\n# Find top 10 words from sample 3\n./nexus top-words --sample 3 --count 10\n\n# Compute sentiment for sample 2 (positive text)\n./nexus sentiment --sample 2\n```\n\n## What This Tests\n\n1. **String operations** across languages\n2. **Tuples**: `(Str, Int)` for word frequencies\n3. **Pattern matching**: Case expressions on lists (`case xs of`)\n4. **Helper functions**: `take`, `reverse` implemented in morloc\n5. **Lists of tuples** serialization\n6. **Records with string fields**\n7. **Cross-language text processing** pipelines\n\n## Sample Texts\n\n1. \"The quick brown fox...\" (neutral)\n2. \"This is a wonderful day!...\" (positive)\n3. \"The weather is terrible...\" (negative)\n4. \"Machine learning...\" (neutral/technical)\n5. \"The cat sat on the mat...\" (neutral)\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/exp.txt",
    "content": "{\"docId\":\"doc1\",\"text\":\"The quick brown fox jumps over the lazy dog. The dog was very lazy indeed.\",\"tokens\":[\"the\",\"quick\",\"brown\",\"fox\",\"jumps\",\"over\",\"the\",\"lazy\",\"dog\",\"the\",\"dog\",\"was\",\"very\",\"lazy\",\"indeed\"],\"wordCount\":15}\n[{\"word\":\"the\",\"count\":3},{\"word\":\"dog\",\"count\":2},{\"word\":\"lazy\",\"count\":2},{\"word\":\"brown\",\"count\":1},{\"word\":\"fox\",\"count\":1},{\"word\":\"indeed\",\"count\":1},{\"word\":\"jumps\",\"count\":1},{\"word\":\"over\",\"count\":1},{\"word\":\"quick\",\"count\":1},{\"word\":\"very\",\"count\":1},{\"word\":\"was\",\"count\":1}]\n{\"positive\":0.25,\"negative\":0,\"neutral\":0.75,\"overall\":0.25}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/main.loc",
    "content": "-- desc: Text Mining Pipeline - NLP and Text Analytics\n-- author: Claude\n--\n-- Demonstrates string processing, tokenization, and text analysis\n-- across Python, C++, and R.\nmodule main (analyzeText, findTopWords, analyzeSentiment)\n\n-- Type mappings\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Py => Real = \"float\"\ntype R => Real = \"double\"\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\ntype Cpp => Tuple2 a b = \"std::pair<$1,$2>\" a b\ntype Py => Tuple2 a b = \"tuple\" a b\ntype R => Tuple2 a b = \"list\" a b\n\n-- Document record\nrecord Document where\n  docId :: Str\n  text :: Str\n  tokens :: [Str]\n  wordCount :: Int\n\nrecord Py => Document = \"dict\"\nrecord Cpp => Document = \"struct\"\nrecord R => Document = \"list\"\n\n-- Word frequency record\nrecord WordFreq where\n  word :: Str\n  count :: Int\n\nrecord Py => WordFreq = \"dict\"\nrecord Cpp => WordFreq = \"WordFreq\"\nrecord R => WordFreq = \"list\"\n\n-- Sentiment scores\nrecord Sentiment where\n  positive :: Real\n  negative :: Real\n  neutral :: Real\n  overall :: Real\n\nrecord Py => Sentiment = \"dict\"\nrecord Cpp => Sentiment = \"struct\"\nrecord R => Sentiment = \"list\"\n\n-- Python tokenization and I/O\nsource Py from \"text_processing.py\" (\"createDocument\", \"tokenizeText\", \"getSampleText\")\ncreateDocument :: Str -> Str -> Document\ntokenizeText :: Str -> [Str]\ngetSampleText :: Int -> Str\n\n-- C++ fast string operations\nsource Cpp from \"string_ops.hpp\" (\"countWords\", \"findNgrams\")\ncountWords :: [Str] -> [WordFreq]\nfindNgrams :: [Str] -> Int -> [Str]\n\n-- R sentiment and statistics\nsource R from \"text_stats.R\" (\"computeSentiment\", \"wordDiversity\")\ncomputeSentiment :: [Str] -> Sentiment\nwordDiversity :: [Str] -> Real\n\n--' Analyze text document\n--'\n--' Creates a document from sample text and tokenizes it.\n--'\n--' name: analyze\nanalyzeText ::\n  --' Sample text ID (1-5)\n  --' arg: --sample\n  --' metavar: ID\n  --' default: 1\n  Int ->\n  --' return: Document with tokens\n  Document\nanalyzeText sampleId = createDocument \"doc1\" text where\n  text = getSampleText sampleId\n\n--' Find top N most frequent words\n--'\n--' Tokenizes text and finds the most common words using C++.\n--'\n--' name: topWords\nfindTopWords ::\n  --' Sample text ID\n  --' arg: --sample\n  --' metavar: ID\n  --' default: 1\n  Int ->\n  --' return: List of word frequencies\n  [WordFreq]\nfindTopWords sampleId = countWords tokens where\n  text = getSampleText sampleId\n  tokens = tokenizeText text\n\n--' Compute sentiment analysis\n--'\n--' Analyzes the sentiment of sample text using R.\n--'\n--' name: sentiment\nanalyzeSentiment ::\n  --' Sample text ID\n  --' arg: --sample\n  --' metavar: ID\n  --' default: 2\n  Int ->\n  --' return: Sentiment scores\n  Sentiment\nanalyzeSentiment sampleId = computeSentiment tokens where\n  text = getSampleText sampleId\n  tokens = tokenizeText text\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/string_ops.hpp",
    "content": "#include <vector>\n#include <string>\n#include <map>\n#include <algorithm>\n#include <cmath>\n\nstruct WordFreq {\n    std::string word;\n    int count;\n};\n\n// Count word frequencies and return sorted list\nstd::vector<WordFreq> countWords(std::vector<std::string> tokens) {\n    std::map<std::string, int> wordCounts;\n\n    // Count occurrences\n    for (const auto& token : tokens) {\n        wordCounts[token]++;\n    }\n\n    // Convert to vector and sort by count (descending)\n    std::vector<WordFreq> result;\n    for (const auto& entry : wordCounts) {\n        result.push_back({entry.first, entry.second});\n    }\n\n    std::sort(result.begin(), result.end(),\n              [](const auto& a, const auto& b) { return a.count > b.count; });\n\n    return result;\n}\n\n// Generate n-grams from tokens\nstd::vector<std::string> findNgrams(std::vector<std::string> tokens, int n) {\n    std::vector<std::string> ngrams;\n\n    for (size_t i = 0; i + n <= tokens.size(); i++) {\n        std::string ngram;\n        for (int j = 0; j < n; j++) {\n            if (j > 0) ngram += \" \";\n            ngram += tokens[i + j];\n        }\n        ngrams.push_back(ngram);\n    }\n\n    return ngrams;\n}\n\n// Calculate simple TF-IDF score for a word in a document\ndouble calculateTfidf(std::string word, std::vector<std::string> tokens) {\n    // Term frequency\n    int termCount = 0;\n    for (const auto& token : tokens) {\n        if (token == word) termCount++;\n    }\n\n    if (termCount == 0) return 0.0;\n\n    double tf = static_cast<double>(termCount) / tokens.size();\n\n    // Simple IDF (assuming single document, so just use log of doc length)\n    double idf = std::log(static_cast<double>(tokens.size()) / (1.0 + termCount));\n\n    return tf * idf;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/text_processing.py",
    "content": "import re\n\nSAMPLE_TEXTS = {\n    1: \"The quick brown fox jumps over the lazy dog. The dog was very lazy indeed.\",\n    2: \"This is a wonderful day! I feel great and everything is amazing.\",\n    3: \"The weather is terrible today. It's cold, rainy, and miserable.\",\n    4: \"Machine learning and artificial intelligence are transforming technology.\",\n    5: \"The cat sat on the mat. The mat was soft and comfortable for the cat.\"\n}\n\ndef getSampleText(sampleId):\n    \"\"\"Get a sample text by ID\"\"\"\n    return SAMPLE_TEXTS.get(sampleId, SAMPLE_TEXTS[1])\n\ndef tokenizeText(text):\n    \"\"\"Tokenize text into words (lowercase, alphanumeric only)\"\"\"\n    # Convert to lowercase and split on non-alphanumeric\n    words = re.findall(r'\\b[a-z]+\\b', text.lower())\n    return words\n\ndef createDocument(docId, text):\n    \"\"\"Create a Document record from text\"\"\"\n    tokens = tokenizeText(text)\n    return {\n        \"docId\": docId,\n        \"text\": text,\n        \"tokens\": tokens,\n        \"wordCount\": len(tokens)\n    }\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-8/text_stats.R",
    "content": "computeSentiment <- function(tokens) {\n  # Simple sentiment based on word lists\n  positive_words <- c(\"great\", \"wonderful\", \"amazing\", \"good\", \"happy\", \"excellent\", \"fantastic\")\n  negative_words <- c(\"terrible\", \"bad\", \"awful\", \"horrible\", \"sad\", \"miserable\", \"poor\", \"cold\", \"rainy\")\n\n  pos_count <- sum(tokens %in% positive_words)\n  neg_count <- sum(tokens %in% negative_words)\n  total <- length(tokens)\n\n  positive_score <- pos_count / total\n  negative_score <- neg_count / total\n  neutral_score <- 1 - positive_score - negative_score\n  overall <- positive_score - negative_score\n\n  list(\n    positive = positive_score,\n    negative = negative_score,\n    neutral = neutral_score,\n    overall = overall\n  )\n}\n\nwordDiversity <- function(tokens) {\n  # Type-token ratio (unique words / total words)\n  unique_count <- length(unique(tokens))\n  total_count <- length(tokens)\n  unique_count / total_count\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus price --spot 100 --strike 105 --volatility 0.2 --paths 100 --steps 50 > obs.txt 2> obs.err\n\t./nexus analyzeDist --spot 100 --paths 100 2>> obs.err  >> obs.txt\n\t./nexus paths --paths 5 --steps 10 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/README.md",
    "content": "# Claude-Test-9: Monte Carlo Option Pricer\n\n## Overview\nDemonstrates quantitative finance workflows with C++ for performance-critical Monte Carlo simulation, R for statistical analysis, and Python for I/O.\n\n## Features Tested\n- **Numerical computation**: Monte Carlo simulation with Geometric Brownian Motion\n- **2D arrays**: `[[Real]]` for price paths\n- **Records with reals**: MarketParams, SimulationResult, PriceDistribution\n- **Tuples in records**: `confidenceInterval :: (Real, Real)`\n- **Map with lambdas**: Extract final prices from paths\n- **Helper functions**: `last` to get final element\n- **Deterministic random numbers**: Seeded RNG for reproducible results\n\n## Architecture\n\n### Python (`market_io.py`)\n- `createMarketParams(...)`: Build market parameter record\n- `formatResults(price, stdError)`: Format output string\n\n### C++ (`monte_carlo.hpp`)\n- `simulatePaths(params, nPaths, nSteps)`: Generate stock price paths using GBM\n- `priceCallOption(params, paths)`: Calculate discounted expected payoff\n- `calculateStdError(values)`: Compute standard error\n\n### R (`finance_stats.R`)\n- `analyzeDistribution(prices)`: Comprehensive distribution analysis\n- `confidenceInterval(values, alpha)`: Calculate confidence intervals\n- `percentiles(values, probs)`: Compute percentiles\n\n## Mathematical Background\n\n**Geometric Brownian Motion:**\n```\ndS = μS dt + σS dW\n```\n\nWhere:\n- S = stock price\n- μ = drift (risk-free rate in risk-neutral measure)\n- σ = volatility\n- dW = Wiener process increment\n\n**Option Pricing:**\n```\nCall Price = e^(-rT) * E[max(S_T - K, 0)]\n```\n\n## Data Flow\n\n1. **priceOption**: Create params → C++ simulates paths → C++ prices option\n2. **analyzeDistribution**: Create params → C++ simulates → Extract final prices → R analyzes\n3. **generatePaths**: Create params → C++ simulates → Return raw paths\n\n## Example Usage\n\n```bash\n# Price option with spot=100, strike=105, vol=0.2, 1000 paths\n./nexus price --spot 100 --strike 105 --volatility 0.2 --paths 1000\n\n# Analyze price distribution\n./nexus analyze-dist --spot 100 --paths 5000\n\n# Generate 20 paths with 100 steps\n./nexus paths --paths 20 --steps 100\n```\n\n## What This Tests\n\n1. **Large numeric arrays**: Thousands of price paths\n2. **2D array serialization**: `[[Real]]`\n3. **Records with multiple real fields**\n4. **Tuples in records**: `(Real, Real)` for confidence intervals\n5. **Statistical functions** in R\n6. **Performance-critical C++ code**\n7. **Map with lambda**: `map (\\path -> last path) paths`\n8. **Deterministic randomness**: Seed 42 for reproducible tests\n\n## Default Parameters\n\n- Spot price: $100\n- Strike price: $105\n- Volatility: 20% (0.2)\n- Risk-free rate: 5% (0.05)\n- Time to maturity: 1 year\n- Paths: 1000\n- Steps: 50\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/exp.txt",
    "content": "9.38469771857192\n{\"mean\":107.266318819265,\"median\":103.746880550564,\"stddev\":21.6400811382564,\"percentile5\":74.3267000908099,\"percentile95\":143.705401150951,\"ciLower\":103.024862916167,\"ciUpper\":111.507774722363}\n[[100,96.8700414176103,100.380618985345,103.745288142639,113.464172647557,107.393741203849,106.873954852161,94.3928839636883,91.7711575504317,94.3609180080303,89.2427500962744],[100,100.808148613603,100.096076020442,100.538015221198,98.1483309311996,95.1871378298816,94.7664100795858,96.3955969686945,92.1013340207717,93.2141635459931,93.2895486769626],[100,107.760600180497,107.370708534733,104.175369907028,117.925138733867,104.049734411036,112.018895426603,118.031086742208,110.421901920188,118.960369506565,122.166946110177],[100,97.8785565933973,91.2351672012759,94.8450537404749,90.9846670542748,83.6417221349421,82.0402029368624,79.9198096455361,80.4985787422201,79.8452386069075,86.4046394327139],[100,104.830086352176,104.009896454446,98.5053179508003,106.515000875625,112.276800329512,133.212830144785,135.858295136884,151.176362303217,156.114809841396,157.437093840174]]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/finance_stats.R",
    "content": "analyzeDistribution <- function(prices) {\n  meanVal <- mean(prices)\n  medianVal <- median(prices)\n  stddevVal <- sd(prices)\n  p5 <- quantile(prices, 0.05)\n  p95 <- quantile(prices, 0.95)\n\n  # 95% confidence interval for mean\n  se <- sd(prices) / sqrt(length(prices))\n  ci_lower <- meanVal - 1.96 * se\n  ci_upper <- meanVal + 1.96 * se\n\n  list(\n    mean = meanVal,\n    median = medianVal,\n    stddev = stddevVal,\n    percentile5 = as.numeric(p5),\n    percentile95 = as.numeric(p95),\n    ciLower = ci_lower,\n    ciUpper = ci_upper\n  )\n}\n\nconfidenceInterval <- function(values, alpha) {\n  n <- length(values)\n  meanVal <- mean(values)\n  se <- sd(values) / sqrt(n)\n  z <- qnorm(1 - alpha/2)\n\n  list(meanVal - z * se, meanVal + z * se)\n}\n\npercentiles <- function(values, probs) {\n  quantile(values, probs)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/helpers.py",
    "content": "def last(xs):\n    return xs[-1]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/main.loc",
    "content": "-- desc: Monte Carlo Option Pricer - Quantitative Finance\n-- author: Claude\n--\n-- Demonstrates numerical computation with C++ for simulation,\n-- R for statistical analysis, and Python for I/O and formatting.\nmodule main (priceOption, analyzePriceDistribution, generatePaths)\n\nimport root-py\n\n-- Type mappings\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Py => Real = \"float\"\ntype R => Real = \"double\"\n\n-- Market parameters\nrecord MarketParams where\n  spot :: Real\n  strike :: Real\n  volatility :: Real\n  riskFreeRate :: Real\n  timeToMaturity :: Real\n\nrecord Py => MarketParams = \"dict\"\nrecord Cpp => MarketParams = \"MarketParams\"\nrecord R => MarketParams = \"list\"\n\n-- Statistical summary\nrecord PriceDistribution where\n  mean :: Real\n  median :: Real\n  stddev :: Real\n  percentile5 :: Real\n  percentile95 :: Real\n  ciLower :: Real\n  ciUpper :: Real\n\nrecord Py => PriceDistribution = \"dict\"\nrecord Cpp => PriceDistribution = \"PriceDistribution\"\nrecord R => PriceDistribution = \"list\"\n\n-- Python I/O and formatting\nsource Py from \"market_io.py\" (\"createMarketParams\", \"formatResults\")\ncreateMarketParams :: Real -> Real -> Real -> Real -> Real -> MarketParams\nformatResults :: Real -> Real -> Str\n\n-- C++ Monte Carlo simulation\nsource Cpp from \"monte_carlo.hpp\" (\"simulatePaths\", \"priceCallOption\", \"calculateStdError\")\nsimulatePaths :: MarketParams -> Int -> Int -> [[Real]]\npriceCallOption :: MarketParams -> [[Real]] -> Real\ncalculateStdError :: [Real] -> Real\n\n-- R statistical analysis\nsource R from \"finance_stats.R\" (\"analyzeDistribution\")\nanalyzeDistribution :: [Real] -> PriceDistribution\n\n-- Local helpers\nsource Py from \"helpers.py\" (\"last\")\nlast :: [a] -> a\n\n--' Price a European call option using Monte Carlo\n--'\n--' Simulates stock price paths and computes option price.\n--'\n--' name: price\npriceOption ::\n  --' Current stock price\n  --' arg: --spot\n  --' metavar: PRICE\n  --' default: 100.0\n  Real ->\n  --' Strike price\n  --' arg: --strike\n  --' metavar: STRIKE\n  --' default: 105.0\n  Real ->\n  --' Volatility (annual)\n  --' arg: --volatility\n  --' metavar: VOL\n  --' default: 0.2\n  Real ->\n  --' Number of simulation paths\n  --' arg: -n/--paths\n  --' metavar: N\n  --' default: 1000\n  Int ->\n  --' Number of time steps\n  --' arg: --steps\n  --' metavar: STEPS\n  --' default: 50\n  Int ->\n  --' return: Option price\n  Real\npriceOption spot strike vol nPaths nSteps = priceCallOption params paths where\n  params = createMarketParams spot strike vol 0.05 1.0\n  paths = simulatePaths params nPaths nSteps\n\n--' Analyze price distribution\n--'\n--' Runs Monte Carlo and analyzes the distribution of final prices using R.\n--'\n--' name: analyzeDist\nanalyzePriceDistribution ::\n  --' Current stock price\n  --' arg: --spot\n  --' metavar: PRICE\n  --' default: 100.0\n  Real ->\n  --' Number of paths\n  --' arg: -n/--paths\n  --' metavar: N\n  --' default: 1000\n  Int ->\n  --' return: Distribution statistics\n  PriceDistribution\nanalyzePriceDistribution spot nPaths = analyzeDistribution finalPrices where\n  params = createMarketParams spot 105.0 0.2 0.05 1.0\n  paths = simulatePaths params nPaths 50\n  finalPrices = map (\\path -> last path) paths\n\n--' Generate and return price paths\n--'\n--' Generates Monte Carlo paths for visualization.\n--'\n--' name: paths\ngeneratePaths ::\n  --' Number of paths\n  --' arg: -n/--paths\n  --' metavar: N\n  --' default: 10\n  Int ->\n  --' Number of steps\n  --' arg: --steps\n  --' metavar: STEPS\n  --' default: 50\n  Int ->\n  --' return: Simulated paths\n  [[Real]]\ngeneratePaths nPaths nSteps = simulatePaths params nPaths nSteps where\n  params = createMarketParams 100.0 105.0 0.2 0.05 1.0\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/market_io.py",
    "content": "def createMarketParams(spot, strike, volatility, riskFreeRate, timeToMaturity):\n    \"\"\"Create market parameters record\"\"\"\n    return {\n        \"spot\": spot,\n        \"strike\": strike,\n        \"volatility\": volatility,\n        \"riskFreeRate\": riskFreeRate,\n        \"timeToMaturity\": timeToMaturity\n    }\n\ndef formatResults(price, stdError):\n    \"\"\"Format pricing results as a string\"\"\"\n    return f\"Option Price: ${price:.4f} ± ${stdError:.4f}\"\n\ndef extractFinalPrices(paths):\n    \"\"\"Extract the final price from each path\"\"\"\n    return [path[-1] for path in paths]\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/monte_carlo.hpp",
    "content": "#include <vector>\n#include <cmath>\n#include <random>\n\nstruct MarketParams {\n    double spot;\n    double strike;\n    double volatility;\n    double riskFreeRate;\n    double timeToMaturity;\n};\n\nstruct PriceDistribution {\n    double mean;\n    double median;\n    double stddev;\n    double percentile5;\n    double percentile95;\n    double ciLower;\n    double ciUpper;\n};\n\n// Simulate stock price paths using Geometric Brownian Motion\nstd::vector<std::vector<double>> simulatePaths(MarketParams params, int nPaths, int nSteps) {\n    std::mt19937 gen(42);  // Deterministic seed\n    std::normal_distribution<> dist(0.0, 1.0);\n\n    double dt = params.timeToMaturity / nSteps;\n    double drift = (params.riskFreeRate - 0.5 * params.volatility * params.volatility) * dt;\n    double diffusion = params.volatility * std::sqrt(dt);\n\n    std::vector<std::vector<double>> paths(nPaths, std::vector<double>(nSteps + 1));\n\n    for (int i = 0; i < nPaths; i++) {\n        paths[i][0] = params.spot;\n\n        for (int j = 1; j <= nSteps; j++) {\n            double z = dist(gen);\n            double S_prev = paths[i][j-1];\n            paths[i][j] = S_prev * std::exp(drift + diffusion * z);\n        }\n    }\n\n    return paths;\n}\n\n// Price a European call option using simulated paths\ndouble priceCallOption(MarketParams params, std::vector<std::vector<double>> paths) {\n    int nPaths = paths.size();\n    double payoffSum = 0.0;\n\n    for (int i = 0; i < nPaths; i++) {\n        double finalPrice = paths[i].back();\n        double payoff = std::max(finalPrice - params.strike, 0.0);\n        payoffSum += payoff;\n    }\n\n    double avgPayoff = payoffSum / nPaths;\n    double discountedPrice = avgPayoff * std::exp(-params.riskFreeRate * params.timeToMaturity);\n\n    return discountedPrice;\n}\n\n// Calculate standard error of a sample\ndouble calculateStdError(std::vector<double> values) {\n    int n = values.size();\n    if (n == 0) return 0.0;\n\n    double mean = 0.0;\n    for (double v : values) mean += v;\n    mean /= n;\n\n    double variance = 0.0;\n    for (double v : values) {\n        double diff = v - mean;\n        variance += diff * diff;\n    }\n    variance /= (n - 1);\n\n    return std::sqrt(variance / n);\n}\n"
  },
  {
    "path": "test-suite/golden-tests/claude-test-9/test-map.loc",
    "content": "module main (testMapWorks)\n\nimport root-py\n\n-- Test that map is available through Functor typeclass\ntestMapWorks :: [Int] -> [Int]\ntestMapWorks xs = map (\\x -> x + 1) xs\n"
  },
  {
    "path": "test-suite/golden-tests/command-groups/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus math double 5 > obs.txt 2> obs.err\n\t./nexus math triple 3 >> obs.txt 2>> obs.err\n\t./nexus text greet '\"hello\"' >> obs.txt 2>> obs.err\n\t./nexus version >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/command-groups/exp.txt",
    "content": "[5,5]\n[3,3,3]\n[\"hello\",\"hello\"]\n42\n"
  },
  {
    "path": "test-suite/golden-tests/command-groups/main.loc",
    "content": "module main (\n  --* group: math\n  --* Mathematical operations\n  double, triple,\n\n  --* group: text\n  --* Text utilities\n  greet,\n\n  --* group:\n  version\n)\n\ndouble :: Int -> (Int, Int)\ndouble x = (x, x)\n\ntriple :: Int -> (Int, Int, Int)\ntriple x = (x, x, x)\n\ngreet :: Str -> [Str]\ngreet x = [x, x]\n\nversion :: Int\nversion = 42\n"
  },
  {
    "path": "test-suite/golden-tests/composition/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[\"x\",\"y\"]' > obs.txt 2> obs.err\n\t./nexus bar 0 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/composition/exp.txt",
    "content": "[\"abcx\",\"abcy\"]\n1\n"
  },
  {
    "path": "test-suite/golden-tests/composition/main.loc",
    "content": "module main (foo, bar)\n\nimport root-py\n\nsource py from \"paste.py\" (\n    \"morloc_paste\" as paste\n    )\n\nsource py (\"abs\")\n\nabs :: Real -> Real\npaste :: Str -> Str -> Str\n\nfoo :: [Str] -> [Str]\nfoo xs = map (paste \"a\" . paste \"b\" . paste \"c\") xs\n\nbar :: Real -> Real\nbar = abs . (-) 1.0 . abs\n"
  },
  {
    "path": "test-suite/golden-tests/composition/paste.py",
    "content": "def morloc_paste(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/.gitignore",
    "content": "example.json\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus readFASTQ '\"example.fastq\"' | \\\n\t\t./nexus trim config.json /dev/stdin | \\\n\t\t./nexus writeFASTA '\"obs.txt\"' /dev/stdin\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/config.json",
    "content": "{ \"minLength\":40\n, \"minQuality\":20\n}\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/example.fastq",
    "content": "@SEQ_1\nGATTTGGGGTTCAAAGCAGTATCGATCAAATAGTAAATCCATTTGTTCAACTCACAGTTT\n+\n!!!!!!!!***+))%%%++)(%%%%).1***-+*''))**55CCF>>>>>>!!!!!!!!!\n@SEQ_2\nGATTTGGGGTTCAAAGCAGTATCGATCAAATAGTAAATCCATTTGTTCAACTCACAGTTT\n+\n!''*((((***+))%%%++)(%%%%).1***-+*''))**55CCF>>>>>>CCCCCCC65\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/exp.txt",
    "content": ">SEQ_1\nATTTGTTCAAC\n>SEQ_2\nATTTGTTCAACTCACAGTTT\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/foo.hpp",
    "content": "// This code was adapted from that generated by Perplexity using the Morloc type\n// signatures as prompts\n\n// C++ header sourced by morloc script\n#pragma one\n\n#include <algorithm>\n#include <cstdint>\n#include <fstream>\n#include <iostream>\n#include <map>\n#include <string>\n#include <tuple>\n#include <vector>\n\n// Types for clarity\nusing Str = std::string;\nusing FASTQRecord = std::tuple< std::tuple<Str, Str>, Str >;\nusing namespace std;\n\n// Struct to hold the trimming configuration\nstruct Config {\n    int minLength;\n    int minQuality;\n};\n\n// Read a FASTQ file into a vector of tuples\nvector<FASTQRecord> readFASTQ(const Str& filename) {\n    ifstream fin(filename);\n    vector<FASTQRecord> result;\n    if (!fin) {\n        cerr << \"Cannot open file: \" << filename << endl;\n        return result;\n    }\n    Str header, seq, plus, qual;\n    while (getline(fin, header)) {\n        // FASTQ files: 4 lines per record\n        if (!getline(fin, seq)) break;\n        if (!getline(fin, plus)) break;\n        if (!getline(fin, qual)) break;\n        // Strip '@' from header if present\n        if (!header.empty() && header[0] == '@') header = header.substr(1);\n        // Pair (header, quality), then sequence\n        result.push_back(\n            make_tuple(\n                make_tuple(header, qual),\n                seq\n            )\n        );\n    }\n    return result;\n}\n\n// Write a list of (header, sequence) to FASTA file\nint writeFASTA(const Str& filename, const std::vector< std::tuple<Str, Str> >& records) {\n    std::ofstream fout(filename);\n    if (!fout) {\n        std::cerr << \"Cannot open file for writing: \" << filename << std::endl;\n        return 1;\n    }\n    for (const auto& rec : records) {\n        fout << \">\" << std::get<0>(rec) << \"\\n\";\n        fout << std::get<1>(rec) << \"\\n\";\n    }\n    fout.close();\n    return 0;\n}\n\n\n// Helper to convert ASCII (Phred+33) to score\ninline uint8_t phredScore(char c) {\n    return static_cast<uint8_t>(c) - 33;\n}\n\n// Trim low-quality bases at both ends\nstd::string trimRead(\n    const Config& config,\n    const std::string& qualities,\n    const std::string& sequence\n) {\n    size_t n = sequence.size();\n    if (qualities.size() != n) return \"\";\n\n    size_t left = 0, right = n;\n    while (left < n && phredScore(qualities[left]) < config.minQuality) ++left;\n    while (right > left && phredScore(qualities[right-1]) < config.minQuality) --right;\n\n    if (right <= left) return \"\";\n\n    std::string trimmed_seq = sequence.substr(left, right - left);\n\n    return trimmed_seq;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/demo-trimming/main.loc",
    "content": "module main (trim, readFASTQ, writeFASTA)\n\nsource Cpp from \"foo.hpp\" (\"trimRead\", \"readFASTQ\", \"writeFASTA\")\n\nimport root-cpp\n\nrecord Config = Config\n    { minLength :: Int\n    , minQuality :: Int \n    }\nrecord Cpp => Config = \"Config\"\n\n--' Read a FASTQ file\nreadFASTQ\n    :: Str -- FASTQ filename\n    -> [ ( ( Str -- FASTQ header\n           , Str -- quality reads as phred scores\n           )\n         , Str -- read sequence\n         ) ]\n\n--' Write a annotated sequence data to a FASTA file\nwriteFASTA\n    :: Str -- output filename\n    -> [(Str, Str)] -- list of headers and sequences\n    -> Int -- exit code, 0 for success, 1 for failure\n\n--' Trim a single read\ntrimRead\n    :: Config\n    -> Str -- quality scores\n    -> Str -- sequence\n    -> Str -- trimmed sequence\n\nwrapRead :: Config -> ((Str, Str), Str) -> (Str, Str) \nwrapRead config x = ( .0 (.0 x)\n             , trimRead config (.1 (.0 x)) (.1 x)\n             )\n\n--' Trim a list of reads with phred quality scores\ntrim\n    :: Config\n    -> [((Str, Str), Str)]\n    -> [(Str, Str)]\ntrim config = map (wrapRead config) \n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-1/exp.txt",
    "content": "4.2\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-1/foo.py",
    "content": "def g(f, x):\n    return x\n\ndef f (f, xs):\n    return xs\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-1/main.loc",
    "content": "module main (foo)\n\nsource Py from \"foo.py\" (\"g\", \"f\")\n\ntype Py => Real = \"float\"\ntype Py => Bool = \"bool\"\ntype Py => (List a) = \"list\" a\n\ng :: (a -> [a] -> [a]) -> a -> a\nf :: (a -> Bool) -> [a] -> [a]\n\nfoo = g h1 4.2 where\n  h1 x xs = h2 xs\n  h2 xs = f h3 xs\n  h3 x = True\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"Alice\"' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-2/exp.txt",
    "content": "{\"name\":\"Alice\",\"age\":42}\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-2/foo.R",
    "content": "id <- function(x) x\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-2/foo.py",
    "content": "def foo(name):\n    return dict(name = name, age = 42)\n"
  },
  {
    "path": "test-suite/golden-tests/edge-cases-2/main.loc",
    "content": "module main (foo, bar)\n\nimport root ((.))\n\ntype R => Str = \"character\"\ntype R => Int = \"integer\"\ntype R => (Tuple2 a b) = \"list\" a b\n\ntype Py => Str = \"str\"\ntype Py => Int = \"int\"\ntype Py => (Tuple2 a b) = \"tuple\" a b\n\n-- So this is really obvious code duplication now, but I'll fix it later\n\nrecord Person = Person\n  { name :: Str\n  , age :: Int\n  }\n\nrecord Py => Person = \"dict\"\n  { name :: Str\n  , age :: Int\n  }\n\nrecord R => Person = \"list\"\n  { name :: Str\n  , age :: Int\n  }\n\nsource Py from \"foo.py\" (\"foo\")\nsource R from \"foo.R\" (\"id\")\n\nid :: a -> a\nfoo :: Str -> Person\n\nbar = id . foo\n"
  },
  {
    "path": "test-suite/golden-tests/effect-accumulate-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus combined > obs.txt 2> obs.err\n\t./nexus ioOnly >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/effect-accumulate-py/exp.txt",
    "content": "20\n10\n"
  },
  {
    "path": "test-suite/golden-tests/effect-accumulate-py/foo.py",
    "content": "def read_value():\n    return 10\n\ndef risky_double(x):\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/effect-accumulate-py/main.loc",
    "content": "-- Test effect accumulation: do-block with mixed effects\n-- readValue is <IO>, riskyDouble is <Error>\n-- combined do-block uses <IO, Error> annotation\nmodule main (combined, ioOnly)\n\nimport root-py\n\nsource Py from \"foo.py\" (\"read_value\" as readValue, \"risky_double\" as riskyDouble)\n\ntype Py => Int = \"int\"\n\nreadValue :: <IO> Int\nriskyDouble :: Int -> <Error> Int\n\n-- Uses both <IO> and <Error> source functions in one do-block\ncombined :: <IO, Error> Int\ncombined = do\n  x <- readValue\n  y <- riskyDouble x\n  y\n\n-- Uses only <IO> source function\nioOnly :: <IO> Int\nioOnly = do\n  x <- readValue\n  x\n"
  },
  {
    "path": "test-suite/golden-tests/effect-coerce-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testPureLit > obs.txt 2> obs.err\n\t./nexus testPureExpr >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/effect-coerce-cpp/exp.txt",
    "content": "42\n30\n"
  },
  {
    "path": "test-suite/golden-tests/effect-coerce-cpp/main.loc",
    "content": "-- Test implicit coercion from pure values to effect types in C++\nmodule main (testPureLit, testPureExpr)\n\nimport root-cpp\n\n-- Pure literal coerced to effect return type\ntestPureLit :: <IO> Int\ntestPureLit = 42\n\n-- Pure expression coerced to effect return type\ntestPureExpr :: <IO> Int\ntestPureExpr = 10 + 20\n"
  },
  {
    "path": "test-suite/golden-tests/effect-coerce-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testPureLit > obs.txt 2> obs.err\n\t./nexus testPureExpr >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/effect-coerce-py/exp.txt",
    "content": "42\n30\n"
  },
  {
    "path": "test-suite/golden-tests/effect-coerce-py/main.loc",
    "content": "-- Test implicit coercion from pure values to effect types\nmodule main (testPureLit, testPureExpr)\n\nimport root-py\n\n-- Pure literal coerced to effect return type\ntestPureLit :: <IO> Int\ntestPureLit = 42\n\n-- Pure expression coerced to effect return type\ntestPureExpr :: <IO> Int\ntestPureExpr = 10 + 20\n"
  },
  {
    "path": "test-suite/golden-tests/effect-error-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus riskyCall > obs.txt 2> obs.err\n\t./nexus safeCall >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/effect-error-cpp/exp.txt",
    "content": "42\n8\n"
  },
  {
    "path": "test-suite/golden-tests/effect-error-cpp/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\nint riskyAdd(int a, int b) {\n    return a + b;\n}\n\nint safeAdd(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/effect-error-cpp/main.loc",
    "content": "-- Test C++ codegen with Error effect label.\n-- riskyCall uses <Error> source functions -> enriched try/catch\n-- safeCall uses <IO> source functions -> transparent catch\n-- Both should compile and run correctly.\nmodule main (riskyCall, safeCall)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"riskyAdd\", \"safeAdd\")\n\ntype Cpp => Int = \"int\"\n\nriskyAdd :: Int -> Int -> <Error> Int\nsafeAdd :: Int -> Int -> <IO> Int\n\nriskyCall :: <Error> Int\nriskyCall = do\n  x <- riskyAdd 10 20\n  riskyAdd x 12\n\nsafeCall :: <IO> Int\nsafeCall = do\n  x <- safeAdd 3 4\n  safeAdd x 1\n"
  },
  {
    "path": "test-suite/golden-tests/effect-multi-label-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMulti > obs.txt 2> obs.err\n\t./nexus testSingle >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/effect-multi-label-py/exp.txt",
    "content": "42\n30\n"
  },
  {
    "path": "test-suite/golden-tests/effect-multi-label-py/main.loc",
    "content": "-- Test multi-label effects: <IO, Error> parses and works\nmodule main (testMulti, testSingle)\n\nimport root-py\n\n-- Multi-label effect annotation\ntestMulti :: <IO, Error> Int\ntestMulti = 42\n\n-- Single-label effect annotation (for comparison)\ntestSingle :: <IO> Int\ntestSingle = 10 + 20\n"
  },
  {
    "path": "test-suite/golden-tests/effect-subtype-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testSubtype > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/effect-subtype-py/exp.txt",
    "content": "99\n"
  },
  {
    "path": "test-suite/golden-tests/effect-subtype-py/foo.py",
    "content": "def ioFunc():\n    return 99\n"
  },
  {
    "path": "test-suite/golden-tests/effect-subtype-py/main.loc",
    "content": "-- Test effect row subtyping: <IO> value used where <IO, Error> expected\nmodule main (testSubtype)\n\nimport root-py\n\nsource Py from \"foo.py\" (\"ioFunc\")\n\ntype Py => Int = \"int\"\n\n-- ioFunc returns <IO> Int\nioFunc :: <IO> Int\n\n-- testSubtype expects <IO, Error> but ioFunc only has <IO>\n-- This should work because <IO> <: <IO, Error>\ntestSubtype :: <IO, Error> Int\ntestSubtype = do\n  x <- ioFunc\n  x\n"
  },
  {
    "path": "test-suite/golden-tests/errors/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus ccerr 0 2>&1 | grep -c \"zero\" >  obs.txt || true\n\t./nexus cperr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus crerr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus pcerr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus pperr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus prerr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus rcerr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus rperr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\t./nexus rrerr 0 2>&1 | grep -c \"zero\" 2>> obs.err  >> obs.txt || true\n\nclean:\n\trm -rf nexus poolscpp.out __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/errors/exp.txt",
    "content": "1\n1\n1\n1\n1\n1\n1\n1\n1\n"
  },
  {
    "path": "test-suite/golden-tests/errors/foo.R",
    "content": "rid <- function(x) x\n\nrdiv <- function(x){\n    if(x == 0){\n        stop(\"Cannot divide by zero\")\n    } else {\n        1 / x\n    }\n}\n"
  },
  {
    "path": "test-suite/golden-tests/errors/foo.hpp",
    "content": "template <typename T>\nT cid(T x){\n    return x;\n}\n\ndouble cdiv(double x){\n    if(x == 0){\n       throw std::runtime_error(\"Cannot divide by zero\"); \n    } else {\n        return 1 / x;\n    }\n}\n"
  },
  {
    "path": "test-suite/golden-tests/errors/foo.py",
    "content": "def pid(x): \n    return x\n\ndef pdiv(x):\n    if x == 0:\n        raise ValueError(\"Cannot divide by zero\")\n    else:\n        return 1 / x\n"
  },
  {
    "path": "test-suite/golden-tests/errors/main.loc",
    "content": "module main (\n      ccerr, cperr, crerr\n    , rcerr, rperr, rrerr\n    , pcerr, pperr, prerr\n  )\n\nimport root ((.))\n\nsource Cpp from \"foo.hpp\" (\"cid\", \"cdiv\")\nsource R from \"foo.R\" (\"rid\", \"rdiv\")\nsource Py from \"foo.py\" (\"pid\", \"pdiv\")\n\ntype Cpp => Real = \"double\"\ntype Py => Real = \"float\"\ntype R => Real = \"double\"\n\ncid :: a -> a \nrid :: a -> a \npid :: a -> a \n\n-- 1 / n, raise an error if n == 0\ncdiv :: Real -> Real\nrdiv :: Real -> Real \npdiv :: Real -> Real\n\nccerr :: Real -> Real\ncperr :: Real -> Real\ncrerr :: Real -> Real\nrcerr :: Real -> Real\nrperr :: Real -> Real\nrrerr :: Real -> Real\npcerr :: Real -> Real\npperr :: Real -> Real\nprerr :: Real -> Real\n\nccerr = cid . rid . cid . pid . cdiv\ncperr = cid . rid . pid . cid . pdiv\ncrerr = cid . rid . rid . cid . rdiv\n\nrcerr = rid . pid . cid . rid . cdiv\nrperr = rid . pid . rid . cid . pdiv\nrrerr = rid . pid . rid . cid . rdiv\n\npcerr = pid . cid . pid . rid . cdiv\npperr = pid . rid . cid . rid . pdiv\nprerr = pid . cid . rid . pid . rdiv\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 6 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus poolsgc\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-1/exp.txt",
    "content": "30\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-1/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nbar y = (*) (1.0 + y)\n\nfoo x = bar 4.0 x\n\n--   \\x -> (\\y -> mul (add 1 y)) y x\n--   \\x -> (\\y -> \\z -> mul (add 1 y) z) y x\n\n-- This expression enters the typechecker (sans alternate implementations):\n--   \\x -> (\\y -> mul (add 1 y)) y x\n-- Given `mul (add 1 y) |- Real -> Real`, and given two arguments are passed to\n-- it, we should rewrite the expression as:\n--   \\x -> (\\y x -> mul (add 1 y) x) y x\n-- The first lambda layer isn't relevant, so just rewrite this:\n--   (\\y -> mul (add 1 y)) y x\n-- As\n--   (\\y x -> mul (add 1 y) x) y x\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus bar 1.0 2.0 3.0 4.0 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus poolsgc\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-2/exp.txt",
    "content": "10\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-2/main.loc",
    "content": "module main (bar)\n\nimport root-py\n\nbar :: Real -> Real -> Real -> Real -> Real\nbar x y z = (+) x . (+) y . (+) z\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo0 2 > obs.txt 2> obs.err\n\t./nexus foo1 2 2>> obs.err  >> obs.txt\n\t./nexus foo2 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus poolsgc\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-3/exp.txt",
    "content": "[7,9,11]\n[7,9,11]\n[7,9,11]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-3/main.loc",
    "content": "module main (foo0, foo1, foo2)\n\nimport root-py (zipWith, Integral, Real, List)\n\nbar x y z = x + y + z\n\nfoo0 a = zipWith (        bar a    ) [1.0, 2.0, 3.0] [4.0, 5.0, 6.0]\nfoo1 a = zipWith (\\x   -> bar a x  ) [1.0, 2.0, 3.0] [4.0, 5.0, 6.0]\nfoo2 a = zipWith (\\x y -> bar a x y) [1.0, 2.0, 3.0] [4.0, 5.0, 6.0]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo \"[1.0,2.0,3.0]\" > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-4/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-4/main.loc",
    "content": "module main (foo)\n\nimport root-py (Functor, Integral, List, Real)\n\nfoo :: [Real] -> [Real]\nfoo = map ((+) 1.0)\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo0 \"[1.0,2.0]\" \"[3.0,4.0]\" >  obs.txt\n\t./nexus foo1 \"[1.0,2.0]\" \"[3.0,4.0]\" 2>> obs.err  >> obs.txt\n\t./nexus foo2 \"[1.0,2.0]\" \"[3.0,4.0]\" 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-5/exp.txt",
    "content": "[4,6]\n[4,6]\n[4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-5/main.loc",
    "content": "module main (foo0, foo1, foo2)\n\nimport root-cpp (zipWith, Integral, Real, List)\n\nfoo0 :: [Real] -> [Real] -> [Real]\nfoo0 = zipWith (+)\n\nfoo1 :: [Real] -> [Real] -> [Real]\nfoo1 xs = zipWith (+) xs\n\nfoo2 :: [Real] -> [Real] -> [Real]\nfoo2 xs ys = zipWith (+) xs ys\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-6/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 6 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-6/exp.txt",
    "content": "15.4\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-6/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nbar :: Real -> Real\nbar = (+) (4.2 * 2.0)\n\nfoo :: Real -> Real\nfoo x = bar (1.0 + x)\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-7/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 1 [2,3,4] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-7/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-7/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\ntype Py => Real = \"float\"\ntype Py => (List a) = \"list\" a\n\nfoo :: Real -> [Real] -> [Real]\nfoo x = map id\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 23 35 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-cpp/exp.txt",
    "content": "[24,true]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-cpp/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <functional>\n#include <type_traits>\n#include <utility>\n\ntemplate<class A, class B, class C, class F>\nauto onThree(F f, const std::tuple<A, B, C>& x) -> std::invoke_result_t<F, A, B, C> {\n    return f(std::get<0>(x), std::get<1>(x), std::get<2>(x));\n}\n\nint inc(int x){\n    return x + 1;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-cpp/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"onThree\", \"inc\")\n\nonThree :: (a -> b -> c -> d) -> (a, b, c) -> d\n\ninc :: Int -> Int\n\nfoo x y = onThree (\\a b c -> (inc x, b)) (inc y, True, \"hi\")\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 23 45 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-py/exp.txt",
    "content": "[24,true]\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-py/foo.py",
    "content": "def onThree(f, x):\n    return f(x[0], x[1], x[2])\n\ndef inc(x):\n    return x + 1\n"
  },
  {
    "path": "test-suite/golden-tests/eta-reduction-8-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nsource Py from \"foo.py\" (\"onThree\" as onThree, \"inc\")\n\nonThree :: (a -> b -> c -> d) -> (a, b, c) -> d\ninc :: Int -> Int\n\nfoo x y = onThree (\\a b c -> (inc x, b)) (inc y, True, \"hi\")\n"
  },
  {
    "path": "test-suite/golden-tests/eval-restrict-source/Makefile",
    "content": "all:\n\trm -f obs.txt obs.err build.err\n\tmorloc eval 'source Py from \"x.py\" (\"f\" as f)' 2> build.err || true\n\tgrep -c \"source statements are not allowed in eval mode\" build.err > obs.txt\n\tmorloc eval 'class Foo a where; foo :: a -> a' 2>> build.err || true\n\tgrep -c \"class declarations are not allowed in eval mode\" build.err >> obs.txt\n\tmorloc eval 'instance Foo Int where; source Py from \"x.py\" (\"foo\" as foo)' 2>> build.err || true\n\tgrep -c \"instance declarations are not allowed in eval mode\" build.err >> obs.txt\n\nclean:\n\trm -f obs.err build.err\n"
  },
  {
    "path": "test-suite/golden-tests/eval-restrict-source/exp.txt",
    "content": "1\n1\n1\n"
  },
  {
    "path": "test-suite/golden-tests/feature-integration-1/Makefile",
    "content": "all:\n\tmorloc make -o nexus main.loc\n\t./nexus test > obs.txt\n\nclean:\n\trm -rf __pycache__ nexus log\n"
  },
  {
    "path": "test-suite/golden-tests/feature-integration-1/exp.txt",
    "content": "[true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/feature-integration-1/main.loc",
    "content": "module main (test)\n\nimport root-py\n\n-- No-argument declarations combining features\n\nval1 = .1 (10, 20)\nval2 = .(.1, .0) (10, 20)\nval3 = (_, 42) 7\nval4 = .1 ((_, 42) 7)\nadd5 = (+) 5\nneg_add5 = neg . (+) 5\nget_fst = .0\ninc = (+) _ 1\nneg_snd = neg . .1\nadd10_fst = (+) 10 . .0\n\n-- Declarations with arguments combining features\n\nget_second x = .1 x\nset_first v pair = .(.0 = v) pair\npair_with x = (_, x)\nextract_neg x = neg (g x) where g = .1\napply_template x = f 10 where f = (_, x)\ntransform x = h x where h = neg . .0\n\n-- Map/fold with combined features\n\nneg_seconds xs = map (neg . .1) xs\n\ntest =\n  [ val1 == 20\n  , val2 == (20, 10)\n  , val3 == (7, 42)\n  , val4 == 42\n  , add5 3 == 8\n  , neg_add5 3 == neg 8\n  , get_fst (100, 200) == 100\n  , inc 5 == 6\n  , neg_snd (3, 7) == neg 7\n  , add10_fst (5, 20) == 15\n  , get_second (1, 2) == 2\n  , set_first 99 (1, 2) == (99, 2)\n  , pair_with 42 7 == (7, 42)\n  , extract_neg (3, 7) == neg 7\n  , apply_template 42 == (10, 42)\n  , transform (5, 20) == neg 5\n  , neg_seconds [(1, 2), (3, 4)] == [neg 2, neg 4]\n  , map (_, 0) [1, 2, 3] == [(1, 0), (2, 0), (3, 0)]\n  , map .(.1 = 0) [(1, 2), (3, 4)] == [(1, 0), (3, 0)]\n  , map ((+) _ 1) [1, 2, 3] == [2, 3, 4]\n  , fold (\\acc x -> acc + .1 x) 0 [(1, 10), (2, 20), (3, 30)] == 60\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-c/.gitignore",
    "content": "a.json\nb.json\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\tbash run.sh > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools a.json b.json\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-c/exp.txt",
    "content": "[\"ab\",2]\n[\"ab\",2]\n[\"ab\",2]\n[\"ab\",2]\n[\"ab\",2]\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-c/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <string>\n\nstd::string cat(std::string x, std::string y){\n  std::string z = x + y;\n  return z;\n}\n\nint len(std::string x){\n  return x.size();\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-c/main.loc",
    "content": "module main (foo)\n\nsource Cpp from \"foo.hpp\" (\"cat\", \"len\")\n\ntype Cpp => Str = \"std::string\"\ntype Cpp => Int = \"int\"\ntype Cpp => Tuple2 a b = \"std::tuple<$1,$2>\" a b\ncat :: Str -> Str -> Str\nlen :: Str -> Int\n\nfoo x y = (v, l) where\n  v = cat x y\n  l = len v\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-c/run.sh",
    "content": "#!/usr/bin/env bash\necho '\"a\"' > a.json\necho '\"b\"' > b.json\n./nexus foo a.json b.json\n./nexus foo '\"a\"' b.json\n./nexus foo '\"a\"' '\"b\"'\n./nexus foo '\"a\"' <(echo '\"b\"')\necho '\"b\"' | ./nexus foo '\"a\"' /dev/stdin\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-py/.gitignore",
    "content": "a.json\nb.json\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\tbash run.sh > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ a.json b.json\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-py/exp.txt",
    "content": "[\"<ab>\",4]\n[\"<ab>\",4]\n[\"<ab>\",4]\n[\"<ab>\",4]\n[\"<ab>\",4]\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-py/foo.py",
    "content": "def cat(xs):\n    return \"\".join(xs)\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-py/main.loc",
    "content": "module main (foo)\n\nsource Py (\"len\")\nsource Py from \"foo.py\" (\"cat\")\n\ntype Py => Str = \"str\"\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\ntype Py => Tuple2 a b = \"tuple\" a b\ncat :: [Str] -> Str\nlen :: Str -> Int\n\nfoo x y = (v, l) where\n  v = cat [\"<\", x, y, \">\"]\n  l = len v\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-py/run.sh",
    "content": "#!/usr/bin/env bash\necho '\"a\"' > a.json\necho '\"b\"' > b.json\n./nexus foo a.json b.json\n./nexus foo '\"a\"' b.json\n./nexus foo '\"a\"' '\"b\"'\n./nexus foo '\"a\"' <(echo '\"b\"')\necho '\"b\"' | ./nexus foo '\"a\"' /dev/stdin\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-r/.gitignore",
    "content": "a.json\nb.json\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\tbash run.sh > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools a.json b.json\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-r/exp.txt",
    "content": "[\"<ab>\",4]\n[\"<ab>\",4]\n[\"<ab>\",4]\n[\"<ab>\",4]\n[\"<ab>\",4]\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-r/foo.R",
    "content": "concat <- function(xs){\n    paste(xs, collapse=\"\")\n}\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-r/main.loc",
    "content": "module main (foo)\n\nsource R from \"foo.R\" (\"concat\" as cat, \"nchar\" as len)\n\ntype R => Str = \"character\"\ntype R => Int = \"integer\"\ntype R => Tuple2 a b = \"list\" a b\ntype R => List a = \"list\" a\n\ncat :: [Str] -> Str\nlen :: Str -> Int\n\nfoo x y = (v, l) where\n  v = cat [\"<\", x, y, \">\"]\n  l = len v\n"
  },
  {
    "path": "test-suite/golden-tests/file-input-r/run.sh",
    "content": "#!/usr/bin/env bash\necho '\"a\"' > a.json\necho '\"b\"' > b.json\n./nexus foo a.json b.json\n./nexus foo '\"a\"' b.json\n./nexus foo '\"a\"' '\"b\"'\n./nexus foo '\"a\"' <(echo '\"b\"')\necho '\"b\"' | ./nexus foo '\"a\"' /dev/stdin\n"
  },
  {
    "path": "test-suite/golden-tests/force-inline-basic/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- forceTuple ---\" > obs.txt\n\t./nexus forceTuple >> obs.txt 2>> obs.err\n\techo \"--- forceApp ---\" >> obs.txt\n\t./nexus forceApp >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/force-inline-basic/exp.txt",
    "content": "--- forceTuple ---\nEVAL 2\nEVAL 1\n[2,4]\n--- forceApp ---\nEVAL 4\nEVAL 3\n14\n"
  },
  {
    "path": "test-suite/golden-tests/force-inline-basic/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/force-inline-basic/main.loc",
    "content": "-- Test inline force operator (!) inside do-blocks\nmodule main (forceTuple, forceApp)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\", \"add\")\n\ntype Cpp => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\nadd :: Int -> Int -> Int\n\n-- Force applied expressions in a tuple\nforceTuple :: <IO> (Int, Int)\nforceTuple = do (!(sideEffect 1), !(sideEffect 2))\n\n-- Force in function args\nforceApp :: <IO> Int\nforceApp = do add !(sideEffect 3) !(sideEffect 4)\n"
  },
  {
    "path": "test-suite/golden-tests/formatting/.gitignore",
    "content": "z*\n"
  },
  {
    "path": "test-suite/golden-tests/formatting/Makefile",
    "content": "all:\n\trm -f *.err obs.txt z*\n\tmorloc make -o nexus main.loc 2> build.err\n\t# 0\n\t./nexus -f json f0 '[1,2,3,4,5,6]' > z0.json\n\t./nexus -f mpk f0 z0.json > z0.mpk\n\t./nexus -f voidstar f0 z0.mpk > z0.dat\n\t./nexus f0 z0.mpk > obs.txt 2> obs.err\n\t# 1\n\t./nexus -f json f1 '[[\"abc\",5],[\"def\",6]]' > z1.json\n\t./nexus -f mpk f1 z1.json > z1.mpk\n\t./nexus -f voidstar f1 z1.mpk > z1.dat\n\t./nexus f1 z1.mpk 2>> obs.err  >> obs.txt\n\t# 2\n\t./nexus -f json f2 '[[\"abc\",\"def\"],[5,6]]' > z2.json\n\t./nexus -f mpk f2 z2.json > z2.mpk\n\t./nexus -f voidstar f2 z2.mpk > z2.dat\n\t./nexus f2 z2.mpk 2>> obs.err  >> obs.txt\n\t# 3\n\t./nexus -f json f3 '[[[[9,9,9],[1,2],7],[[4,4,4,4],[3,4,5],8]],9]' > z3.json\n\t./nexus -f mpk f3 z3.json > z3.mpk\n\t./nexus -f voidstar f3 z3.mpk > z3.dat\n\t./nexus f3 z3.mpk 2>> obs.err  >> obs.txt\n\t# 4\n\t./nexus -f json f4 '[[[6,7]]]' > z4.json\n\t./nexus -f mpk f4 z4.json > z4.mpk\n\t./nexus -f voidstar f4 z4.mpk > z4.dat\n\t./nexus f4 z4.mpk 2>> obs.err  >> obs.txt\n\t# 5\n\t./nexus -f json f5 '[[[[\"abc\",\"defg\"],[1,2],7],[[\"hijkl\",\"mnopqrs\"],[3,4,5],8]],9]' > z5.json\n\t./nexus -f mpk f5 z5.json > z5.mpk\n\t./nexus -f voidstar f5 z5.mpk > z5.dat\n\t./nexus f5 z5.mpk 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools z* __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/formatting/exp.txt",
    "content": "[1,2,3,4,5,6]\n[[\"abc\",5],[\"def\",6]]\n[[\"abc\",\"def\"],[5,6]]\n[[[[9,9,9],[1,2],7],[[4,4,4,4],[3,4,5],8]],9]\n[[[6,7]]]\n[[[[\"abc\",\"defg\"],[1,2],7],[[\"hijkl\",\"mnopqrs\"],[3,4,5],8]],9]\n"
  },
  {
    "path": "test-suite/golden-tests/formatting/foo.py",
    "content": "def foo(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/formatting/main.loc",
    "content": "module main (f0, f1, f2, f3, f4, f5)\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => Tuple2 a b = \"tuple\" a b\ntype Py => Tuple3 a b c = \"tuple\" a b c\ntype Py => List a = \"list\" a\n\nsource Py from \"foo.py\"\n (  \"foo\" as f0\n ,  \"foo\" as f1\n ,  \"foo\" as f2\n ,  \"foo\" as f3\n ,  \"foo\" as f4\n ,  \"foo\" as f5\n )\n\n--' yolo foo foo 0\nf0 :: [Int]\n   -> [Int]\n\n--' yolo foo foo 1\nf1 :: \n    [(Str,Int)] ->\n    [(Str,Int)]\n\n--' yolo foo foo 2\nf2 :: ([Str],[Int])\n   -> ([Str],[Int])\n\n--' yolo foo foo 3\nf3 :: ([([Int],[Int],Int)],Int) -> ([([Int],[Int],Int)],Int)\n\n--' yolo foo foo 4\nf4 :: [[[Int]]] -> [[[Int]]]\n\n--' yolo foo foo 5\nf5 :: ([([Str],[Int],Int)],Int) -> ([([Str],[Int],Int)],Int)\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 17 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-1/bar.py",
    "content": "#  f :: Int -> Int -> Bool\n#  g :: Bool -> Bool -> Bool\n#  bar :: Tools -> Int -> Int\ndef bar(tools, x):\n    p1 = tools[\"f\"](x, 2)\n    p2 = tools[\"g\"](True, p1 > 2)\n    return p2\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-1/exp.txt",
    "content": "2\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-1/foo.hpp",
    "content": "// funf :: Int -> Int -> Int\nint funf(int x, int y){\n  return 2 * x + 3 * y;\n}\n\n// fung :: Bool -> Bool -> Int\nint fung(bool x, bool y){\n  return x + y;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-1/main.loc",
    "content": "module main (foo)\n\nimport root-py\nimport root-cpp\n\nrecord Tools where\n  f :: Int -> Int -> Int\n  g :: Bool -> Bool -> Int\nrecord Py => Tools = \"dict\"\nrecord Cpp => Tools = \"struct\"\n\nsource Cpp from \"foo.hpp\" (\"funf\", \"fung\")\nfunf :: Int -> Int -> Int\nfung :: Bool -> Bool -> Int\n\ntools :: Tools\ntools = { f = funf, g = fung }\n\nsource Py from \"bar.py\" (\"bar\")\nbar :: Tools -> Int -> Int\n\nfoo = bar tools\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 38 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-2/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-2/foo.py",
    "content": "def bar(funcs, x):\n    return funcs[\"f\"](x)\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-2/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nrecord Funs where\n  f :: Int -> Int\n  g :: Int -> Int\nrecord Py => Funs = \"dict\"\n\nfuns = { f = (+) 4, g = (+) 1 }\n\nbar :: Funs -> Int -> Int\n\nfoo = bar funs\n\nsource Py from \"foo.py\" (\"bar\")\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3a/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 37 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3a/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3a/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nrecord Funs where\n  f :: Int -> Int\n  g :: Int -> Int\nrecord Py => Funs = \"dict\"\n\nfuns :: Funs\nfuns = { f = (+) 4, g = (+) 1 }\n\nfoo :: Int -> Int\nfoo x = (.f funs) (1 + x)\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3b/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 33 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3b/exp.txt",
    "content": "67\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3b/main.hpp",
    "content": "int funf(int x){\n  return 2*x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3b/main.loc",
    "content": "module main (foo)\n\nimport root-py\nimport root-cpp\n\nrecord Funs where\n  f :: Int -> Int\n  g :: Int -> Int\nrecord Py => Funs = \"dict\"\nrecord Cpp => Funs = \"struct\"\n\nsource Cpp from \"main.hpp\" (\"funf\")\nfunf :: Int -> Int\n\nsource Py  from \"main.py\"  (\"fung\", \"doit\")\nfung :: Int -> Int\ndoit :: (Int -> Int) -> Int -> Int\n\nfuns :: Funs\nfuns = { f = funf, g = fung }\n\nfoo :: Int -> Int\nfoo = doit (.f funs)\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3b/main.py",
    "content": "def fung(x):\n    return 3*x\n\ndef doit(f, x):\n    return f(x) + 1\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 21 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3c/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3c/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nrecord Funs where\n  f :: Int -> Int\n  g :: Int -> Int\nrecord Py => Funs = \"dict\"\n\nbar :: Int -> Int -> Funs\nsource Py from \"main.py\" (\"bar\")\n\nfoo :: Int -> Int -> Int\nfoo x y = (.f (bar x 9)) y\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3c/main.py",
    "content": "def funf(a):\n    def f(x):\n        return a*x\n    return f\n\ndef fung(b):\n    def g(x):\n        return b*x\n    return g\n\ndef bar(a, b):\n    return { \"f\": funf(a), \"g\": fung(b) }\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t> obs.txt\n\t./nexus bar true true 5 2>> obs.err  >> obs.txt # 10\n\t./nexus bar false true 5 2>> obs.err  >> obs.txt # 16\n\t./nexus bar false false 5 2>> obs.err  >> obs.txt # 15\n\t./nexus baz true 5 true 2>> obs.err  >> obs.txt # 9\n\t./nexus foo true 5 2>> obs.err  >> obs.txt # 20\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d/exp.txt",
    "content": "10\n16\n15\n9\n14\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d/main.loc",
    "content": "module main (foo, bar, baz)\n\nimport root-py\n\nbar :: Bool -> Bool -> Int -> Int\nbaz :: Bool -> Int -> Bool -> Int\nsource Py from \"main.py\" where\n  --' takes two bools and returns a function of an integer\n  --' rsize: 2\n  bar\n  --' takes one bool and returns a fuction of an int that returns a function of a bool\n  --' rsize: 1 1\n  baz\n\nfoo :: Bool -> Int -> Int\nfoo x y = baz x (bar x x y) x\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d/main.py",
    "content": "def bar(cond1, cond2):\n    if cond1:\n        return lambda x: 2*x\n    else:\n        return lambda x: 3*x + cond2\n\ndef baz(cond1):\n    def f1(num):\n        def f2(cond2):\n            return cond1 + num + 3*cond2\n        return f2\n    return f1\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t> obs.txt\n\t./nexus bar true true 5 2>> obs.err  >> obs.txt # 10\n\t./nexus bar false true 5 2>> obs.err  >> obs.txt # 16\n\t./nexus bar false false 5 2>> obs.err  >> obs.txt # 15\n\t./nexus baz true 5 true 2>> obs.err  >> obs.txt # 9\n\t./nexus foo true 5 2>> obs.err  >> obs.txt # 14\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-c/exp.txt",
    "content": "10\n16\n15\n9\n14\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-c/main.hpp",
    "content": "#include <functional>\n\nstd::function<int(int)> bar(bool cond1, bool cond2) {\n    if (cond1) {\n        return [](int x) { return 2 * x; };\n    } else {\n        return [cond2](int x) { return 3 * x + (int)cond2; };\n    }\n}\n\nstd::function<std::function<int(bool)>(int)> baz(bool cond1) {\n    auto f1 = [cond1](int num) {\n        auto f2 = [cond1, num](bool cond2) {\n            return cond1 + num + 3 * (int)cond2;\n        };\n        return f2;\n    };\n    return f1;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-c/main.loc",
    "content": "module main (foo, bar, baz)\n\nimport root-cpp\n\nbar :: Bool -> Bool -> Int -> Int\nbaz :: Bool -> Int -> Bool -> Int\nsource Cpp from \"main.hpp\" where\n  --' takes two bools and returns a function of an integer\n  --' rsize: 2\n  bar\n  --' takes one bool and returns a fuction of an int that returns a function of a bool\n  --' rsize: 1 1\n  baz\n\nfoo :: Bool -> Int -> Int\nfoo x y = baz x (bar x x y) x\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t> obs.txt\n\t./nexus bar true true 5 2>> obs.err  >> obs.txt # 10\n\t./nexus bar false true 5 2>> obs.err  >> obs.txt # 16\n\t./nexus bar false false 5 2>> obs.err  >> obs.txt # 15\n\t./nexus baz true 5 true 2>> obs.err  >> obs.txt # 9\n\t./nexus foo true 5 2>> obs.err  >> obs.txt # 20\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-py/exp.txt",
    "content": "10\n16\n15\n9\n14\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-py/main.loc",
    "content": "module main (foo, bar, baz)\n\nimport root-py\n\nbar :: Bool -> Bool -> Int -> Int\nbaz :: Bool -> Int -> Bool -> Int\nsource Py from \"main.py\" where\n  --' takes two bools and returns a function of an integer\n  --' rsize: 2\n  bar\n  --' takes one bool and returns a fuction of an int that returns a function of a bool\n  --' rsize: 1 1\n  baz\n\nfoo :: Bool -> Int -> Int\nfoo x y = baz x (bar x x y) x\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-py/main.py",
    "content": "def bar(cond1, cond2):\n    if cond1:\n        return lambda x: 2*x\n    else:\n        return lambda x: 3*x + cond2\n\ndef baz(cond1):\n    def f1(num):\n        def f2(cond2):\n            return cond1 + num + 3*cond2\n        return f2\n    return f1\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t> obs.txt\n\t./nexus bar true true 5 2>> obs.err  >> obs.txt # 10\n\t./nexus bar false true 5 2>> obs.err  >> obs.txt # 16\n\t./nexus bar false false 5 2>> obs.err  >> obs.txt # 15\n\t./nexus baz true 5 true 2>> obs.err  >> obs.txt # 9\n\t./nexus foo true 5 2>> obs.err  >> obs.txt # 20\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-r/exp.txt",
    "content": "10\n16\n15\n9\n14\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-r/main.R",
    "content": "bar <- function(cond1, cond2){\n    if(cond1){\n        function(x){ 2*x }\n    } else {\n        function(x){ 3*x + cond2 }\n    }\n}\n\nbaz <- function(cond1){\n    function(num){\n        function(cond2){\n            cond1 + num + 3*cond2\n        }\n    }\n}\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3d-r/main.loc",
    "content": "module main (foo, bar, baz)\n\nimport root-r\n\nbar :: Bool -> Bool -> Int -> Int\nbaz :: Bool -> Int -> Bool -> Int\nsource R from \"main.R\" where\n  --' takes two bools and returns a function of an integer\n  --' rsize: 2\n  bar\n  --' takes one bool and returns a fuction of an int that returns a function of a bool\n  --' rsize: 1 1\n  baz\n\nfoo :: Bool -> Int -> Int\nfoo x y = baz x (bar x x y) x\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3e/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo true 3 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3e/exp.txt",
    "content": "[6,9]\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3e/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nbar :: Bool -> [(Int -> Int)]\nsource Py from \"main.py\" (\"bar\")\n\nfoo :: Bool -> Int -> [Int]\nfoo x y = map (\\f -> f y) (bar x)\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3e/main.py",
    "content": "def bar(cond):\n    if cond:\n        return [lambda x: 2*x, lambda x: 3*x]\n    else:\n        return [lambda x: 4*x, lambda x: 5*x]\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3f/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo true 3 > obs.txt 2> obs.err\n\t./nexus baz true 4 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3f/exp.txt",
    "content": "6\n[6,8]\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3f/main.loc",
    "content": "module main (foo, baz)\n\nimport root-py\n\nbar :: Bool -> (Int -> Int, Bool -> Int)\nsource Py from \"main.py\" (\"bar\", \"fst\")\n\nfst :: (a, b) -> a\n\nfoo :: Bool -> Int -> Int\nfoo x y = (fst (bar x)) y\n\nbaz :: Bool -> Int -> (Int, Int)\nbaz x y = ((.1 fs) x, (.0 fs) y) where\n  fs = bar x\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-3f/main.py",
    "content": "#  bar :: Bool -> (Int -> Int, Bool -> Int)\ndef bar(cond):\n    return (lambda x: 2 * x, lambda x: x + 5)\n\ndef fst(xs):\n    return xs[0]\n\ndef snd(xs):\n    return xs[1]\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-4/exp.txt",
    "content": "[6,7]\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-4/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfuns :: [(Int -> Int)]\nfuns = [ (+) 1, (+) 2 ]\n\nsource Py from \"main.py\" (\"runAll\")\nrunAll :: a -> [(a -> b)] -> [b]\n\nfoo x = runAll x funs\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-4/main.py",
    "content": "def runAll(x, fs):\n    ys = []\n    for f in fs:\n        ys.append(f(x))\n    return ys\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-5/exp.txt",
    "content": "[6,7]\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-5/main.loc",
    "content": "module main (foo)\n\nimport root-r\nimport root-py\nimport root-cpp\n\nfuns :: [(Int -> Int)]\nfuns = [ idcpp . (+) 1, idr . (+) 2 ]\n\nsource Py from \"main.py\" (\"runAll\")\nrunAll :: a -> [(a -> b)] -> [b]\n\nfoo x = runAll x funs\n"
  },
  {
    "path": "test-suite/golden-tests/functional-data-5/main.py",
    "content": "def runAll(x, fs):\n    ys = []\n    for f in fs:\n        ys.append(f(x))\n    return ys\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-1/exp.txt",
    "content": "[1,2]\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-1/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\ntemplate <class A, class F>\nauto map(F f, const std::vector<A> &xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys(xs.size());\n    std::transform(xs.begin(), xs.end(), ys.begin(), f);\n    return ys;\n}\n\ntemplate <class A, class B>\nB snd(const std::tuple<A,B> &x){\n    return(std::get<1>(x));\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-1/main.loc",
    "content": "module main (foo)\n\ntype Cpp => (List a) = \"std::vector<$1>\" a\ntype Cpp => Str = \"std::string\"\ntype Cpp => Real = \"double\"\ntype Cpp => (Tuple2 a b) = \"std::tuple<$1,$2>\" a b\n\nsource Cpp from \"foo.hpp\" (\"map\" as cppmap, \"snd\" as cppsnd)\n\ncppmap :: (a -> b) -> [a] -> [b]\ncppsnd :: (a, b) -> b\n\nfoo x = cppmap cppsnd [(\"hi\", 1.0), (\"bi\", x)]\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2.4 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-2/exp.txt",
    "content": "[1,2]\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-2/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\ntemplate <class A, class F>\nauto map(F f, const std::vector<A> &xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys(xs.size());\n    std::transform(xs.begin(), xs.end(), ys.begin(), f);\n    return ys;\n}\n\ntemplate <class A, class B>\nB bar(B dummy, const std::tuple<A,B> &x){\n    return(std::get<1>(x));\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/generic-hofs-2/main.loc",
    "content": "module main (foo)\n\ntype Cpp => (List a) = \"std::vector<$1>\" a\ntype Cpp => Str = \"std::string\"\ntype Cpp => Real = \"double\"\ntype Cpp => (Tuple2 a b) = \"std::tuple<$1,$2>\" a b\n\nsource Cpp from \"foo.hpp\" (\"map\", \"bar\")\n\nmap :: (a -> b) -> [a] -> [b]\n\n-- the `forall a b` sets the order of qualifiers and ensures that the C++\n-- template arguments are generated in the right order.\nbar :: b -> (a, b) -> b\n\nfoo x = map (bar x) [(\"hi\", 1.0), (\"bi\", 2.0)]\n"
  },
  {
    "path": "test-suite/golden-tests/guards-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus myabs 5 > obs.txt 2> obs.err\n\t./nexus myabs 0 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 5 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 1 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 10 >> obs.txt 2>> obs.err\n\t./nexus classify 5 >> obs.txt 2>> obs.err\n\t./nexus classify 50 >> obs.txt 2>> obs.err\n\t./nexus classify 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-cpp/exp.txt",
    "content": "5\n0\n5\n2\n8\n\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-cpp/main.loc",
    "content": "module main (myabs, clamp, classify)\n\nimport root-cpp\n\nmyabs :: Int -> Int\nmyabs x\n  ? x >= 0 = x\n  : neg x\n\nclamp :: Int -> Int -> Int -> Int\nclamp lo hi x\n  ? x < lo = lo\n  ? x > hi = hi\n  : x\n\nclassify :: Int -> Str\nclassify x\n  ? x > big = \"big\"\n  ? x > small = \"medium\"\n  : \"small\"\n  where\n    big = 100\n    small = 10\n"
  },
  {
    "path": "test-suite/golden-tests/guards-inline-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus classify 5 > obs.txt 2> obs.err\n\t./nexus classify 50 >> obs.txt 2>> obs.err\n\t./nexus classify 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-inline-cpp/exp.txt",
    "content": "\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-inline-cpp/main.loc",
    "content": "module main (classify)\n\nimport root-cpp\n\nclassify :: Int -> Str\nclassify x = (? x > 100 = \"big\" ? x > 10 = \"medium\" : \"small\")\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus absLet 5 > obs.txt 2> obs.err\n\t./nexus absLet -- -3 >> obs.txt 2>> obs.err\n\t./nexus absLet 0 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 5 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 50 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-cpp/exp.txt",
    "content": "5\n3\n0\n\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-cpp/main.loc",
    "content": "module main (absLet, classifyDo)\n\nimport root-cpp\n\nabsLet :: Int -> Int\nabsLet x =\n  let result ? x >= 0 = x\n             : neg x\n  in result\n\nclassifyDo :: Int -> <IO> Str\nclassifyDo x = do\n  let label ? x > 100 = \"big\"\n            ? x > 10 = \"medium\"\n            : \"small\"\n  label\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus absLet 5 > obs.txt 2> obs.err\n\t./nexus absLet -- -3 >> obs.txt 2>> obs.err\n\t./nexus absLet 0 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 5 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 50 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-py/exp.txt",
    "content": "5\n3\n0\n\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-py/main.loc",
    "content": "module main (absLet, classifyDo)\n\nimport root-py\n\nabsLet :: Int -> Int\nabsLet x =\n  let result ? x >= 0 = x\n             : neg x\n  in result\n\nclassifyDo :: Int -> <IO> Str\nclassifyDo x = do\n  let label ? x > 100 = \"big\"\n            ? x > 10 = \"medium\"\n            : \"small\"\n  label\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus absLet 5 > obs.txt 2> obs.err\n\t./nexus absLet -- -3 >> obs.txt 2>> obs.err\n\t./nexus absLet 0 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 5 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 50 >> obs.txt 2>> obs.err\n\t./nexus classifyDo 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-r/exp.txt",
    "content": "5\n3\n0\n\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-let-r/main.loc",
    "content": "module main (absLet, classifyDo)\n\nimport root-r\n\nabsLet :: Int -> Int\nabsLet x =\n  let result ? x >= 0 = x\n             : neg x\n  in result\n\nclassifyDo :: Int -> <IO> Str\nclassifyDo x = do\n  let label ? x > 100 = \"big\"\n            ? x > 10 = \"medium\"\n            : \"small\"\n  label\n"
  },
  {
    "path": "test-suite/golden-tests/guards-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus myabs 5 > obs.txt 2> obs.err\n\t./nexus myabs 0 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 5 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 1 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 10 >> obs.txt 2>> obs.err\n\t./nexus classify 5 >> obs.txt 2>> obs.err\n\t./nexus classify 50 >> obs.txt 2>> obs.err\n\t./nexus classify 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-py/exp.txt",
    "content": "5\n0\n5\n2\n8\n\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-py/main.loc",
    "content": "module main (myabs, clamp, classify)\n\nimport root-py\n\nmyabs :: Int -> Int\nmyabs x\n  ? x >= 0 = x\n  : neg x\n\nclamp :: Int -> Int -> Int -> Int\nclamp lo hi x\n  ? x < lo = lo\n  ? x > hi = hi\n  : x\n\nclassify :: Int -> Str\nclassify x\n  ? x > big = \"big\"\n  ? x > small = \"medium\"\n  : \"small\"\n  where\n    big = 100\n    small = 10\n"
  },
  {
    "path": "test-suite/golden-tests/guards-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus myabs 5 > obs.txt 2> obs.err\n\t./nexus myabs 0 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 5 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 1 >> obs.txt 2>> obs.err\n\t./nexus clamp 2 8 10 >> obs.txt 2>> obs.err\n\t./nexus classify 5 >> obs.txt 2>> obs.err\n\t./nexus classify 50 >> obs.txt 2>> obs.err\n\t./nexus classify 200 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/guards-r/exp.txt",
    "content": "5\n0\n5\n2\n8\n\"small\"\n\"medium\"\n\"big\"\n"
  },
  {
    "path": "test-suite/golden-tests/guards-r/main.loc",
    "content": "module main (myabs, clamp, classify)\n\nimport root-r\n\nmyabs :: Int -> Int\nmyabs x\n  ? x >= 0 = x\n  : neg x\n\nclamp :: Int -> Int -> Int -> Int\nclamp lo hi x\n  ? x < lo = lo\n  ? x > hi = hi\n  : x\n\nclassify :: Int -> Str\nclassify x\n  ? x > big = \"big\"\n  ? x > small = \"medium\"\n  : \"small\"\n  where\n    big = 100\n    small = 10\n"
  },
  {
    "path": "test-suite/golden-tests/higher-kinded-types/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus foo1 12 2>> obs.err  >> obs.txt\n\t./nexus foo2 12 2>> obs.err  >> obs.txt\n\t./nexus foo3 12 2>> obs.err  >> obs.txt\n\t./nexus foo4 12 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/higher-kinded-types/exp.txt",
    "content": "[\"1\",\"2\",\"12\"]\n[\"1\",\"2\",\"12\"]\n[\"1\",\"2\",\"12\"]\n[\"1\",\"2\",\"12\"]\n"
  },
  {
    "path": "test-suite/golden-tests/higher-kinded-types/foo.py",
    "content": "def morloc_map(f, xs):\n    return list(map(f, xs))\n\ndef testshow(x):\n    return str(x)\n"
  },
  {
    "path": "test-suite/golden-tests/higher-kinded-types/main.loc",
    "content": "module main (foo1, foo2, foo3, foo4)\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\nsource Py from \"foo.py\" (\"morloc_map\" as listMap, \"morloc_map\" as functorMap)\nlistMap :: (a -> b) -> [a] -> [b]\nfunctorMap :: (a -> b) -> f a -> f b\n\nclass TestFunctor f where\n  classFMap :: (a -> b) -> f a -> f b\n\ninstance TestFunctor List where\n  source Py from \"foo.py\" (\"morloc_map\" as classFMap)\n\n\nclass TestList f where\n  classMap :: (a -> b) -> f a -> f b\n\ninstance TestList List where\n  source Py from \"foo.py\" (\"morloc_map\" as classMap)\n\nsource Py from \"foo.py\" (\"testshow\")\ntestshow :: Int -> Str\n\nfoo1 x = listMap testshow [1,2,x]\nfoo2 x = functorMap testshow [1,2,x]\nfoo3 x = classMap testshow [1,2,x]\nfoo4 x = classFMap testshow [1,2,x]\n"
  },
  {
    "path": "test-suite/golden-tests/higher-kinded-types/notes",
    "content": "morloc: see: AppU (VarU (TV {unTVar = \"List\"})) [VarU (TV {unTVar = \"Int\"})]\n  bnd: fromList []\nCallStack (from HasCallStack):\n  error, called at library/Morloc/TypeEval.hs:124:24 in morloc-0.55.0-6B2a5tHeUon5TyP9WSJ7Sf:Morloc.TypeEval\n\nscope: fromList [\n  (TV {unTVar = \"Int\"},[([],VarU (TV {unTVar = \"int\"}),True)]),\n  (TV {unTVar = \"List\"},[(\n    [Left (TV {unTVar = \"a\"})],\n    AppU (VarU (TV {unTVar = \"list\"})) [VarU (TV {unTVar = \"a\"})],\n    True)]\n  )\n]\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf .R*\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/eq.py",
    "content": "import sys\n\ndef eq(x, y):\n    if x == y:\n        return True\n    else:\n        print(\"Test failed:\", file=sys.stderr)\n        print(f\"  Expected: {y!s}\", file=sys.stderr)\n        print(f\"  Observed: {x!s}\", file=sys.stderr)\n        return False\n\ndef check(xs):\n    for (msg, x) in xs:\n        if not x:\n            return False\n    return True\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\ntemplate <class A, class B, class C, class F>\nstd::vector<C> czipWith(\n        F f,\n        const std::vector<A>& xs,\n        const std::vector<B>& ys\n    )\n{\n    static_assert(std::is_invocable_r_v<C, F, A, B>, \n                  \"Function f must be callable with type A and return type B\");\n    std::size_t N = std::min(xs.size(), ys.size());\n    std::vector<C> zs(N);\n    for(std::size_t i = 0; i < N; i++){\n        zs[i] = f(xs[i], ys[i]);\n    }\n    return zs;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Tuple2 a b = \"tuple\" a b\ntype R => Tuple2 a b = \"tuple\" a b\ntype Cpp => Tuple2 a b = \"std::tuple<$1, $2>\" a b\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\ntype Cpp => Str = \"std::string\"\n\ntype Py => Bool = \"bool\"\ntype R => Bool = \"logical\"\ntype Cpp => Bool = \"bool\"\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n\ndef pzipWith(f, xs, ys):\n    return list(map(f, xs, ys))\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/fooc.loc",
    "content": "module fooc (*)\n\nimport foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \nczipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\", \"czipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \npzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\", \"pzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/foor.loc",
    "content": "module foor (*)\n\nimport foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/hofs-1/main.loc",
    "content": "module main (test)\n\nimport foor\nimport foopy\n\nsource Py from \"eq.py\" (\"eq\", \"check\")\neq :: a -> a -> Bool\ncheck :: [(Str, Bool)] -> Bool\n\nradd3 x y z = radd x (radd y z)\npadd3 x y z = padd x (padd y z)\n\ntest = check\n  [ (\"0\", eq (pzipWith radd [1,2] [5,6]) [6,8])\n  , (\"1\", eq (pzipWith (\\x y -> 1) [1,2] [5,6]) [1,1])\n  , (\"2\", eq (pzipWith (\\x y -> x) [1,2] [5,6]) [1,2])\n  , (\"3\", eq (pzipWith (\\x y -> y) [1,2] [5,6]) [5,6])\n  , (\"4\", eq (pzipWith (\\x y -> radd x 10) [1,2] [5,6]) [11,12])\n  , (\"5\", eq (pzipWith (\\x y -> radd 10 y) [1,2] [5,6]) [15,16])\n  , (\"6\", eq (pzipWith (\\x y -> radd x x) [1,2] [5,6]) [2,4])\n  , (\"7\", eq (pzipWith (\\x y -> radd y y) [1,2] [5,6]) [10,12])\n  , (\"8\", eq (pzipWith (\\x y -> radd x (radd x 5)) [1,2] [5,6]) [7,9])\n  , (\"9\", eq (pzipWith (\\x y -> radd (radd x 5) 1) [1,2] [5,6]) [7,8])\n  , (\"a\", eq (pzipWith (\\x y -> rneg (radd x y)) [1,2] [5,6]) [-6,-8])\n  , (\"b\", eq (pzipWith (\\x y -> [x,y]) [1,2] [5,6]) [[1,5],[2,6]])\n  , (\"c\", eq (pzipWith (\\x y -> (x,y)) [1,2] [5,6]) [(1,5),(2,6)])\n  , (\"d\", eq (pzipWith (\\x y -> [radd 1 x]) [1,2] [5,6]) [[2],[3]])\n  , (\"e\", eq (pzipWith (\\x y -> (x,radd 1 y)) [1,2] [5,6]) [(1,6),(2,7)])\n  -----------------------\n  , (\"f\", eq (pzipWith (radd3 10) [1,2] [5,6]) [16,18])\n  , (\"10\", eq (pzipWith (\\x y -> radd3 x y 10) [1,2] [5,6]) [16,18])\n  , (\"11\", eq (pmap (\\x -> radd3 x x 10) [1,2]) [12,14])\n  , (\"12\", eq (pzipWith (\\x y -> rneg (radd3 x y 10)) [1,2] [5,6]) [-16,-18])\n  ------ cis\n  , (\"13\", eq (pzipWith padd [1,2] [5,6]) [6,8])\n  , (\"14\", eq (pzipWith (\\x y -> 1) [1,2] [5,6]) [1,1])\n  , (\"15\", eq (pzipWith (\\x y -> x) [1,2] [5,6]) [1,2])\n  , (\"16\", eq (pzipWith (\\x y -> y) [1,2] [5,6]) [5,6])\n  , (\"17\", eq (pzipWith (\\x y -> padd x 10) [1,2] [5,6]) [11,12])\n  , (\"18\", eq (pzipWith (\\x y -> padd 10 y) [1,2] [5,6]) [15,16])\n  , (\"19\", eq (pzipWith (\\x y -> padd x x) [1,2] [5,6]) [2,4])\n  , (\"1a\", eq (pzipWith (\\x y -> padd y y) [1,2] [5,6]) [10,12])\n  , (\"1b\", eq (pzipWith (\\x y -> padd x (padd x 5)) [1,2] [5,6]) [7,9])\n  , (\"1c\", eq (pzipWith (\\x y -> padd (padd x 5) 1) [1,2] [5,6]) [7,8])\n  , (\"1d\", eq (pzipWith (\\x y -> pneg (padd x y)) [1,2] [5,6]) [-6,-8])\n  , (\"1e\", eq (pzipWith (\\x y -> [x,y]) [1,2] [5,6]) [[1,5],[2,6]])\n  , (\"20\", eq (pzipWith (\\x y -> (x,y)) [1,2] [5,6]) [(1,5),(2,6)])\n  , (\"21\", eq (pzipWith (\\x y -> [padd 1 x]) [1,2] [5,6]) [[2],[3]])\n  , (\"22\", eq (pzipWith (\\x y -> (x,padd 1 y)) [1,2] [5,6]) [(1,6),(2,7)])\n  -----------------------\n  , (\"23\", eq (pzipWith (padd3 10) [1,2] [5,6]) [16,18])\n  , (\"24\", eq (pzipWith (\\x y -> padd3 x y 10) [1,2] [5,6]) [16,18])\n  , (\"25\", eq (pmap (\\x -> padd3 x x 10) [1,2]) [12,14])\n  , (\"26\", eq (pzipWith (\\x y -> pneg (padd3 x y 10)) [1,2] [5,6]) [-16,-18])\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/holes-func/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf __pycache__ nexus log\n"
  },
  {
    "path": "test-suite/golden-tests/holes-func/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/holes-func/main.loc",
    "content": "module main (test)\n\nimport root-py\n\ntest = fold (&&) True\n  [ (==) (map ((-) _ 1) [1,2,3]) [0,1,2]\n  , (==) (((-) _ 1) 8) 7\n  , (==) (((-) _ _) 8 1) 7\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/holes-record/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf __pycache__ nexus log\n"
  },
  {
    "path": "test-suite/golden-tests/holes-record/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/holes-record/main.loc",
    "content": "module main (test)\n\nimport root-py\n\nrecord Person = Person\n  { name :: Str\n  , age :: Int\n  }\nrecord Py => Person = \"dict\"\n\n-- This defines how Python works when it doesn't know the type\ntype Py => Record = \"dict\"\n\nbar :: Str -> Person\nbar = { name = _, age = 0 }\n\ntest = fold (&&) True\n  [ map bar [\"Alice\", \"Bob\"] == [{name = \"Alice\", age = 0 }, {name = \"Bob\", age = 0}]\n\n  -- Requires the Record def, there is something wrong in type inference.\n  , map {name = _, age = 0} [\"Alice\", \"Bob\"] == [{name = \"Alice\", age = 0 }, {name = \"Bob\", age = 0}]\n\n  -- anonymous records (works in Python and R, would fail in C++ currently)\n  , map {foo = _, bar = 0} [\"Alice\", \"Bob\"] == [{foo = \"Alice\", bar = 0 }, {foo = \"Bob\", bar = 0}]\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/holes-simple/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf __pycache__ nexus log\n"
  },
  {
    "path": "test-suite/golden-tests/holes-simple/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/holes-simple/main.loc",
    "content": "module main (test)\n\nimport root-py\n\nfoo = [_,_,_]\nfoo2 = [(_,3),(_,2),(_,1)]\npoint = (_,_,_)\npoint2 = (_,_,(_, 42))\n\ntest = fold (&&) True\n  [ (==) (map (_, 5) [1,2]) [(1,5),(2,5)]\n  , (==) (foo 1 2 3) [1,2,3]\n  , (==) (foo2 1 2 3) [(1,3),(2,2),(3,1)]\n  , (==) (point 1 2 3) (1,2,3)\n  , (==) (point2 1 2 3) (1,2,(3,42))\n  , (==) ([_] 1) [1]\n  , (==) ((_, 5) 1) (1,5)\n  , (==) ((1, _) 5) (1,5)\n  , (==) ((1, (_,2)) 5) (1,(5,2))\n  , (==) ([(_, (_, _)), _] 1 2 3 (4,(5,6))) [(1,(2,3)), (4,(5,6))]\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/import-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/import-1/exp.txt",
    "content": "3.74165738677394\n"
  },
  {
    "path": "test-suite/golden-tests/import-1/main.loc",
    "content": "module main (foo)\n\nimport root-cpp\nimport math-cpp (sqrt)\n\nsum = fold (+) zero\n\nsquare x = x * x\n\nfoo :: [Real] -> Real\nfoo xs = sqrt (sum (map square xs))\n"
  },
  {
    "path": "test-suite/golden-tests/import-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus person '\"Bob\"' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/import-2/exp.txt",
    "content": "[[\"Bob\"],[42]]\n"
  },
  {
    "path": "test-suite/golden-tests/import-2/localmap/main.loc",
    "content": "module localmap (Map, person)\n\nsource py from \"main.py\" ( \"morloc_person\" as person )\n\ntype py => Str = \"str\"\ntype py => Int = \"int\"\ntype py => (Map key val) = \"dict\" key val\ntype py => (Tuple2 a b) = \"tuple\" a b\ntype py => (List a) = \"list\" a\n\nclass Packable a b where\n  pack :: a -> b\n  unpack :: b -> a\n\ninstance Packable ([key],[val]) (Map key val) where\n  source py from \"main.py\"\n   ( \"morloc_packMap\" as pack\n   , \"morloc_unpackMap\" as unpack\n   )\n\nperson :: Str -> Map Str Int\n"
  },
  {
    "path": "test-suite/golden-tests/import-2/localmap/main.py",
    "content": "def morloc_packMap (xs):\n  d = dict()\n  ks, vs = xs\n  for (k,v) in zip(ks, vs):\n    d[k] = v\n  return d\n\ndef morloc_unpackMap (d):\n  return (list(d.keys()), list(d.values()))\n\ndef morloc_person(name):\n    return {name : 42}\n"
  },
  {
    "path": "test-suite/golden-tests/import-2/main.loc",
    "content": "-- test for gitub issue #16\n\n-- The index for `person` should link to the scope where person was defined\n\n-- Since `person` was defined in `localmap`, it should have access to `Map`,\n-- `List`, and all the other type terms defined there. These types should not\n-- need to be imported here. Indeed, if they are imported here from other sources,\n-- there should be no conflict.\n\nmodule main (person)\nimport localmap (person)\n"
  },
  {
    "path": "test-suite/golden-tests/infix/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_arithmetic       > obs.txt 2> obs.err\n\t./nexus test_precedence       2>> obs.err  >> obs.txt\n\t./nexus test_exponentiation   2>> obs.err  >> obs.txt\n\t./nexus test_append           2>> obs.err  >> obs.txt\n\t./nexus test_application      2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/infix/exp.txt",
    "content": "8\n14\n8\n[1,2,3,4]\n10\n"
  },
  {
    "path": "test-suite/golden-tests/infix/impl.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n\ndef pow(x, y):\n    return x ** y\n\ndef cons(x, xs):\n    return [x] + xs\n\ndef append(xs, ys):\n    return xs + ys\n\ndef cons(x, xs):\n    return [x] + xs\n\ndef append(xs, ys):\n    return xs + ys\n"
  },
  {
    "path": "test-suite/golden-tests/infix/main.loc",
    "content": "module main\n  ( test_arithmetic\n  , test_precedence\n  , test_exponentiation\n  , test_append\n  , test_application\n  )\n\ntype Py => Int = \"int\"\ntype Py => (List a) = \"list\" a\n\n-- Mathematical operators - source from Python\nsource py from \"impl.py\"\n  ( \"add\" as (+)\n  , \"mul\" as (*)\n  , \"pow\" as (**)\n  , \"append\" as (<>)\n  )\n\n-- Define infix operators as wrappers\ninfixl 6 +\ninfixl 7 *\ninfixr 8 **\ninfixr 5 <>\ninfixr 0 $\n\n(+) :: Int -> Int -> Int\n(*) :: Int -> Int -> Int\n(**) :: Int -> Int -> Int\n(<>) :: [Int] -> [Int] -> [Int]\n\n-- Function application operator\n($) :: (Int -> Int) -> Int -> Int\n($) f x = f x\n\n-- Test cases\n\n-- Basic arithmetic\ntest_arithmetic :: Int\ntest_arithmetic = 5 + 3\n\n-- Precedence: multiplication before addition\ntest_precedence :: Int\ntest_precedence = 2 + 3 * 4\n\n-- Exponentiation (right associative, highest precedence)\ntest_exponentiation :: Int\ntest_exponentiation = 2 ** 3\n\n-- List append operator\ntest_append :: [Int]\ntest_append = [1, 2] <> [3, 4]\n\n-- Function application operator\ntest_application :: Int\ntest_application = (*) 2 $ 5\n"
  },
  {
    "path": "test-suite/golden-tests/infix-generic/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus x > obs.txt 2> obs.err\n\t./nexus y 2>> obs.err  >> obs.txt\n\t./nexus z 2>> obs.err  >> obs.txt\n\t./nexus f 99 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/infix-generic/exp.txt",
    "content": "\"11\"\n[7,8,9]\n\"13\"\n\"141\"\n"
  },
  {
    "path": "test-suite/golden-tests/infix-generic/foo.py",
    "content": "def add(x, y):\n    return x + y\n\ndef morloc_map(f, xs):\n    return [f(x) for x in xs]\n\ndef morloc_str(x):\n    return str(x)\n"
  },
  {
    "path": "test-suite/golden-tests/infix-generic/main.loc",
    "content": "module main (x,y,z,f)\n\ntype Py => Str = \"str\"\ntype Py => Int = \"int\"\ntype Py => (List a) = \"list\" a\n\ninfixl 9 .\n(.) :: (b -> c) -> (a -> b) -> a -> c\n(.) g f x = g (f x)\n\ninfixr 0 $\n($) :: (a -> b) -> a -> b\n($) f x = f x\n\ninfixl 6 +\n(+) :: Int -> Int -> Int\nsource Py from \"foo.py\"\n  ( \"add\" as (+)\n  , \"morloc_str\" as show\n  , \"morloc_map\" as map\n  )\n\nshow :: a -> Str\nmap :: (a -> b) -> [a] -> [b]\n\nx = show ((+) 6 5)\ny = map  ((+) 6) [1,2,3]\nz = show ((+) 6 $ 7)\nf = show . (+) 42\n"
  },
  {
    "path": "test-suite/golden-tests/infix-import/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_addition > obs.txt 2> obs.err\n\t./nexus test_multiplication 2>> obs.err  >> obs.txt\n\t./nexus test_exponentiation 2>> obs.err  >> obs.txt\n\t./nexus test_concat 2>> obs.err  >> obs.txt\n\t./nexus test_combined 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/infix-import/exp.txt",
    "content": "8\n28\n1024\n[1,2,3,4]\n27\n"
  },
  {
    "path": "test-suite/golden-tests/infix-import/foo.py",
    "content": "def add(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/infix-import/main.loc",
    "content": "module main\n  ( test_addition\n  , test_multiplication\n  , test_exponentiation\n  , test_concat\n  , test_combined\n  )\n\n-- Import infix operators from ops module\nimport ops ((+), (*), (**), (<>))\n\ntype Py => Int = \"int\"\ntype Py => (List a) = \"list\" a\n\n-- Test cases using imported operators\n\n-- Basic addition\ntest_addition :: Int\ntest_addition = 5 + 3\n\n-- Multiplication\ntest_multiplication :: Int\ntest_multiplication = 4 * 7\n\n-- Exponentiation\ntest_exponentiation :: Int\ntest_exponentiation = 2 ** 10\n\n-- List concatenation\ntest_concat :: [Int]\ntest_concat = [1, 2] <> [3, 4]\n\n-- Combined: uses operator precedence\ntest_combined :: Int\ntest_combined = 2 + 3 * 2 ** 3 + 1\n"
  },
  {
    "path": "test-suite/golden-tests/infix-import/ops/main.loc",
    "content": "module ops\n  ( (+)\n  , (*)\n  , (**)\n  , (<>)\n  )\n\ntype Py => Int = \"int\"\ntype Py => (List a) = \"list\" a\n\nsource py from \"ops.py\"\n  ( \"add\" as (+)\n  , \"mul\" as (*)\n  , \"pow\" as (**)\n  , \"concat\" as (<>)\n  )\n\ninfixl 6 +\n(+) :: Int -> Int -> Int\n\ninfixl 7 *\n(*) :: Int -> Int -> Int\n\ninfixr 8 **\n(**) :: Int -> Int -> Int\n\ninfixr 5 <> \n(<>) :: [a] -> [a] -> [a]\n"
  },
  {
    "path": "test-suite/golden-tests/infix-import/ops/ops.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n\ndef pow(x, y):\n    return x ** y\n\ndef concat(xs, ys):\n    return xs + ys\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_a > obs.txt 2> obs.err\n\t./nexus test_b 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/exp.txt",
    "content": "25\n209\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/main.loc",
    "content": "module main (test_a, test_b)\n\nimport ops-a ((&) as (&~))\nimport ops-b ((&))\n\ntype Py => Int = \"int\"\n\n-- Test operator from ops-a with its fixity (infixl 5 &)\n-- 2 &~ 3 &~ 4 should be (2 & 3) & 4 = 14 (left-associative)\ntest_a :: Int\ntest_a = 2 &~ 3 &~ 4\n\n-- Test operator from ops-b with its fixity (infixr 7 &)\n-- 2 & 3 & 4 should be 2 & (3 & 4) = 18 (right-associative)\ntest_b :: Int\ntest_b = 2 & 3 & 4\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/ops-a/main.loc",
    "content": "module ops-a (*)\n\ntype Py => Int = \"int\"\n\n-- Left-associative &\ninfixl 5 &\n(&) :: Int -> Int -> Int\nsource Py from \"ops.py\" (\"left_assoc\" as (&))\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/ops-a/ops.py",
    "content": "def left_assoc(x, y):\n    # (x & y) = x + y + 10\n    # This way we can distinguish left vs right associativity\n    return x + y + 10\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/ops-b/main.loc",
    "content": "module ops-b (*)\n\ntype Py => Int = \"int\"\n\n-- Right-associative &\ninfixr 7 &\n(&) :: Int -> Int -> Int\nsource Py from \"ops.py\" (\"right_assoc\" as (&))\n"
  },
  {
    "path": "test-suite/golden-tests/infix-local-fixity/ops-b/ops.py",
    "content": "def right_assoc(x, y):\n    # (x & y) = x + y + 100\n    # Different value to distinguish from left_assoc\n    return x + y + 100\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_sum_py > obs.txt 2> obs.err\n\t./nexus test_product_py 2>> obs.err  >> obs.txt\n\t./nexus test_chain_py 2>> obs.err  >> obs.txt\n\t./nexus test_sum_cpp 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/exp.txt",
    "content": "15\n42\n14\n123\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/main.loc",
    "content": "module main (test_sum_py, test_sum_cpp, test_product_py, test_chain_py)\n\nimport ops-py\nimport ops-cpp\n\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\n\n-- Python tests\ntest_sum_py :: Int\ntest_sum_py = 10 <+> 5\n\ntest_product_py :: Int\ntest_product_py = 6 <*> 7\n\n-- Test precedence: <*> (infixl 7) > <+> (infixl 6)\n-- Should be: 2 <+> (3 <*> 4) = 2 + 12 = 14\ntest_chain_py :: Int\ntest_chain_py = 2 <+> 3 <*> 4\n\n-- C++ test\ntest_sum_cpp :: Int\ntest_sum_cpp = 100 <+> 23\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/ops/main.loc",
    "content": "module ops (*)\n\n-- Language-agnostic operator signatures and fixity declarations\n(<+>) :: Int -> Int -> Int\n(<*>) :: Int -> Int -> Int\n\ninfixl 6 <+>\ninfixl 7 <*>\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/ops-cpp/main.loc",
    "content": "module ops-cpp (*)\n\nimport ops\n\ntype Cpp => Int = \"int\"\n\nsource Cpp from \"ops.hpp\" (\"add\" as (<+>), \"mul\" as (<*>))\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/ops-cpp/ops.hpp",
    "content": "#include <cppmorloc.hpp>\n\nint add(int x, int y) { return x + y; }\nint mul(int x, int y) { return x * y; }\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/ops-py/main.loc",
    "content": "module ops-py (*)\n\nimport ops\n\ntype Py => Int = \"int\"\n\nsource Py from \"ops.py\" (\"add\" as (<+>), \"mul\" as (<*>))\n"
  },
  {
    "path": "test-suite/golden-tests/infix-polyglot/ops-py/ops.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-import/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_comp > obs.txt 2> obs.err\n\t./nexus test_min 2>> obs.err  >> obs.txt\n\t./nexus test_not_equal 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-import/exp.txt",
    "content": "true\n17\ntrue\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-import/foo.py",
    "content": "def le(x, y):\n    return x <= y\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-import/main.loc",
    "content": "module main (test_comp, test_min, test_not_equal)\n\nimport .numops (Ord, (<), (>), (>=), min)\n\ntype Py => Int = \"int\"\ntype Py => Bool = \"bool\"\n\ninstance Ord Int where\n  source Py from \"foo.py\" (\"le\" as (<=))\n\n-- Test imported infix operators\ntest_comp :: Bool\ntest_comp = 5 < 10\n\n-- Test method using infix operators\ntest_min :: Int\ntest_min = min 42 17\n\n-- Test combined expression\ntest_not_equal :: Bool\ntest_not_equal = 3 <= 5\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-import/numops/main.loc",
    "content": "module (Ord, (<), (>), (>=), min)\n\ntype Py => Bool = \"bool\"\n\nclass Ord a where\n  (<=) :: a -> a -> Bool\n\ninfixl 4 <\ninfixl 4 >\ninfixl 4 <=\ninfixl 4 >=\n\nsource Py from \"ops.py\"\n  ( \"morloc_not\" as not\n  , \"morloc_and\" as (&&)\n  , \"morloc_if\" as ifelse\n  )\nnot :: Bool -> Bool\n(&&) :: Bool -> Bool -> Bool\nifelse :: Bool -> a -> a -> a\n\n(<) :: a -> a -> Bool\n(<) x y = (x <= y) && not (y <= x)\n\n(>) :: a -> a -> Bool\n(>) x y = y < x\n\n(>=) :: a -> a -> Bool\n(>=) x y = y <= x\n\nmin :: a -> a -> a\nmin x y = ifelse (x < y) x y\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-import/numops/ops.py",
    "content": "def morloc_not(b):\n    return not b\n\ndef morloc_and(p, q):\n    return p and q\n\ndef morloc_if(cond, t, f):\n    return t if cond else f\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_sum_py > obs.txt 2> obs.err\n\t./nexus test_product_py 2>> obs.err  >> obs.txt\n\t./nexus test_chain_py 2>> obs.err  >> obs.txt\n\t./nexus test_sum_cpp 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/exp.txt",
    "content": "15\n42\n14\n123\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/main.loc",
    "content": "module main (test_sum_py, test_sum_cpp, test_product_py, test_chain_py)\n\nimport .semiring-py (Semiring)\nimport .semiring-cpp (Semiring)\n\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\n\n-- Python tests\ntest_sum_py :: Int\ntest_sum_py = 10 <+> 5\n\ntest_product_py :: Int\ntest_product_py = 6 <*> 7\n\n-- Test precedence: <*> (infixl 7) > <+> (infixl 6)\n-- Should be: 2 <+> (3 <*> 4) = 2 + 12 = 14\ntest_chain_py :: Int\ntest_chain_py = 2 <+> 3 <*> 4\n\n-- C++ test\ntest_sum_cpp :: Int\ntest_sum_cpp = 100 <+> 23\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/semiring/main.loc",
    "content": "module (Semiring)\n\n-- Semiring typeclass with additive and multiplicative operators\nclass Semiring a where\n  zero :: a\n  one :: a\n  (<+>) :: a -> a -> a  -- additive operation\n  (<*>) :: a -> a -> a  -- multiplicative operation\n\ninfixl 6 <+>\ninfixl 7 <*>\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/semiring-cpp/main.loc",
    "content": "module (*)\n\nimport .semiring (Semiring)\n\ntype Cpp => Int = \"int\"\n\ninstance Semiring Int where\n  source Cpp from \"ops.hpp\" (\"add\" as (<+>), \"mul\" as (<*>))\n  zero = 0\n  one = 1\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/semiring-cpp/ops.hpp",
    "content": "#include <cppmorloc.hpp>\n\nint add(int x, int y) { return x + y; }\nint mul(int x, int y) { return x * y; }\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/semiring-py/main.loc",
    "content": "module (*)\n\nimport .semiring (Semiring)\n\ntype Py => Int = \"int\"\n\ninstance Semiring Int where\n  source Py from \"ops.py\" (\"add\" as (<+>), \"mul\" as (<*>))\n  zero = 0\n  one = 1\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-polyglot/semiring-py/ops.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-simple/Makefile",
    "content": "all:\n\trm -f *.err obs.txt *.err\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_add 5 > obs.txt 2> obs.err\n\t./nexus test_mul 5 2>> obs.err  >> obs.txt\n\t./nexus test_expr 2>> obs.err  >> obs.txt\n\t./nexus test_negate 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-simple/exp.txt",
    "content": "8\n20\n31\n-42\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-simple/foo.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n\ndef neg(x):\n    return -x\n"
  },
  {
    "path": "test-suite/golden-tests/infix-typeclass-simple/main.loc",
    "content": "module main (test_add, test_mul, test_expr, test_negate)\n\ntype Py => Int = \"int\"\n\n-- Typeclass with infix operators\nclass Num a where\n  zero :: a\n  negate :: a -> a\n  (+) :: a -> a -> a\n  (*) :: a -> a -> a\n\ninfixl 6 +\ninfixl 7 *\n\ninstance Num Int where\n  source Py from \"foo.py\" (\"add\" as (+), \"mul\" as (*), \"neg\" as negate)\n  zero = 0\n\n-- Test basic infix usage\ntest_add :: Int -> Int\ntest_add x = x + 3\n\ntest_mul :: Int -> Int\ntest_mul x = 4 * x\n\n-- Test precedence: should be 4 * 7 + 3 = 31\ntest_expr :: Int\ntest_expr = 4 * 7 + 3\n\n-- Test prefix method\ntest_negate :: Int\ntest_negate = negate 42\n"
  },
  {
    "path": "test-suite/golden-tests/inline-block-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testAdd 3 4 > obs.txt 2> obs.err\n\t./nexus testExpr 2 3 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-block-py/exp.txt",
    "content": "7\n10\n"
  },
  {
    "path": "test-suite/golden-tests/inline-block-py/main.loc",
    "content": "-- Test %inline before an entire source ... where block\n-- Expected: ALL functions in the block should be inlined\nmodule main (testAdd, testExpr)\n\ntype Py => Int = \"int\"\n\n-- %inline before the whole source block should make both (+) and (*) inline\n%inline source Py where\n  (+)\n  (*)\n\n(+) :: Int -> Int -> Int\n(*) :: Int -> Int -> Int\n\ntestAdd :: Int -> Int -> Int\ntestAdd x y = x + y\n\n-- If inlined: (x * y) + z with native operators\n-- Either way, result should be correct: 2*3 + 4 = 10\ntestExpr :: Int -> Int -> Int -> Int\ntestExpr x y z = x * y + z\n"
  },
  {
    "path": "test-suite/golden-tests/inline-cross-lang/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 1.5 2.5 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-cross-lang/exp.txt",
    "content": "8\n"
  },
  {
    "path": "test-suite/golden-tests/inline-cross-lang/foo.R",
    "content": "rDouble <- function(x) {\n    x * 2\n}\n"
  },
  {
    "path": "test-suite/golden-tests/inline-cross-lang/foo.py",
    "content": "def add(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/inline-cross-lang/main.loc",
    "content": "-- Test that %inline is silently ignored for cross-language calls\n-- The inline Python add is called from an R context via composition\nmodule main (foo)\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Py => List a = \"list\" a\n\ntype R => Int = \"integer\"\ntype R => Real = \"numeric\"\ntype R => List a = \"list\" a\n\nsource Py from \"foo.py\" where\n  %inline add\n\nsource R from \"foo.R\" (\"rDouble\" as double)\n\nadd :: Real -> Real -> Real\ndouble :: Real -> Real\n\n-- double is R, add is Python with %inline\n-- The inline should be silently ignored for the cross-language call\nfoo :: Real -> Real -> Real\nfoo x y = double (add x y)\n"
  },
  {
    "path": "test-suite/golden-tests/inline-deep-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus deep 1 2 3 4 5 6 7 8 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-deep-py/exp.txt",
    "content": "186\n"
  },
  {
    "path": "test-suite/golden-tests/inline-deep-py/main.loc",
    "content": "-- Test deeply nested inline operators with precedence\nmodule main (deep)\n\ntype Py => Int = \"int\"\n\nsource Py where\n  %inline (+)\n  %inline (*)\n\n(+) :: Int -> Int -> Int\n(*) :: Int -> Int -> Int\n\ninfixl 6 +\ninfixl 7 *\n\ndeep :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int\ndeep a b c d e f g h = (a + b) * (c + d) + (e + f) * (g + h)\n"
  },
  {
    "path": "test-suite/golden-tests/inline-func-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testAdd 3 4 > obs.txt 2> obs.err\n\t./nexus testDouble 5 >> obs.txt 2>> obs.err\n\t./nexus testMixed 2 3 >> obs.txt 2>> obs.err\n\t./nexus testRepeated 3 4 >> obs.txt 2>> obs.err\n\t./nexus testNested 2 3 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-func-py/exp.txt",
    "content": "7\n20\n15\n14\n20\n"
  },
  {
    "path": "test-suite/golden-tests/inline-func-py/foo.py",
    "content": "def add(x, y):\n    return x + y\n\ndef double(x):\n    return x * 2\n\ndef triple(x):\n    return x * 3\n"
  },
  {
    "path": "test-suite/golden-tests/inline-func-py/main.loc",
    "content": "-- Test %inline pragma on non-operator functions\nmodule main (testAdd, testDouble, testMixed, testRepeated, testNested)\n\ntype Py => Int = \"int\"\n\nsource Py from \"foo.py\" where\n  %inline add\n  %inline double\n  triple\n\nadd :: Int -> Int -> Int\ndouble :: Int -> Int\ntriple :: Int -> Int\n\n-- Inline non-operator function\ntestAdd :: Int -> Int -> Int\ntestAdd x y = add x y\n\n-- Inline unary function\ntestDouble :: Int -> Int\ntestDouble x = double (add x x)\n\n-- Mix of inline (add, double) and non-inline (triple) from same source\n-- triple(add(2, 3)) = triple(5) = 15\ntestMixed :: Int -> Int -> Int\ntestMixed x y = triple (add x y)\n\n-- Inline function called many times in one expression\n-- add(add(x, x), add(y, y)) = add(2x, 2y) = 2x + 2y\n-- with x=3, y=4: add(6, 8) = 14\ntestRepeated :: Int -> Int -> Int\ntestRepeated x y = add (add x x) (add y y)\n\n-- Deeply nested inline: double(double(add(x, y)))\n-- with x=2, y=3: double(double(5)) = double(10) = 20\ntestNested :: Int -> Int -> Int\ntestNested x y = double (double (add x y))\n"
  },
  {
    "path": "test-suite/golden-tests/inline-ho-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMapDouble '[1,2,3]' > obs.txt 2> obs.err\n\t./nexus testDirectDouble 5 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-ho-py/exp.txt",
    "content": "[2,4,6]\n10\n"
  },
  {
    "path": "test-suite/golden-tests/inline-ho-py/foo.py",
    "content": "def double(x):\n    return x * 2\n\ndef mymap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/inline-ho-py/main.loc",
    "content": "-- Test inline function passed as higher-order argument\n-- Expected: inline flag silently ignored, function works as normal value\nmodule main (testMapDouble, testDirectDouble)\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\n\nsource Py from \"foo.py\" where\n  %inline double\n  mymap\n\ndouble :: Int -> Int\nmymap :: (a -> b) -> [a] -> [b]\n\n-- Inline function passed as argument to higher-order function\n-- Should work: double is passed as a value, inlining is silently skipped\ntestMapDouble :: [Int] -> [Int]\ntestMapDouble xs = mymap double xs\n\n-- Direct inline call (for comparison -- this should inline normally)\ntestDirectDouble :: Int -> Int\ntestDirectDouble x = double x\n"
  },
  {
    "path": "test-suite/golden-tests/inline-mixed-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t-./nexus mixedOps 3 4 2 3 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-mixed-py/exp.txt",
    "content": "1\n"
  },
  {
    "path": "test-suite/golden-tests/inline-mixed-py/main.loc",
    "content": "-- Test mix of inline and non-inline operators from same source where block\n-- BUG: non-inline operator (-) sourced via where-syntax is emitted as a bare\n-- symbol for the function name. In Python this generates \"-(n24, n25)\" which\n-- is unary negation applied to a tuple, not a binary subtraction call.\n-- The underlying issue: operators sourced via \"source Py where (-)\" without\n-- %inline need the code generator to handle their non-alphanumeric name\n-- (e.g., emit as operator.sub or wrap appropriately).\nmodule main (mixedOps)\n\ntype Py => Int = \"int\"\n\nsource Py where\n  %inline (+)\n  %inline (*)\n  (-)\n\n(+) :: Int -> Int -> Int\n(*) :: Int -> Int -> Int\n(-) :: Int -> Int -> Int\n\n-- (3 + 4) - (2 * 3) = 7 - 6 = 1\nmixedOps :: Int -> Int -> Int -> Int -> Int\nmixedOps a b c d = (a + b) - (c * d)\n"
  },
  {
    "path": "test-suite/golden-tests/inline-old-style-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testAdd 3 4 > obs.txt 2> obs.err\n\t./nexus testExpr 2 3 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-old-style-py/exp.txt",
    "content": "7\n10\n"
  },
  {
    "path": "test-suite/golden-tests/inline-old-style-py/foo.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n"
  },
  {
    "path": "test-suite/golden-tests/inline-old-style-py/main.loc",
    "content": "-- Test %inline with old-style source syntax\n-- %inline before old-style source inlines all functions in the block\nmodule main (testAdd, testExpr)\n\ntype Py => Int = \"int\"\n\n%inline source Py from \"foo.py\" (\"add\" as (+), \"mul\" as (*))\n\n(+) :: Int -> Int -> Int\n(*) :: Int -> Int -> Int\n\ninfixl 6 +\ninfixl 7 *\n\ntestAdd :: Int -> Int -> Int\ntestAdd x y = x + y\n\n-- 2 * 3 + 4 = 10\ntestExpr :: Int -> Int -> Int -> Int\ntestExpr x y z = x * y + z\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-ho-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testFold '[1,2,3,4]' > obs.txt 2> obs.err\n\t./nexus testDirect 3 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-ho-py/exp.txt",
    "content": "10\n7\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-ho-py/foo.py",
    "content": "def fold(f, b, xs):\n    for x in xs:\n        b = f(b, x)\n    return b\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-ho-py/main.loc",
    "content": "-- Test inline operator passed as higher-order argument\n-- Expected: inline flag silently ignored for HO usage, operator works as value\n-- This is tricky because operators like (+) are not callable values in most languages.\n-- The compiler needs to handle this by wrapping the operator in a lambda or using\n-- a language-specific mechanism.\nmodule main (testFold, testDirect)\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\n\nsource Py from \"foo.py\" where\n  fold\n\nsource Py where\n  %inline (+)\n\n(+) :: Int -> Int -> Int\nfold :: (b -> a -> b) -> b -> [a] -> b\n\n-- Inline operator passed as value to fold\n-- fold (+) 0 [1,2,3,4] = 10\ntestFold :: [Int] -> Int\ntestFold xs = fold (+) 0 xs\n\n-- Direct inline usage (for comparison)\ntestDirect :: Int -> Int -> Int\ntestDirect x y = x + y\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus addTwo 3 4 > obs.txt 2> obs.err\n\t./nexus mulAdd 2 3 10 >> obs.txt 2>> obs.err\n\t./nexus nested 1 2 3 4 >> obs.txt 2>> obs.err\n\t./nexus letExpr 3 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-py/exp.txt",
    "content": "7\n16\n21\n49\n"
  },
  {
    "path": "test-suite/golden-tests/inline-op-py/main.loc",
    "content": "-- Test %inline pragma on operators in source where blocks\nmodule main (addTwo, mulAdd, nested, letExpr)\n\ntype Py => Int = \"int\"\n\nsource Py where\n  %inline (+)\n  %inline (*)\n\n(+) :: Int -> Int -> Int\n(*) :: Int -> Int -> Int\n\n-- Simple inline operator\naddTwo :: Int -> Int -> Int\naddTwo x y = x + y\n\n-- Nested inline operators\nmulAdd :: Int -> Int -> Int -> Int\nmulAdd x y z = x * y + z\n\n-- Test operator precedence with parenthesization\nnested :: Int -> Int -> Int -> Int -> Int\nnested a b c d = (a + b) * (c + d)\n\n-- Inline operator inside let binding\nletExpr :: Int -> Int -> Int\nletExpr x y =\n  let s = x + y\n  in s * s\n"
  },
  {
    "path": "test-suite/golden-tests/inline-typeclass-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus addInts 3 4 > obs.txt 2> obs.err\n\t./nexus addReals 1.5 2.5 >> obs.txt 2>> obs.err\n\t./nexus expr1 3 4 5 6 >> obs.txt 2>> obs.err\n\t./nexus expr2 2 3 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/inline-typeclass-py/exp.txt",
    "content": "7\n4\n77\n10\n"
  },
  {
    "path": "test-suite/golden-tests/inline-typeclass-py/main.loc",
    "content": "-- Test %inline pragma inside typeclass instances with source ... where syntax\nmodule main (addInts, addReals, expr1, expr2)\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\n\nclass Addable a where\n  (+) :: a -> a -> a\n  (*) :: a -> a -> a\n\ninfixl 6 +\ninfixl 7 *\n\ninstance Addable Int where\n  source Py where\n    %inline (+)\n    %inline (*)\n\ninstance Addable Real where\n  source Py where\n    %inline (+)\n    %inline (*)\n\n-- Basic typeclass inline operator usage\naddInts :: Int -> Int -> Int\naddInts x y = x + y\n\naddReals :: Real -> Real -> Real\naddReals x y = x + y\n\n-- Complex expression with typeclass-dispatched inline operators\n-- (3 + 4) * (5 + 6) = 7 * 11 = 77\nexpr1 :: Int -> Int -> Int -> Int -> Int\nexpr1 a b c d = (a + b) * (c + d)\n\n-- Precedence: 2 * 3 + 4 = 6 + 4 = 10\nexpr2 :: Int -> Int -> Int -> Int\nexpr2 x y z = x * y + z\n"
  },
  {
    "path": "test-suite/golden-tests/inline-typeclass-py/ops.py",
    "content": "def add(x, y):\n    return x + y\n\ndef mul(x, y):\n    return x * y\n"
  },
  {
    "path": "test-suite/golden-tests/interop-1-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus py2c 3 4 > obs.txt 2> obs.err\n\t./nexus c2py 3 4 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/interop-1-py/exp.txt",
    "content": "112\n700\n"
  },
  {
    "path": "test-suite/golden-tests/interop-1-py/main.loc",
    "content": "module main (py2c, c2py)\n\nimport root-py\nimport root-cpp\n\npadd x = idpy . (+) x\ncmul x = idcpp . (*) x\n\npy2c x y = padd (cmul x y) (100.0 :: Real)\nc2py x y = cmul (padd x y) (100.0 :: Real)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-1-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus r2c 3 4 > obs.txt 2> obs.err\n\t./nexus c2r 3 4 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/interop-1-r/exp.txt",
    "content": "112\n700\n"
  },
  {
    "path": "test-suite/golden-tests/interop-1-r/main.loc",
    "content": "module main (r2c, c2r)\n\nimport root ((.))\nimport root-r (Real, Integral, idr)\nimport root-cpp (Real, Numeric, idcpp)\n\nradd x = idr . (+) x\ncmul x = idcpp . (*) x\n\nr2c x y = radd (cmul x y) (100.0 :: Real)\nc2r x y = cmul (radd x y) (100.0 :: Real)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-10/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"asdf\"' 3.14 > obs.txt 2> obs.err\n\n.PHONY: clean\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-10/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/interop-10/foo.hpp",
    "content": "#include <algorithm>\n#include <functional>\n#include <type_traits>\n\n// f :: (a -> b) -> a -> b\n// g :: Str -> Int\n// h :: (Str, Int) -> Str\n\n// f :: (a -> b) -> a -> b\ntemplate <typename a, typename F>\nauto f(F func, a x) -> std::invoke_result_t<F, a> {\n  return(func(x));\n}\n\n// g :: Bool -> Int\nint g(bool x){\n  return(42);\n}\n"
  },
  {
    "path": "test-suite/golden-tests/interop-10/foo.py",
    "content": "#  h :: (Str, Int) -> Str\ndef h(x):\n    return True\n"
  },
  {
    "path": "test-suite/golden-tests/interop-10/main.loc",
    "content": "module main (foo)\n\nimport root-cpp (Str, Int, Real, Bool, Tuple2)\nimport root-py (Str, Int, Real, Bool, Tuple2)\n\nsource Cpp from \"foo.hpp\" (\"f\", \"g\")\nsource Py from \"foo.py\" (\"h\")\n\nf :: (a -> b) -> a -> b\ng :: Bool -> Int\nh :: (Str, Real) -> Bool\n\nfoo x y = f g (h (x, y))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-11/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 14 > obs.txt 2> obs.err\n\t./nexus foo2 2 2>> obs.err  >> obs.txt\n\t./nexus foo3 3 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-11/exp.txt",
    "content": "15\n6\n8\n"
  },
  {
    "path": "test-suite/golden-tests/interop-11/incdef.hpp",
    "content": "#ifndef FOO\n#define FOO\n\ntypedef struct bar_s {\n  int sass;\n  int fass;\n} bar_t;\n\n\nint inc(int x) {\n  return x + 1;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-11/main.loc",
    "content": "module main (foo, foo2, foo3)\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\ntype Py => Tuple2 a b = \"tuple\" a b\n\ntype Cpp => Int = \"int\"\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Cpp => Tuple2 a b = \"std::tuple<$1,$2>\" a b\n\nrecord Bar = Bar\n  { sass :: Int\n  , fass :: Int\n  }\nrecord Cpp => Bar = \"bar_t\"\n\nsource Py (\"sum\")\nsource Py from \"pysum.py\" (\"sum2\", \"sumDict\" as sumBar)\nsum :: [Int] -> Int\nsum2 :: (Int, Int) -> Int\nsumBar :: Bar -> Int\n\nsource Cpp from \"incdef.hpp\" (\"inc\")\ninc :: Int -> Int\n\nfoo x = sum [inc x]\nfoo2 x = sum2 (inc x, inc x)\nfoo3 x = sumBar {sass = inc x, fass = inc x}\n"
  },
  {
    "path": "test-suite/golden-tests/interop-11/pysum.py",
    "content": "def sum2(xs):\n    return xs[0] + xs[1]\n\ndef sumDict(bar):\n    return sum(bar.values())\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/exp.txt",
    "content": "9\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/foo.R",
    "content": "morloc_fold <- function(f, b, xs){\n  for(x in xs){\n    b <- f(b, x)\n  }\n  b\n}\n\nmorloc_add <- function(x, y){\n  x + y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\n\ntype Py => Real = \"float\"\ntype R => Real = \"numeric\"\n\nadd :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/foo.py",
    "content": "def morloc_map(f, xs):\n    return list(map(f, xs))\n\ndef morloc_add(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\nsource Py from \"foo.py\" (\"morloc_map\" as map, \"morloc_add\" as add)\nmap :: (a -> b) -> [a] -> [b]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nsource R from \"foo.R\" (\"morloc_fold\" as fold, \"morloc_add\" as add)\nfold :: (b -> a -> b) -> b -> [a] -> b\n"
  },
  {
    "path": "test-suite/golden-tests/interop-2/main.loc",
    "content": "module main (foo)\n\nimport .foo (List, Real)\nimport .foopy (map, add)\nimport .foor (fold, add)\n\nsum xs b = fold add b xs\n\nfoo xs = sum (map (add 1.0) xs) 0.0\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/exp.txt",
    "content": "[-1,-2,-3]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class F>\nauto cmap(F f, const std::vector<A>& xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-cp/main.loc",
    "content": "module main (foo)\n\nimport .foo (List, Int)\nimport .fooc (cmap)\nimport .foopy (pneg)\n\nfoo :: [Int] -> [Int]\nfoo xs = cmap pneg xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pp/exp.txt",
    "content": "[-1,-2,-3]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pp/main.loc",
    "content": "module main (foo)\n\nimport .foopy\n\nfoo :: [Int] -> [Int]\nfoo xs = pmap pneg xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/exp.txt",
    "content": "[-1,-2,-3]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-pr/main.loc",
    "content": "module main (foo)\n\nimport .foo (List, Int)\nimport .foopy\nimport .foor\n\nfoo :: [Int] -> [Int]\nfoo xs = pmap rneg xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/exp.txt",
    "content": "[-1,-2,-3]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3a-rc/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foor\nimport .fooc\n\nfoo :: [Int] -> [Int]\nfoo xs = rmap cneg xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class F>\nauto cmap(F f, const std::vector<A>& xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-cp/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .fooc\nimport .foopy\n\nfoo :: [Int] -> [Int]\nfoo xs = cmap (padd 1) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pp/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foopy\n\nfoo xs = pmap (padd 1) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-pr/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foopy\nimport .foor\n\nfoo xs = pmap (radd 1) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3b-rc/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foor\nimport .fooc\n\nfoo xs = rmap (cadd 1) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/exp.txt",
    "content": "[43,44,45]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class F>\nauto cmap(F f, const std::vector<A>& xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-cp/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .fooc\nimport .foopy\n\nfoo x xs = cmap (padd x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 4 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/exp.txt",
    "content": "[5,6,7]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pp/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foopy\n\nfoo x xs = pmap (padd x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 4 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/exp.txt",
    "content": "[5,6,7]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-pr/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foopy\nimport .foor\n\nfoo x xs = pmap (radd x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/exp.txt",
    "content": "[43,44,45]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3c-rc/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foor (rmap)\nimport .fooc (cadd)\n\nfoo :: Int -> [Int] -> [Int]\nfoo x xs = rmap (cadd x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/exp.txt",
    "content": "[4,3,2]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class F>\nauto cmap(F f, const std::vector<A>& xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-cp/main.loc",
    "content": "module main (foo)\n\nimport .foo (Int, List)\nimport .fooc (cmap)\nimport .foopy (padd, pneg)\n\nfoo :: Int -> [Int] -> [Int]\nfoo x ys = cmap (\\y -> padd x (pneg y)) ys\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/exp.txt",
    "content": "[4,3,2]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pp/main.loc",
    "content": "module main (foo)\n\nimport .foo (List, Int)\nimport .foopy (pmap, padd, pneg)\n\nfoo x ys = pmap (\\y -> padd x (pneg y)) ys\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/exp.txt",
    "content": "[4,3,2]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-pr/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foopy (pmap)\nimport .foor (radd, rneg)\n\nfoo x ys = pmap (\\y -> radd x (rneg y)) ys\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/exp.txt",
    "content": "[4,3,2]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3d-rc/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .foor (rmap, rneg)\nimport .fooc (cadd)\n\nfoo x ys = rmap (\\y -> cadd x (rneg y)) ys\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] [4,5,6] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf __pycache__ main\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/exp.txt",
    "content": "[7,11,15]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\nrzipWith <- function(f, xs, ys) sapply(seq_along(xs), function(i) f(xs[i], ys[i]))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class F>\nauto cmap(F f, const std::vector<A>& xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\ntemplate <class A, class B, class F>\nauto czipWith(\n        F f,\n        const std::vector<A>& xs,\n        const std::vector<B>& ys\n    ) -> std::vector<std::invoke_result_t<F, A, B>>\n{\n    using C = std::invoke_result_t<F, A, B>;\n    std::size_t N = std::min(xs.size(), ys.size());\n    std::vector<C> zs(N);\n    for(std::size_t i = 0; i < N; i++){\n        zs[i] = f(xs[i], ys[i]);\n    }\n    return zs;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/foo.loc",
    "content": "module (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n\ndef pzipWith(f, xs, ys):\n    return list(map(f, xs, ys))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/fooc.loc",
    "content": "module (*)\n\nimport .foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \nczipWith :: (a -> b -> c) -> [a] -> [b] -> [c] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\", \"czipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/foopy.loc",
    "content": "module (*)\n\nimport .foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \npzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\", \"pzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/foor.loc",
    "content": "module (*)\n\nimport .foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \nrzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\", \"rzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-cp/main.loc",
    "content": "module main (foo)\n\nimport .foo\nimport .fooc (czipWith, cmul)\nimport .foopy (padd)\n\nadd3 x y z = padd x (padd y z)\n\nfoo x = czipWith (\\y -> add3 (cmul x y) y)\n\n--                          |-> z <-|\n-- Tests the case where z is dependent on both a context argument (x) and a\n-- lambda argument (y)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] [4,5,6] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/exp.txt",
    "content": "[7,11,15]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\nrzipWith <- function(f, xs, ys) sapply(seq_along(xs), function(i) f(xs[i], ys[i]))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n\ndef pzipWith(f, xs, ys):\n    return list(map(f, xs, ys))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/fooc.loc",
    "content": "module fooc (*)\n\nimport foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \npzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\", \"pzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/foor.loc",
    "content": "module foor (*)\n\nimport foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \nrzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\", \"rzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pp/main.loc",
    "content": "module main (foo)\n\nimport foo\nimport foopy (pzipWith, pmul, padd)\n\nadd3 x y z = padd x (padd y z)\n\nfoo x = pzipWith (\\y -> add3 (pmul x y) y)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] [4,5,6] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/exp.txt",
    "content": "[6,14,24]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\nrzipWith <- function(f, xs, ys) sapply(seq_along(xs), function(i) f(xs[i], ys[i]))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n\ndef pzipWith(f, xs, ys):\n    return list(map(f, xs, ys))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/fooc.loc",
    "content": "module fooc (*)\n\nimport foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \npzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\", \"pzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/foor.loc",
    "content": "module foor (*)\n\nimport foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \nrzipWith :: (a -> b -> c) -> [a] -> [b] -> [c] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\", \"rzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-pr/main.loc",
    "content": "module main (foo)\n\nimport foo\nimport foopy (pzipWith, pmul)\nimport foor (radd, rmul)\n\nfoo x ys zs = pzipWith (\\y z -> radd (pmul x y) (rmul y z)) ys zs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 [1,2,3] [4,5,6] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/exp.txt",
    "content": "[7,11,15]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\nrzipWith <- function(f, xs, ys) sapply(seq_along(xs), function(i) f(xs[i], ys[i]))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n\ndef pzipWith(f, xs, ys):\n    return list(map(f, xs, ys))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/fooc.loc",
    "content": "module fooc (*)\n\nimport foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \npzipWith :: (a -> b -> c) -> [a] -> [b] -> [c]\n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\", \"pzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/foor.loc",
    "content": "module foor (*)\n\nimport foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \nrzipWith :: (a -> b -> c) -> [a] -> [b] -> [c] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\", \"rzipWith\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3e-rc/main.loc",
    "content": "module main (foo)\n\nimport foo\nimport foor (rzipWith, rmul)\nimport fooc (cadd)\n\nadd3 x y z = cadd x (cadd y z)\n\nfoo :: Int -> [Int] -> [Int] -> [Int]\nfoo x = rzipWith (\\y -> add3 (rmul x y) y)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/exp.txt",
    "content": "420\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class B, class F>\nstd::vector<B> cmap(F f, const std::vector<A>& xs) {\n    static_assert(std::is_invocable_r_v<B, F, A>, \n                  \"Function f must be callable with type A and return type B\");\n    \n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/fooc.loc",
    "content": "module fooc (*)\n\nimport foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/foor.loc",
    "content": "module foor (*)\n\nimport foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-3f/main.loc",
    "content": "module main (foo)\n\nimport foo (Int)\nimport foopy\nimport foor\nimport fooc\n\nfoo x = pmul (cadd (rmul 20 x) x) 10\n"
  },
  {
    "path": "test-suite/golden-tests/interop-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 9 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-4/exp.txt",
    "content": "10\n"
  },
  {
    "path": "test-suite/golden-tests/interop-4/main.loc",
    "content": "module main (foo)\n\nimport root (Real)\nimport root-py (id)\nimport root-cpp (Integral)\n\nfoo :: Real -> Real\nfoo x = id (1.0 + x)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/exp.txt",
    "content": "[2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/foo.hpp",
    "content": "#ifndef MORLOC_FOO_HPP\n#define MORLOC_FOO_HPP\n\n#include <vector>\n#include <algorithm>\n#include <functional>\n#include <type_traits>\n#include <utility>\n\nint cneg(int x){\n    return (-1) * x;\n}\n\nint cadd(int x, int y){\n    return x + y;\n}\n\nint cmul(int x, int y){\n    return x * y;\n}\n\ntemplate <class A, class F>\nauto cmap(F f, const std::vector<A>& xs) -> std::vector<std::invoke_result_t<F, A>> {\n    using B = std::invoke_result_t<F, A>;\n    std::vector<B> ys;\n    ys.reserve(xs.size());\n    for(const auto& x : xs) {\n        ys.push_back(f(x));\n    }\n    return ys;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/fooc.loc",
    "content": "module fooc (*)\n\nimport foo\n\ncneg :: Int -> Int\ncadd :: Int -> Int -> Int\ncmul :: Int -> Int -> Int\ncmap :: (a -> b) -> [a] -> [b] \n\nsource Cpp from \"foo.hpp\" (\"cneg\", \"cadd\", \"cmul\", \"cmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-5/main.loc",
    "content": "module main (foo)\n\nimport foo (List, Int)\nimport foopy (padd)\nimport fooc (cmap)\n\nfoo xs = cmap (\\x -> padd 1 x) xs\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/exp.txt",
    "content": "9\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/foo.R",
    "content": "rneg <- function(x) (-1) * x\nradd <- function(x, y) x + y\nrmul <- function(x, y) x * y\nrmap <- function(f, xs) sapply(xs, f)\nrfold <- function(f, b, xs) Reduce(f=f, x=xs, init=b)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/foo.loc",
    "content": "module foo (*)\n\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype R => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/foo.py",
    "content": "def pneg(x):\n    return (-1) * x\n\ndef padd(x, y):\n    return x + y\n\ndef pmul(x, y):\n    return x * y\n\ndef pmap(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/foopy.loc",
    "content": "module foopy (*)\n\nimport foo\n\npneg :: Int -> Int\npadd :: Int -> Int -> Int\npmul :: Int -> Int -> Int\npmap :: (a -> b) -> [a] -> [b] \n\nsource Py from \"foo.py\" (\"pneg\", \"padd\", \"pmul\", \"pmap\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/foor.loc",
    "content": "module foor (*)\n\nimport foo\n\nrneg :: Int -> Int\nradd :: Int -> Int -> Int\nrmul :: Int -> Int -> Int\nrmap :: (a -> b) -> [a] -> [b] \nrfold :: (b -> a -> b) -> b -> [a] -> b\n\nsource R from \"foo.R\" (\"rneg\", \"radd\", \"rmul\", \"rmap\", \"rfold\")\n"
  },
  {
    "path": "test-suite/golden-tests/interop-6/main.loc",
    "content": "module main (foo)\n\nimport foo (List, Int)\nimport foopy (pmap, padd)\nimport foor (rfold)\n\n-- yes, this is an odd definition of `sum`\nsum xs b = rfold padd b xs\n\nfoo xs = sum (pmap (padd 1) xs) 0\n-- foo xs = fold add (map (add 1) xs) 0\n--          R    Py   Py   Py\n"
  },
  {
    "path": "test-suite/golden-tests/interop-7/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 13 > obs.txt 2> obs.err\n\tdiff exp.txt obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/interop-7/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/interop-7/foo.R",
    "content": "f <- function(x){\n  list(\"asdf\", 4.2, x)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/interop-7/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <algorithm>\n#include <assert.h>\n#include <functional>\n#include <iostream>\n#include <map>\n#include <utility>\n#include <variant>\n#include <vector>\n\n// foldTree :: (l -> a -> a)\n//          -> (n -> e -> a -> a)\n//          -> a\n//          -> (n, e, l)\n//          -> a\ntemplate<typename N, typename E, typename L, typename A, typename F1, typename F2>\nA g(\n  F1 laa,\n  F2 anea,\n  A b,\n  const std::tuple<N,E,L>& x\n){\n    A a1 = laa(std::get<2>(x), b);\n    A a2 = anea(std::get<0>(x), std::get<1>(x), a1);\n    return a2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-7/main.loc",
    "content": "module main (foo)\n\nimport root((.))\nimport root-r (Str, Int, Real, Bool, Tuple3)\nimport root-cpp (Str, Int, Real, Bool, Tuple3)\n\nsource R from \"foo.R\" (\"f\")\nsource Cpp from \"foo.hpp\" (\"g\")\n\nf :: Int -> (Str, Real, Int)\n\n-- template<typename N, typename E, typename L, typename A>\ng :: (l -> a -> a)\n  -> (n -> e -> a -> a)\n  -> a\n  -> (n, e, l)\n  -> a\n\nfoo = g (\\l a -> a) (\\n e a -> a) True . f\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-py-to-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo \"true\" > obs.txt 2> obs.err\n\tdiff exp.txt obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-py-to-r/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-py-to-r/foo.R",
    "content": "g <- function(f, b, x){\n    f(x, b)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-py-to-r/foo.py",
    "content": "def f(n):\n    return True\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-py-to-r/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\nimport root-r (Bool, List)\nimport root-py (Bool, List)\n\nsource R from \"foo.R\" (\"g\")\nsource Py from \"foo.py\" (\"f\")\n\nf :: Bool -> Bool\n\ng :: (n -> a -> a) -> a -> n -> a\n\nbar = g (\\n a -> [n]) []\n\nfoo = bar . f\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo \"true\" > obs.txt 2> obs.err\n\tdiff exp.txt obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-c/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-c/foo.R",
    "content": "f <- function(n){ TRUE }\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-c/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\ntemplate<typename A, typename C, typename F>\nC g(F down, C b, A x){\n    C b2 = down(x, b);\n    return b2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-c/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\nimport root-r (Bool, List)\nimport root-cpp (Bool, List)\n\nsource R from \"foo.R\" (\"f\")\nsource Cpp from \"foo.hpp\" (\"g\")\n\nf :: Bool -> Bool\n\ng :: (n -> a -> a) -> a -> n -> a\n\nbar = g (\\n a -> [n]) []\n\nfoo = bar . f\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo \"true\" > obs.txt 2> obs.err\n\tdiff exp.txt obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-py/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-py/foo.R",
    "content": "f <- function(n){ TRUE }\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-py/foo.py",
    "content": "def g(f, b, x):\n    return f(x, b)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-8-r-to-py/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\nimport root-r (Bool, List)\nimport root-py (Bool, List)\n\nsource R from \"foo.R\" (\"f\")\nsource Py from \"foo.py\" (\"g\")\n\nf :: Bool -> Bool\n\ng :: (n -> a -> a) -> a -> n -> a\n\nbar = g (\\n a -> [n]) []\n\nfoo = bar . f\n"
  },
  {
    "path": "test-suite/golden-tests/interop-9/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 1 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/interop-9/exp.txt",
    "content": "0\n"
  },
  {
    "path": "test-suite/golden-tests/interop-9/foo.R",
    "content": "add <- function(x, y){ x + y}\n"
  },
  {
    "path": "test-suite/golden-tests/interop-9/foo.py",
    "content": "def sub(x, y):\n    return (x - y)\n\ndef add(x, y):\n    return (x + y)\n"
  },
  {
    "path": "test-suite/golden-tests/interop-9/main.loc",
    "content": "module main (foo)\n\ntype R => Int = \"integer\"\ntype Py => Int = \"int\"\n\nsource R from \"foo.R\" (\"add\")\nsource py from \"foo.py\" (\"sub\")\n\nsub :: Int -> Int -> Int\nadd :: Int -> Int -> Int\n\nfoo x = (sub (sub (add 1 x) x) x)\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-agnostic/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\tcat obs-saved.txt >> obs.txt\n\techo >> obs.txt\n\nclean:\n\trm -rf nexus pools *err obs-saved.txt\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-agnostic/exp.txt",
    "content": "[\"26c7827d889f6da3\",\"d756d7b62fc50bf1\",\"b5148cb100a911fc\"]\n[\"26c7827d889f6da3\",\"d756d7b62fc50bf1\",\"b5148cb100a911fc\"]\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-agnostic/hello.json",
    "content": "\"hello\"\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-agnostic/main.loc",
    "content": "module main (test)\n\n-- Test intrinsics in a language-independent (nexus-only) context.\n-- No language-specific root imports — all evaluation happens in the nexus.\n\ntest = do\n  let y = ( @hash \"hello\"\n          , @hash 42\n          , @hash (1, 2, 3)\n          )\n  @savej y \"obs-saved.txt\"\n  y\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-constants/.gitignore",
    "content": "z\nzj\nzp\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-constants/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus langstr > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err z zj zp\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-constants/exp.txt",
    "content": "[[\"hi\",\"py\"],[\"hi\",\"py\"],[\"hi\",\"py\"],null]\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-constants/main.loc",
    "content": "module main (langstr)\n\nimport root-py (idpy)\nimport root-cpp (idcpp)\nimport root-r (idr)\n\nlangstr :: <IO> (?[Str],?[Str],?[Str],?[Str])\nlangstr = do\n  @save  (idpy [\"hi\", @lang])  \"z\"\n  @savem (idpy [\"hi\", @lang]) \"zp\"\n  @savej (idpy [\"hi\", @lang]) \"zj\"\n  xs  <- @load \"z\"\n  ys  <- @load \"zp\"\n  zs  <- @load \"zj\"\n  bad <- @load \"zany\"\n  (xs, ys, zs, bad)\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-hash/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-hash/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-hash/foo.hpp",
    "content": "#include <string>\n\nint identity_int(int x) {\n    return x;\n}\n\nstd::string identity_str(std::string x) {\n    return x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-hash/foo.py",
    "content": "def identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-hash/main.loc",
    "content": "module main (test)\n\nimport root-cpp\nimport root-py\nimport root-r\n\nsource Cpp from \"foo.hpp\" (\"identity_int\" as identityInt, \"identity_str\" as identityStr)\n\nequalHashes :: [a] -> Bool\nequalHashes xs =\n  let headHash = @hash (at 0 xs)\n      isSame = map ((==) headHash) (map @hash xs)\n  in fold (==) True isSame\n\ntest :: Bool\ntest =\n  equalHashes [idcpp 45, idpy 45, idr 45] &&\n  equalHashes [idcpp \"asdf\", idpy \"asdf\", idr \"asdf\"] &&\n  equalHashes [idcpp 5, idpy 5, idr 5] &&\n  equalHashes [idcpp (5,\"asdf\"), idpy (5,\"asdf\"), idr (5,\"asdf\")]\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-ho-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testShowHO > obs.txt 2> obs.err\n\t./nexus testMapMaybeShow >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-ho-r/exp.txt",
    "content": "\"42\"\n\"42\"\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-ho-r/main.loc",
    "content": "module main (testShowHO, testMapMaybeShow)\n\nimport maybe (tryMaybe, mapMaybe)\nimport maybe-r\nimport root\nimport root-r\n\ntestShowHO :: Str\ntestShowHO = tryMaybe show \"none\" 42\n\ntestMapMaybeShow :: ?Str\ntestMapMaybeShow = mapMaybe show 42\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testShow > obs.txt 2> obs.err\n\t./nexus testRead >> obs.txt 2>> obs.err\n\t./nexus testRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read/exp.txt",
    "content": "true\n42\n99\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read/foo.hpp",
    "content": "int identity_int(int x) {\n    return x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read/main.loc",
    "content": "module main (testShow, testRead, testRoundTrip)\n\nimport root-cpp\nimport root-py\n\nsource Cpp from \"foo.hpp\" (\"identity_int\" as identityInt)\n\nidentityInt :: Int -> Int\n\n-- @show in C++ pool context\nshowCpp :: Int -> Str\nshowCpp x = @show (identityInt x)\n\n-- @show in nexus (no pool context)\nshowNexus :: Int -> Str\nshowNexus x = @show x\n\n-- round-trip: show then read should recover the original value\nroundTrip :: Int -> ?Int\nroundTrip x = @read (@show x)\n\ntestShow :: Bool\ntestShow = showCpp 42 == \"42\" && showNexus 42 == \"42\"\n\ntestRead :: ?Int\ntestRead = @read \"42\"\n\ntestRoundTrip :: ?Int\ntestRoundTrip = roundTrip 99\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read-nexus/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus showInt > obs.txt 2> obs.err\n\t./nexus showReal >> obs.txt 2>> obs.err\n\t./nexus showBool >> obs.txt 2>> obs.err\n\t./nexus showStr >> obs.txt 2>> obs.err\n\t./nexus showList >> obs.txt 2>> obs.err\n\t./nexus showTuple >> obs.txt 2>> obs.err\n\t./nexus showRecord >> obs.txt 2>> obs.err\n\t./nexus showNested >> obs.txt 2>> obs.err\n\t./nexus showOptSome >> obs.txt 2>> obs.err\n\t./nexus showOptNone >> obs.txt 2>> obs.err\n\t./nexus showEmptyList >> obs.txt 2>> obs.err\n\t./nexus readInt >> obs.txt 2>> obs.err\n\t./nexus readReal >> obs.txt 2>> obs.err\n\t./nexus readBool >> obs.txt 2>> obs.err\n\t./nexus readStr >> obs.txt 2>> obs.err\n\t./nexus readList >> obs.txt 2>> obs.err\n\t./nexus readTuple >> obs.txt 2>> obs.err\n\t./nexus readRecord >> obs.txt 2>> obs.err\n\t./nexus readInvalid >> obs.txt 2>> obs.err\n\t./nexus readTypeMismatch >> obs.txt 2>> obs.err\n\t./nexus readEmptyStr >> obs.txt 2>> obs.err\n\t./nexus roundTripInt >> obs.txt 2>> obs.err\n\t./nexus roundTripReal >> obs.txt 2>> obs.err\n\t./nexus roundTripBool >> obs.txt 2>> obs.err\n\t./nexus roundTripStr >> obs.txt 2>> obs.err\n\t./nexus roundTripList >> obs.txt 2>> obs.err\n\t./nexus roundTripTuple >> obs.txt 2>> obs.err\n\t./nexus roundTripRecord >> obs.txt 2>> obs.err\n\t./nexus roundTripNested >> obs.txt 2>> obs.err\n\t./nexus showShowInt >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read-nexus/exp.txt",
    "content": "\"42\"\n\"3.14\"\n\"true\"\n\"\\\"hello\\\"\"\n\"[1,2,3]\"\n\"[10,\\\"abc\\\"]\"\n\"{\\\"x\\\":1,\\\"y\\\":2}\"\n\"[[1,2],[3,4]]\"\n\"42\"\n\"null\"\n\"[]\"\n42\n3.14\ntrue\n\"hello\"\n[1,2,3]\n[10,\"abc\"]\n{\"x\":1,\"y\":2}\nnull\nnull\nnull\n42\n3.14\ntrue\n\"hello\"\n[1,2,3]\n[10,\"abc\"]\n{\"x\":1,\"y\":2}\n[[1,2],[3,4]]\n\"\\\"42\\\"\"\n"
  },
  {
    "path": "test-suite/golden-tests/intrinsic-show-read-nexus/main.loc",
    "content": "module main\n  ( showInt\n  , showReal\n  , showBool\n  , showStr\n  , showList\n  , showTuple\n  , showRecord\n  , showNested\n  , showOptSome\n  , showOptNone\n  , showEmptyList\n  , readInt\n  , readReal\n  , readBool\n  , readStr\n  , readList\n  , readTuple\n  , readRecord\n  , readInvalid\n  , readTypeMismatch\n  , readEmptyStr\n  , roundTripInt\n  , roundTripReal\n  , roundTripBool\n  , roundTripStr\n  , roundTripList\n  , roundTripTuple\n  , roundTripRecord\n  , roundTripNested\n  , showShowInt\n  )\n\n-- No language-specific imports: all nexus-level evaluation\n\nrecord Point where\n  x :: Int\n  y :: Int\n\n-- === @show tests ===\n\nshowInt :: Str\nshowInt = @show 42\n\nshowReal :: Str\nshowReal = @show 3.14\n\nshowBool :: Str\nshowBool = @show True\n\nshowStr :: Str\nshowStr = @show \"hello\"\n\nshowList :: Str\nshowList = @show [1, 2, 3]\n\nshowTuple :: Str\nshowTuple = @show (10, \"abc\")\n\nshowRecord :: Str\nshowRecord = @show {x = 1, y = 2}\n\nshowNested :: Str\nshowNested = @show [[1, 2], [3, 4]]\n\nshowOptSome :: Str\nshowOptSome = @show (@read \"42\" :: ?Int)\n\nshowOptNone :: Str\nshowOptNone = @show (@read \"bad\" :: ?Int)\n\nshowEmptyList :: Str\nshowEmptyList = @show ([] :: [Int])\n\n-- === @read tests ===\n\nreadInt :: ?Int\nreadInt = @read \"42\"\n\nreadReal :: ?Real\nreadReal = @read \"3.14\"\n\nreadBool :: ?Bool\nreadBool = @read \"true\"\n\nreadStr :: ?Str\nreadStr = @read \"\\\"hello\\\"\"\n\nreadList :: ?[Int]\nreadList = @read \"[1,2,3]\"\n\nreadTuple :: ?(Int, Str)\nreadTuple = @read \"[10,\\\"abc\\\"]\"\n\nreadRecord :: ?Point\nreadRecord = @read \"{\\\"x\\\":1,\\\"y\\\":2}\"\n\nreadInvalid :: ?Int\nreadInvalid = @read \"not_a_number\"\n\nreadTypeMismatch :: ?Int\nreadTypeMismatch = @read \"\\\"hello\\\"\"\n\nreadEmptyStr :: ?Int\nreadEmptyStr = @read \"\"\n\n-- === round-trip tests ===\n\nroundTripInt :: ?Int\nroundTripInt = @read (@show 42)\n\nroundTripReal :: ?Real\nroundTripReal = @read (@show 3.14)\n\nroundTripBool :: ?Bool\nroundTripBool = @read (@show True)\n\nroundTripStr :: ?Str\nroundTripStr = @read (@show \"hello\")\n\nroundTripList :: ?[Int]\nroundTripList = @read (@show [1, 2, 3])\n\nroundTripTuple :: ?(Int, Str)\nroundTripTuple = @read (@show (10, \"abc\"))\n\nroundTripRecord :: ?Point\nroundTripRecord = @read (@show {x = 1, y = 2})\n\nroundTripNested :: ?[[Int]]\nroundTripNested = @read (@show [[1, 2], [3, 4]])\n\n-- === edge case: @show of @show ===\n\nshowShowInt :: Str\nshowShowInt = @show (@show 42)\n"
  },
  {
    "path": "test-suite/golden-tests/let-crosslang/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- letMinimal ---\" > obs.txt\n\t./nexus letMinimal >> obs.txt 2> obs.err\n\techo \"--- letMinimalReverse ---\" >> obs.txt\n\t./nexus letMinimalReverse >> obs.txt 2>> obs.err\n\techo \"--- letTripleChain ---\" >> obs.txt\n\t./nexus letTripleChain >> obs.txt 2>> obs.err\n\techo \"--- letDiamond ---\" >> obs.txt\n\t./nexus letDiamond >> obs.txt 2>> obs.err\n\techo \"--- letTripleUse ---\" >> obs.txt\n\t./nexus letTripleUse >> obs.txt 2>> obs.err\n\techo \"--- letDeepChain ---\" >> obs.txt\n\t./nexus letDeepChain >> obs.txt 2>> obs.err\n\techo \"--- letInDoBlock ---\" >> obs.txt\n\t./nexus letInDoBlock >> obs.txt 2>> obs.err\n\techo \"--- letListFanOut ---\" >> obs.txt\n\t./nexus letListFanOut >> obs.txt 2>> obs.err\n\techo \"--- letListDeepChain ---\" >> obs.txt\n\t./nexus letListDeepChain >> obs.txt 2>> obs.err\n\techo \"--- letStrIdChain ---\" >> obs.txt\n\t./nexus letStrIdChain >> obs.txt 2>> obs.err\n\techo \"--- letMixedTypes ---\" >> obs.txt\n\t./nexus letMixedTypes >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/let-crosslang/exp.txt",
    "content": "--- letMinimal ---\n10\n--- letMinimalReverse ---\n10\n--- letTripleChain ---\n5\n--- letDiamond ---\n100\n--- letTripleUse ---\n300\n--- letDeepChain ---\n155\n--- letInDoBlock ---\n4\n--- letListFanOut ---\n40\n--- letListDeepChain ---\n115\n--- letStrIdChain ---\n7\n--- letMixedTypes ---\n20\n"
  },
  {
    "path": "test-suite/golden-tests/let-crosslang/f.hpp",
    "content": "#ifndef __F_HPP__\n#define __F_HPP__\n\n#include <string>\n#include <vector>\n\nstd::string makeStr(int n) {\n    return std::string(n, 'x');\n}\n\nint strLen(const std::string& s) {\n    return (int)s.size();\n}\n\nint doubleIt(int x) {\n    return x * 2;\n}\n\nint addTwo(int a, int b) {\n    return a + b;\n}\n\nstd::vector<int> makeRange(int n) {\n    std::vector<int> result(n);\n    for (int i = 0; i < n; i++) result[i] = i + 1;\n    return result;\n}\n\nint sumList(const std::vector<int>& xs) {\n    int total = 0;\n    for (int x : xs) total += x;\n    return total;\n}\n\nstd::vector<int> incAll(const std::vector<int>& xs) {\n    std::vector<int> result(xs.size());\n    for (size_t i = 0; i < xs.size(); i++) result[i] = xs[i] + 1;\n    return result;\n}\n\nstd::string idStr(const std::string& s) {\n    return s;\n}\n\nstd::vector<int> idList(const std::vector<int>& xs) {\n    return xs;\n}\n\nint counter() {\n    static int n = 0;\n    return ++n;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/let-crosslang/f.py",
    "content": "def makeStr(n):\n    return \"y\" * n\n\ndef strLen(s):\n    return len(s)\n\ndef doubleIt(x):\n    return x * 2\n\ndef addTwo(a, b):\n    return a + b\n\ndef makeRange(n):\n    return list(range(1, n + 1))\n\ndef sumList(xs):\n    return sum(xs)\n\ndef incAll(xs):\n    return [x + 1 for x in xs]\n\ndef idStr(s):\n    return s\n\ndef idList(xs):\n    return list(xs)\n\n_counter = 0\ndef counter():\n    global _counter\n    _counter += 1\n    return _counter\n"
  },
  {
    "path": "test-suite/golden-tests/let-crosslang/main.loc",
    "content": "-- Regression tests for let bindings with cross-language foreign calls.\n-- Covers type changes across let boundaries, diamond patterns, fan-out,\n-- deep chains, do-blocks with lets, and mixed types.\n\nmodule main\n  ( letMinimal\n  , letMinimalReverse\n  , letTripleChain\n  , letDiamond\n  , letTripleUse\n  , letDeepChain\n  , letInDoBlock\n  , letListFanOut\n  , letListDeepChain\n  , letStrIdChain\n  , letMixedTypes\n  )\n\nimport root-cpp\nimport root-py\n\nsource Cpp from \"f.hpp\"\n  ( \"makeStr\" as cppMakeStr\n  , \"strLen\" as cppStrLen\n  , \"doubleIt\" as cppDouble\n  , \"addTwo\" as cppAdd\n  , \"makeRange\" as cppMakeRange\n  , \"sumList\" as cppSumList\n  , \"incAll\" as cppIncAll\n  , \"idStr\" as cppIdStr\n  , \"idList\" as cppIdList\n  , \"counter\" as cppCounter\n  )\n\nsource Py from \"f.py\"\n  ( \"makeStr\" as pyMakeStr\n  , \"strLen\" as pyStrLen\n  , \"doubleIt\" as pyDouble\n  , \"addTwo\" as pyAdd\n  , \"makeRange\" as pyMakeRange\n  , \"sumList\" as pySumList\n  , \"incAll\" as pyIncAll\n  , \"idStr\" as pyIdStr\n  , \"idList\" as pyIdList\n  , \"counter\" as pyCounter\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\ncppMakeStr :: Int -> Str\ncppStrLen :: Str -> Int\ncppDouble :: Int -> Int\ncppAdd :: Int -> Int -> Int\ncppMakeRange :: Int -> [Int]\ncppSumList :: [Int] -> Int\ncppIncAll :: [Int] -> [Int]\ncppIdStr :: Str -> Str\ncppIdList :: [Int] -> [Int]\ncppCounter :: <IO> Int\n\npyMakeStr :: Int -> Str\npyStrLen :: Str -> Int\npyDouble :: Int -> Int\npyAdd :: Int -> Int -> Int\npyMakeRange :: Int -> [Int]\npySumList :: [Int] -> Int\npyIncAll :: [Int] -> [Int]\npyIdStr :: Str -> Str\npyIdList :: [Int] -> [Int]\npyCounter :: <IO> Int\n\n------------------------------------------------------------\n-- PATTERN 1: Minimal reproducer - 2 consecutive foreign calls\n-- from C++ pool to Python pool with type change (Str -> Int -> Str)\n--\n-- cppMakeStr(10) = \"xxxxxxxxxx\" (local)\n-- pyStrLen(\"xxxxxxxxxx\") = 10 (foreign to py, returns Int)\n-- pyMakeStr(10) = \"yyyyyyyyyy\" (foreign to py, returns Str)\n-- cppStrLen(\"yyyyyyyyyy\") = 10 (local)\n-- Expected: 10\n------------------------------------------------------------\nletMinimal :: Int\nletMinimal =\n    let s1 = cppMakeStr 10\n        n1 = pyStrLen s1\n        s2 = pyMakeStr n1\n    in cppStrLen s2\n\n------------------------------------------------------------\n-- PATTERN 2: Reverse direction - C++ is the foreign pool\n-- pyMakeStr(10) = \"yyyyyyyyyy\" (local in py)\n-- cppStrLen(\"yyyyyyyyyy\") = 10 (foreign to cpp, returns Int)\n-- cppMakeStr(10) = \"xxxxxxxxxx\" (foreign to cpp, returns Str)\n-- pyStrLen(\"xxxxxxxxxx\") = 10 (local in py)\n-- Expected: 10\n------------------------------------------------------------\nletMinimalReverse :: Int\nletMinimalReverse =\n    let s1 = pyMakeStr 10\n        n1 = cppStrLen s1\n        s2 = cppMakeStr n1\n    in pyStrLen s2\n\n------------------------------------------------------------\n-- PATTERN 3: Three consecutive foreign calls with alternating types\n-- cppMakeStr(5) = \"xxxxx\" (local)\n-- pyStrLen(\"xxxxx\") = 5 (foreign, Int)\n-- pyMakeStr(5) = \"yyyyy\" (foreign, Str)\n-- pyStrLen(\"yyyyy\") = 5 (foreign, Int)\n-- Expected: 5\n------------------------------------------------------------\nletTripleChain :: Int\nletTripleChain =\n    let s1 = cppMakeStr 5\n        n1 = pyStrLen s1\n        s2 = pyMakeStr n1\n        n2 = pyStrLen s2\n    in n2\n\n------------------------------------------------------------\n-- PATTERN 4: Diamond - single let-bound value used in two foreign calls\n-- cppMakeStr(50) = \"xxx...x\" (50 chars, local)\n-- pyStrLen(s) = 50 (foreign)\n-- pyIdStr(s) -> cppStrLen = 50 (foreign + local)\n-- Expected: 100\n------------------------------------------------------------\nletDiamond :: Int\nletDiamond =\n    let s = cppMakeStr 50\n        a = pyStrLen s\n        b = cppStrLen (pyIdStr s)\n    in a + b\n\n------------------------------------------------------------\n-- PATTERN 5: Let-bound foreign result used 3 times\n-- cppMakeStr(100) = \"xxx...x\" (100 chars, local)\n-- pyStrLen used 3 times, each a foreign call\n-- Expected: 300\n------------------------------------------------------------\nletTripleUse :: Int\nletTripleUse =\n    let s = cppMakeStr 100\n    in pyStrLen s + pyStrLen s + pyStrLen s\n\n------------------------------------------------------------\n-- PATTERN 6: Deep chain of 10 alternating let-bound foreign calls\n-- cppMakeRange(10) = [1..10] (local)\n-- pyIncAll 10 times: each adds 1 to all elements\n-- [1..10] + 10 = [11..20], sum = sum(11..20) = 155\n-- Expected: 155\n------------------------------------------------------------\nletDeepChain :: Int\nletDeepChain =\n    let xs = cppMakeRange 10\n        xs2 = pyIncAll xs\n        xs3 = cppIncAll xs2\n        xs4 = pyIncAll xs3\n        xs5 = cppIncAll xs4\n        xs6 = pyIncAll xs5\n        xs7 = cppIncAll xs6\n        xs8 = pyIncAll xs7\n        xs9 = cppIncAll xs8\n        xs10 = pyIncAll xs9\n        xs11 = cppIncAll xs10\n    in pySumList xs11\n\n------------------------------------------------------------\n-- PATTERN 7: Do-block with let bindings and foreign calls\n-- cppCounter returns 1, pyDouble(1) = 2\n-- cppCounter returns 2, pyAdd(2,2) = 4\n-- Expected: 4\n------------------------------------------------------------\nletInDoBlock :: <IO> Int\nletInDoBlock = do\n    a <- cppCounter\n    let b = pyDouble a\n    c <- cppCounter\n    pyAdd b c\n\n------------------------------------------------------------\n-- PATTERN 8: Let-bound list forwarded to two different foreign calls\n-- cppMakeRange(5) = [1,2,3,4,5] (local)\n-- pyIncAll([1..5]) = [2..6] (foreign)\n-- pySumList([2..6]) = 20 (foreign)\n-- pyIncAll([1..5]) = [2..6] (foreign again on same data)\n-- pySumList([2..6]) = 20 (foreign)\n-- Expected: 40\n------------------------------------------------------------\nletListFanOut :: Int\nletListFanOut =\n    let xs = cppMakeRange 5\n        a = pySumList (cppIncAll xs)\n        b = pySumList (cppIncAll xs)\n    in a + b\n\n------------------------------------------------------------\n-- PATTERN 9: Deep chain with list types changing in let bindings\n-- pyMakeRange(10) = [1..10] (local in py)\n-- 6 alternating incAll calls across languages\n-- [1..10] + 6 = [7..16], sum = sum(7..16) = 115\n-- Expected: 115\n------------------------------------------------------------\nletListDeepChain :: Int\nletListDeepChain =\n    let xs = pyMakeRange 10\n        xs2 = cppIncAll xs\n        xs3 = pyIncAll xs2\n        xs4 = cppIncAll xs3\n        xs5 = pyIncAll xs4\n        xs6 = cppIncAll xs5\n        xs7 = pyIncAll xs6\n    in cppSumList xs7\n\n------------------------------------------------------------\n-- PATTERN 10: Consecutive string identity calls through let bindings\n-- Each call should preserve the string but crosses a language boundary\n-- cppMakeStr(7) = \"xxxxxxx\" (local)\n-- pyIdStr -> cppIdStr -> pyIdStr -> cppIdStr (4 foreign calls)\n-- cppStrLen = 7\n-- Expected: 7\n------------------------------------------------------------\nletStrIdChain :: Int\nletStrIdChain =\n    let s = cppMakeStr 7\n        s2 = pyIdStr s\n        s3 = cppIdStr s2\n        s4 = pyIdStr s3\n        s5 = cppIdStr s4\n    in cppStrLen s5\n\n------------------------------------------------------------\n-- PATTERN 11: Mixed types in lets - Int and Str interleaved\n-- 5 = 5 (literal)\n-- pyDouble(5) = 10 (foreign, returns Int)\n-- pyMakeStr(10) = \"yyyyyyyyyy\" (foreign, returns Str) <-- type change!\n-- pyStrLen(\"yyyyyyyyyy\") = 10 (foreign, returns Int)\n-- pyDouble(10) = 20 (foreign, returns Int)\n-- Expected: 20\n------------------------------------------------------------\nletMixedTypes :: Int\nletMixedTypes =\n    let n1 = pyDouble 5\n        s1 = pyMakeStr n1\n        n2 = pyStrLen s1\n        n3 = pyDouble n2\n    in n3\n"
  },
  {
    "path": "test-suite/golden-tests/let-expressions/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testCppOnce ---\" > obs.txt\n\t./nexus testCppOnce 5 >> obs.txt 2>&1\n\techo \"--- testPyOnce ---\" >> obs.txt\n\t./nexus testPyOnce 5 >> obs.txt 2>&1\n\techo \"--- testROnce ---\" >> obs.txt\n\t./nexus testROnce 5 >> obs.txt 2>&1\n\techo \"--- testCppSeq ---\" >> obs.txt\n\t./nexus testCppSeq 5 >> obs.txt 2>&1\n\techo \"--- testPySeq ---\" >> obs.txt\n\t./nexus testPySeq 5 >> obs.txt 2>&1\n\techo \"--- testRSeq ---\" >> obs.txt\n\t./nexus testRSeq 5 >> obs.txt 2>&1\n\techo \"--- testShadow ---\" >> obs.txt\n\t./nexus testShadow 5 >> obs.txt 2>&1\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/let-expressions/exp.txt",
    "content": "--- testCppOnce ---\nEVAL 5\n20\n--- testPyOnce ---\nEVAL 5\n20\n--- testROnce ---\nEVAL 5\n20\n--- testCppSeq ---\nEVAL 5\nEVAL 10\n30\n--- testPySeq ---\nEVAL 5\nEVAL 10\n30\n--- testRSeq ---\nEVAL 5\nEVAL 10\n30\n--- testShadow ---\nEVAL 5\n20\n"
  },
  {
    "path": "test-suite/golden-tests/let-expressions/foo.R",
    "content": "sideEffectR <- function(x) {\n    cat(paste0(\"EVAL \", x, \"\\n\"), file = stderr())\n    x * 2L\n}\n"
  },
  {
    "path": "test-suite/golden-tests/let-expressions/foo.hpp",
    "content": "#include <iostream>\n\nint sideEffectCpp(int x) {\n    std::cerr << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/let-expressions/foo.py",
    "content": "import sys\n\ndef sideEffectPy(x):\n    print(\"EVAL \" + str(x), file=sys.stderr)\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/let-expressions/main.loc",
    "content": "module main\n  ( testCppOnce\n  , testPyOnce\n  , testROnce\n  , testCppSeq\n  , testPySeq\n  , testRSeq\n  , testShadow\n  )\n\nimport root-cpp\nimport root-py\nimport root-r\n\n-- Each sideEffect function prints \"EVAL <input>\" to stderr and returns input * 2.\n-- This lets us verify: (1) how many times it is called, and (2) in what order.\n\nsource Cpp from \"foo.hpp\" (\"sideEffectCpp\")\nsource Py from \"foo.py\" (\"sideEffectPy\")\nsource R from \"foo.R\" (\"sideEffectR\")\n\nsideEffectCpp :: Int -> Int\nsideEffectPy :: Int -> Int\nsideEffectR :: Int -> Int\n\n-- Single let, value used twice: side effect should happen exactly once.\n-- With input 5: sideEffect 5 prints \"EVAL 5\", a = 10, result = 10 + 10 = 20.\ntestCppOnce :: Int -> Int\ntestCppOnce x = let a = sideEffectCpp x in a + a\n\ntestPyOnce :: Int -> Int\ntestPyOnce x = let a = sideEffectPy x in a + a\n\ntestROnce :: Int -> Int\ntestROnce x = let a = sideEffectR x in a + a\n\n-- Sequential lets: first binding evaluated before second, each exactly once.\n-- With input 5: sideEffect 5 prints \"EVAL 5\", a = 10,\n--               sideEffect 10 prints \"EVAL 10\", b = 20,\n--               result = 10 + 20 = 30.\ntestCppSeq :: Int -> Int\ntestCppSeq x =\n  let a = sideEffectCpp x\n      b = sideEffectCpp a\n  in a + b\n\ntestPySeq :: Int -> Int\ntestPySeq x =\n  let a = sideEffectPy x\n      b = sideEffectPy a\n  in a + b\n\ntestRSeq :: Int -> Int\ntestRSeq x =\n  let a = sideEffectR x\n      b = sideEffectR a\n  in a + b\n\n-- Shadowing: let-bound variable shadows the lambda parameter.\n-- The `x` in `sideEffectCpp x` refers to the lambda parameter (5).\n-- The `x` in `x + x` refers to the let-bound variable (10).\n-- Result: 10 + 10 = 20.\ntestShadow :: Int -> Int\ntestShadow x = let x = sideEffectCpp x in x + x\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 4 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/bar/baz/main.loc",
    "content": "module (*)\n\nimport .bif.biz (mul)\n\nsource Py from \"main.py\" (\"add\")\n\ntype Py => Real = \"float\"\n\nadd :: Real -> Real -> Real\n\ncompute :: Real -> Real -> Real\ncompute x y = add x (mul y y)\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/bar/baz/main.py",
    "content": "def add(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/bif/biz/main.loc",
    "content": "module (*)\n\nsource Py from \"main.py\" (\"mul\")\n\ntype Py => Real = \"float\"\n\nmul :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/bif/biz/main.py",
    "content": "def mul(x, y):\n    return x * y\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/exp.txt",
    "content": "18\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-cousin-py/main.loc",
    "content": "module main (foo)\n\ntype Py => Real = \"float\"\n\nimport .bar.baz (compute)\n\nfoo :: Real -> Real -> Real\nfoo x y = compute x y\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 3 2>> obs.err  >> obs.txt\n\t./nexus bar 3 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/exp.txt",
    "content": "-9\n9\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/lib/math/main.loc",
    "content": "module (*)\n\nsource Py from \"main.py\" (\"square\")\n\ntype Py => Real = \"float\"\n\nsquare :: Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/lib/math/main.py",
    "content": "def square(x):\n    return x * x\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/main.loc",
    "content": "module (foo, bar)\n\ntype Py => Real = \"float\"\n\nimport .util (negate)\nimport .lib.math (square)\n\nfoo :: Real -> Real\nfoo x = negate (square x)\n\nbar :: Real -> Real\nbar x = square (negate x)\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/package.yaml",
    "content": "name: main\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/util.loc",
    "content": "module (*)\n\nsource Py from \"util.py\" (\"negate\")\n\ntype Py => Real = \"float\"\n\nnegate :: Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-nested-py/util.py",
    "content": "def negate(x):\n    return -x\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-root-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 21 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-root-py/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-root-py/main.loc",
    "content": "module main (foo)\n\ntype Py => Real = \"float\"\n\nimport .root (double)\n\nfoo :: Real -> Real\nfoo x = double x\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-root-py/root/main.loc",
    "content": "module (*)\n\nsource Py from \"main.py\" (\"double\")\n\ntype Py => Real = \"float\"\n\ndouble :: Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/local-import-root-py/root/main.py",
    "content": "def double(x):\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-0/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t0.loc 2> build.err\n\t./nexus checkInt 2>> obs.err  >> obs.txt\n\t./nexus checkInt8 2>> obs.err  >> obs.txt\n\t./nexus checkInt16 2>> obs.err  >> obs.txt\n\t./nexus checkInt32 2>> obs.err  >> obs.txt\n\t./nexus checkInt64 2>> obs.err  >> obs.txt\n\t./nexus checkUInt8 2>> obs.err  >> obs.txt\n\t./nexus checkUInt16 2>> obs.err  >> obs.txt\n\t./nexus checkUInt32 2>> obs.err  >> obs.txt\n\t./nexus checkUInt64 2>> obs.err  >> obs.txt\n\t./nexus checkReal 2>> obs.err  >> obs.txt\n\t./nexus checkBool 2>> obs.err  >> obs.txt\n\t./nexus checkStr 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-0/exp.txt",
    "content": "1\n2\n3\n4\n5\n6\n7\n8\n9\n420.69\ntrue\n\"abcd\"\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-0/t0.loc",
    "content": "module main (*)\n\nimport root (Int, Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Real, Bool, Str)\n\ncheckInt    = 1 :: Int\ncheckInt8   = 2 :: Int8\ncheckInt16  = 3 :: Int16\ncheckInt32  = 4 :: Int32\ncheckInt64  = 5 :: Int64\ncheckUInt8  = 6 :: UInt8\ncheckUInt16 = 7 :: UInt16\ncheckUInt32 = 8 :: UInt32\ncheckUInt64 = 9 :: UInt64\ncheckReal   = 420.69 :: Real\ncheckBool   = True\ncheckStr    = \"abcd\"\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-0x/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 12 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *pdf\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-0x/exp.txt",
    "content": "12\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-0x/main.loc",
    "content": "module main (foo)\n\nfoo :: Int -> Int\nfoo x = x\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t1.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-1/exp.txt",
    "content": "[]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-1/t1.loc",
    "content": "module main (foo)\n\nfoo :: [Int]\nfoo = []\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t2.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-2/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-2/t2.loc",
    "content": "module main (foo)\n\nfoo = [True]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-2x/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t2.loc 2> build.err\n\t./nexus foo 99 > obs.txt 2> obs.err\n\t./nexus bar true '\"Alice\"' 2>> obs.err  >> obs.txt\n\t./nexus baz 4.3 -5 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-2x/exp.txt",
    "content": "[99,99]\n[true,\"Alice\"]\n[-5,4.3,4.3]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-2x/t2.loc",
    "content": "module main (foo, bar, baz)\n\nfoo :: Int -> [Int] \nfoo x = [x,x]\n\nbar :: Bool -> Str -> (Bool, Str)\nbar x y = (x,y)\n\nbaz :: Real -> Real -> [Real]\nbaz x y = [y,x,x]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t3.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-3/exp.txt",
    "content": "[[true],42,\"hello\"]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-3/t3.loc",
    "content": "module main (foo)\n\nfoo = ([True], 42, \"hello\")\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-3x/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t3.loc 2> build.err\n\t./nexus foo '[true]' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-3x/exp.txt",
    "content": "[[true],42,\"hello\"]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-3x/t3.loc",
    "content": "module main (foo)\n\nfoo :: [Bool] -> ([Bool], Int, Str)\nfoo x = (x, 42, \"hello\")\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t4.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *out\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_c/exp.txt",
    "content": "[[true,42],4]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_c/t4.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\nfoo = ((True, 42.0), 1.0 * 4.0)\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t4.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_py/exp.txt",
    "content": "[[42,true],4]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_py/t4.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfoo = ((42.0, True), 1.0 * 4.0)\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_r/exp.txt",
    "content": "[[true,4.2],4]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-4_r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\nfoo = ((True, 4.2), 1.0 * 4.0)\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t5.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *out\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_c/exp.txt",
    "content": "10\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_c/t5.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\nfoo = fold (+) 0.0 [1.0, 2.0, 3.0, 1.0 * 4.0]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t5.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_py/exp.txt",
    "content": "10\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_py/t5.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfoo = fold (+) 0.0 [1.0, 2.0, 3.0, 1.0 * 4.0]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t5.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_r/exp.txt",
    "content": "10\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-5_r/t5.loc",
    "content": "module main (foo)\n\nimport root-r\n\nfoo = fold (+) 0.0 [1.0, 2.0, 3.0, 1.0 * 4.0]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t6.loc 2> build.err\n\t./nexus foo 16 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *out\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_c/exp.txt",
    "content": "[true,16,0.25]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_c/t6.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\nfoo x = (True, x, 4.0 / x)\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t6.loc 2> build.err\n\t./nexus foo 16 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus poolspy\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_py/exp.txt",
    "content": "[true,16,0.25]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_py/t6.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfoo x = (True, x, 4.0 / x)\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t6.loc 2> build.err\n\t./nexus foo 16 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus poolsR\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_r/exp.txt",
    "content": "[true,16,0.25]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-6_r/t6.loc",
    "content": "module main (foo)\n\nimport root-r\n\nfoo x = (True, x, 4.0 / x)\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t7.loc 2> build.err\n\t./nexus foo 16 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_c/exp.txt",
    "content": "26\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_c/t7.loc",
    "content": "module main (foo)\n\nimport root-cpp\n\nfoo x = fold (+) 0.0 [1.0, 2.0, 3.0, x, x / 4.0]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t7.loc 2> build.err\n\t./nexus foo 16 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_py/exp.txt",
    "content": "26\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_py/t7.loc",
    "content": "module main (foo)\n\nimport root-py\n\nfoo x = fold (+) 0.0 [1.0, 2.0, 3.0, x, x / 4.0]\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus t7.loc 2> build.err\n\t./nexus foo 16 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_r/exp.txt",
    "content": "26\n"
  },
  {
    "path": "test-suite/golden-tests/manifold-form-7_r/t7.loc",
    "content": "module main (foo)\n\nimport root-r\n\nfoo x = fold (+) 0.0 [1.0, 2.0, 3.0, x, x / 4.0]\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testOptRealCppToPy > obs.txt 2> obs.err\n\t./nexus testOptRealPyToCpp >> obs.txt 2>> obs.err\n\t./nexus testRecordCppToPy >> obs.txt 2>> obs.err\n\t./nexus testRecordPyToCpp >> obs.txt 2>> obs.err\n\t./nexus testOptRealChain >> obs.txt 2>> obs.err\n\t./nexus testRecordChain >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/cppfuncs.loc",
    "content": "module (*)\n\nimport .types\n\ncMakeOptReal :: Real -> ?Real\ncFromNullReal :: Real -> ?Real -> Real\ncDoubleOptReal :: ?Real -> ?Real\ncMakePair :: Bool -> Real -> PairBR\ncGetPairValue :: PairBR -> Real\n\nsource Cpp from \"foo.hpp\"\n  ( \"cMakeOptReal\"\n  , \"cFromNullReal\"\n  , \"cDoubleOptReal\"\n  , \"cMakePair\"\n  , \"cGetPairValue\"\n  )\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/exp.txt",
    "content": "3.14\n3.14\n2.718\n2.718\n3\n3\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/foo.hpp",
    "content": "#ifndef MORLOC_MEMORY_INTEROP_MISALIGN_HPP\n#define MORLOC_MEMORY_INTEROP_MISALIGN_HPP\n\n#include <optional>\n\nstruct pair_br_t {\n    bool flag;\n    double value;\n};\n\nstd::optional<double> cMakeOptReal(double x) {\n    return std::optional<double>(x);\n}\n\ndouble cFromNullReal(double def, const std::optional<double>& x) {\n    if (!x.has_value()) return def;\n    return *x;\n}\n\nstd::optional<double> cDoubleOptReal(const std::optional<double>& x) {\n    if (!x.has_value()) return std::nullopt;\n    return std::optional<double>(*x * 2.0);\n}\n\npair_br_t cMakePair(bool flag, double value) {\n    return pair_br_t{flag, value};\n}\n\ndouble cGetPairValue(const pair_br_t& p) {\n    return p.value;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/foo.py",
    "content": "def pMakeOptReal(x):\n    return x\n\ndef pFromNullReal(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef pMakePair(flag, value):\n    return {\"flag\": flag, \"value\": value}\n\ndef pGetPairValue(p):\n    return p[\"value\"]\n\ndef pDoublePairValue(p):\n    return {\"flag\": p[\"flag\"], \"value\": p[\"value\"] * 2.0}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/main.loc",
    "content": "-- Tests misaligned data crossing C++ <-> Python boundary through shared memory.\n-- ?Real and {flag :: Bool, value :: Real} force misaligned reads/writes\n-- in the voidstar serialization format when crossing language boundaries.\n\nmodule main\n  ( testOptRealCppToPy\n  , testOptRealPyToCpp\n  , testRecordCppToPy\n  , testRecordPyToCpp\n  , testOptRealChain\n  , testRecordChain\n  )\n\nimport .types\nimport .cppfuncs\nimport .pyfuncs\n\ntestOptRealCppToPy :: Real\ntestOptRealCppToPy = pFromNullReal 0.0 (cMakeOptReal 3.14)\n\ntestOptRealPyToCpp :: Real\ntestOptRealPyToCpp = cFromNullReal 0.0 (pMakeOptReal 3.14)\n\ntestRecordCppToPy :: Real\ntestRecordCppToPy = pGetPairValue (cMakePair True 2.718)\n\ntestRecordPyToCpp :: Real\ntestRecordPyToCpp = cGetPairValue (pMakePair True 2.718)\n\ntestOptRealChain :: Real\ntestOptRealChain = pFromNullReal 0.0 (cDoubleOptReal (pMakeOptReal 1.5))\n\ntestRecordChain :: Real\ntestRecordChain = cGetPairValue (pDoublePairValue (cMakePair False 1.5))\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/pyfuncs.loc",
    "content": "module (*)\n\nimport .types\n\npMakeOptReal :: Real -> ?Real\npFromNullReal :: Real -> ?Real -> Real\npMakePair :: Bool -> Real -> PairBR\npGetPairValue :: PairBR -> Real\npDoublePairValue :: PairBR -> PairBR\n\nsource Py from \"foo.py\"\n  ( \"pMakeOptReal\"\n  , \"pFromNullReal\"\n  , \"pMakePair\"\n  , \"pGetPairValue\"\n  , \"pDoublePairValue\"\n  )\n"
  },
  {
    "path": "test-suite/golden-tests/memory-interop-misalign-cp/types.loc",
    "content": "module (*)\n\ntype Py => Bool = \"bool\"\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Cpp => Bool = \"bool\"\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\n\nrecord PairBR where\n  flag :: Bool\n  value :: Real\nrecord Cpp => PairBR = \"pair_br_t\"\nrecord Py => PairBR = \"dict\"\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testOptRecord > obs.txt 2> obs.err\n\t./nexus testOptRecordNull >> obs.txt 2>> obs.err\n\t./nexus testListRecords >> obs.txt 2>> obs.err\n\t./nexus testRecordWithOpt >> obs.txt 2>> obs.err\n\t./nexus testRecordWithOptNull >> obs.txt 2>> obs.err\n\t./nexus testNestedRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-cpp/exp.txt",
    "content": "{\"flag\":true,\"value\":3.14}\n{\"flag\":false,\"value\":0}\n6.6\n9.99\nnull\n{\"flag\":true,\"value\":2.5}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-cpp/foo.hpp",
    "content": "#ifndef MORLOC_MEMORY_NESTED_MISALIGN_HPP\n#define MORLOC_MEMORY_NESTED_MISALIGN_HPP\n\n#include <optional>\n#include <vector>\n\nstruct pair_br_t {\n    bool flag;\n    double value;\n};\n\nstruct record_with_opt_t {\n    bool flag;\n    std::optional<double> opt;\n};\n\ntemplate <typename T>\nstd::optional<T> toNull(const T& x) {\n    return std::optional<T>(x);\n}\n\nstd::optional<pair_br_t> makeOptRecord(bool flag, double value) {\n    return std::optional<pair_br_t>(pair_br_t{flag, value});\n}\n\npair_br_t fromNullRecord(const pair_br_t& def, const std::optional<pair_br_t>& x) {\n    if (!x.has_value()) return def;\n    return *x;\n}\n\ndouble sumRecordValues(const std::vector<pair_br_t>& records) {\n    double sum = 0.0;\n    for (const auto& r : records) {\n        sum += r.value;\n    }\n    return sum;\n}\n\nrecord_with_opt_t makeRecordWithOpt(bool flag, const std::optional<double>& opt) {\n    return record_with_opt_t{flag, opt};\n}\n\nstd::optional<double> getRecordOpt(const record_with_opt_t& r) {\n    return r.opt;\n}\n\nstd::optional<pair_br_t> nestedRoundTrip(const std::optional<pair_br_t>& x) {\n    if (!x.has_value()) return std::nullopt;\n    return std::optional<pair_br_t>(pair_br_t{x->flag, x->value});\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-cpp/main.loc",
    "content": "-- Tests compound misalignment from nested containers in C++.\n-- ?PairBR: optional wrapping a record with misaligned fields\n-- [PairBR]: array of records whose element width is misaligned\n-- RecordWithOpt: record containing a ?Real field (misaligned optional inside record)\n\nmodule main\n  ( testOptRecord\n  , testOptRecordNull\n  , testListRecords\n  , testRecordWithOpt\n  , testRecordWithOptNull\n  , testNestedRoundTrip\n  )\n\ntype Cpp => Bool = \"bool\"\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\nrecord PairBR where\n  flag :: Bool\n  value :: Real\nrecord Cpp => PairBR = \"pair_br_t\"\n\nrecord RecordWithOpt where\n  flag :: Bool\n  opt :: ?Real\nrecord Cpp => RecordWithOpt = \"record_with_opt_t\"\n\nmakeOptRecord :: Bool -> Real -> ?PairBR\nfromNullRecord :: PairBR -> ?PairBR -> PairBR\nsumRecordValues :: [PairBR] -> Real\nmakeRecordWithOpt :: Bool -> ?Real -> RecordWithOpt\ngetRecordOpt :: RecordWithOpt -> ?Real\nnestedRoundTrip :: ?PairBR -> ?PairBR\n\nsource Cpp from \"foo.hpp\"\n  ( \"makeOptRecord\"\n  , \"fromNullRecord\"\n  , \"sumRecordValues\"\n  , \"makeRecordWithOpt\"\n  , \"getRecordOpt\"\n  , \"nestedRoundTrip\"\n  )\n\ntoNull :: a -> ?a\nsource Cpp from \"foo.hpp\" (\"toNull\")\n\ntestOptRecord :: PairBR\ntestOptRecord = fromNullRecord {flag = False, value = 0.0} (makeOptRecord True 3.14)\n\ntestOptRecordNull :: PairBR\ntestOptRecordNull = fromNullRecord {flag = False, value = 0.0} Null\n\ntestListRecords :: Real\ntestListRecords = sumRecordValues [{flag = True, value = 1.1}, {flag = False, value = 2.2}, {flag = True, value = 3.3}]\n\ntestRecordWithOpt :: ?Real\ntestRecordWithOpt = getRecordOpt (makeRecordWithOpt True (toNull 9.99))\n\ntestRecordWithOptNull :: ?Real\ntestRecordWithOptNull = getRecordOpt (makeRecordWithOpt False Null)\n\ntestNestedRoundTrip :: PairBR\ntestNestedRoundTrip = fromNullRecord {flag = False, value = 0.0} (nestedRoundTrip (makeOptRecord True 2.5))\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testOptRecord > obs.txt 2> obs.err\n\t./nexus testOptRecordNull >> obs.txt 2>> obs.err\n\t./nexus testListRecords >> obs.txt 2>> obs.err\n\t./nexus testRecordWithOpt >> obs.txt 2>> obs.err\n\t./nexus testRecordWithOptNull >> obs.txt 2>> obs.err\n\t./nexus testNestedRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-py/exp.txt",
    "content": "{\"flag\":true,\"value\":3.14}\n{\"flag\":false,\"value\":0}\n6.6\n9.99\nnull\n{\"flag\":true,\"value\":2.5}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-py/foo.py",
    "content": "def toNull(x):\n    return x\n\ndef makeOptRecord(flag, value):\n    return {\"flag\": flag, \"value\": value}\n\ndef fromNullRecord(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef sumRecordValues(records):\n    return sum(r[\"value\"] for r in records)\n\ndef makeRecordWithOpt(flag, opt):\n    return {\"flag\": flag, \"opt\": opt}\n\ndef getRecordOpt(r):\n    return r[\"opt\"]\n\ndef nestedRoundTrip(x):\n    if x is None:\n        return None\n    return {\"flag\": x[\"flag\"], \"value\": x[\"value\"]}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-nested-misalign-py/main.loc",
    "content": "-- Tests compound misalignment from nested containers in Python.\n-- ?PairBR: optional wrapping a record with misaligned fields\n-- [PairBR]: array of records whose element width is misaligned\n-- RecordWithOpt: record containing a ?Real field (misaligned optional inside record)\n\nmodule main\n  ( testOptRecord\n  , testOptRecordNull\n  , testListRecords\n  , testRecordWithOpt\n  , testRecordWithOptNull\n  , testNestedRoundTrip\n  )\n\ntype Py => Bool = \"bool\"\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Py => List a = \"list\" a\n\nrecord PairBR where\n  flag :: Bool\n  value :: Real\nrecord Py => PairBR = \"dict\"\n\nrecord RecordWithOpt where\n  flag :: Bool\n  opt :: ?Real\nrecord Py => RecordWithOpt = \"dict\"\n\nmakeOptRecord :: Bool -> Real -> ?PairBR\nfromNullRecord :: PairBR -> ?PairBR -> PairBR\nsumRecordValues :: [PairBR] -> Real\nmakeRecordWithOpt :: Bool -> ?Real -> RecordWithOpt\ngetRecordOpt :: RecordWithOpt -> ?Real\nnestedRoundTrip :: ?PairBR -> ?PairBR\n\nsource Py from \"foo.py\"\n  ( \"makeOptRecord\"\n  , \"fromNullRecord\"\n  , \"sumRecordValues\"\n  , \"makeRecordWithOpt\"\n  , \"getRecordOpt\"\n  , \"nestedRoundTrip\"\n  )\n\ntoNull :: a -> ?a\nsource Py from \"foo.py\" (\"toNull\")\n\ntestOptRecord :: PairBR\ntestOptRecord = fromNullRecord {flag = False, value = 0.0} (makeOptRecord True 3.14)\n\ntestOptRecordNull :: PairBR\ntestOptRecordNull = fromNullRecord {flag = False, value = 0.0} Null\n\ntestListRecords :: Real\ntestListRecords = sumRecordValues [{flag = True, value = 1.1}, {flag = False, value = 2.2}, {flag = True, value = 3.3}]\n\ntestRecordWithOpt :: ?Real\ntestRecordWithOpt = getRecordOpt (makeRecordWithOpt True (toNull 9.99))\n\ntestRecordWithOptNull :: ?Real\ntestRecordWithOptNull = getRecordOpt (makeRecordWithOpt False Null)\n\ntestNestedRoundTrip :: PairBR\ntestNestedRoundTrip = fromNullRecord {flag = False, value = 0.0} (nestedRoundTrip (makeOptRecord True 2.5))\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testOptRealValue > obs.txt 2> obs.err\n\t./nexus testOptRealNull >> obs.txt 2>> obs.err\n\t./nexus testOptRealRoundTrip >> obs.txt 2>> obs.err\n\t./nexus testOptIntValue >> obs.txt 2>> obs.err\n\t./nexus testOptIntNull >> obs.txt 2>> obs.err\n\t./nexus testOptIntRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-cpp/exp.txt",
    "content": "3.14\nfalse\n3\n42\nfalse\n30\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-cpp/foo.hpp",
    "content": "#ifndef MORLOC_MEMORY_OPTIONAL_DOUBLE_HPP\n#define MORLOC_MEMORY_OPTIONAL_DOUBLE_HPP\n\n#include <optional>\n\ntemplate <typename T>\nstd::optional<T> toNull(const T& x) {\n    return std::optional<T>(x);\n}\n\ntemplate <typename T>\nbool isNull(const std::optional<T>& x) {\n    return !x.has_value();\n}\n\ntemplate <typename T>\nT fromNull(const T& def, const std::optional<T>& x) {\n    if (!x.has_value()) return def;\n    return *x;\n}\n\nstd::optional<double> doubleOptReal(const std::optional<double>& x) {\n    if (!x.has_value()) return std::nullopt;\n    return std::optional<double>(*x * 2.0);\n}\n\nstd::optional<int> addOptInt(const std::optional<int>& x, const std::optional<int>& y) {\n    if (!x.has_value() || !y.has_value()) return std::nullopt;\n    return std::optional<int>(*x + *y);\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-cpp/main.loc",
    "content": "-- Tests memory alignment of optional types.\n-- ?Real stores a 1-byte tag at offset 0, then an 8-byte double at offset 1.\n-- On strict-alignment platforms, this causes SIGBUS.\n\nmodule main\n  ( testOptRealValue\n  , testOptRealNull\n  , testOptRealRoundTrip\n  , testOptIntValue\n  , testOptIntNull\n  , testOptIntRoundTrip\n  )\n\ntype Cpp => Bool = \"bool\"\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\n\ntoNull :: a -> ?a\nisNull :: ?a -> Bool\nfromNull :: a -> ?a -> a\ndoubleOptReal :: ?Real -> ?Real\naddOptInt :: ?Int -> ?Int -> ?Int\n\nsource Cpp from \"foo.hpp\"\n  ( \"toNull\"\n  , \"isNull\"\n  , \"fromNull\"\n  , \"doubleOptReal\"\n  , \"addOptInt\"\n  )\n\ntestOptRealValue :: ?Real\ntestOptRealValue = toNull 3.14\n\ntestOptRealNull :: Bool\ntestOptRealNull = isNull (toNull 3.14)\n\ntestOptRealRoundTrip :: Real\ntestOptRealRoundTrip = fromNull 0.0 (doubleOptReal (toNull 1.5))\n\ntestOptIntValue :: ?Int\ntestOptIntValue = toNull 42\n\ntestOptIntNull :: Bool\ntestOptIntNull = isNull (toNull 42)\n\ntestOptIntRoundTrip :: Int\ntestOptIntRoundTrip = fromNull 0 (addOptInt (toNull 10) (toNull 20))\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testOptRealValue > obs.txt 2> obs.err\n\t./nexus testOptRealNull >> obs.txt 2>> obs.err\n\t./nexus testOptRealRoundTrip >> obs.txt 2>> obs.err\n\t./nexus testOptIntValue >> obs.txt 2>> obs.err\n\t./nexus testOptIntNull >> obs.txt 2>> obs.err\n\t./nexus testOptIntRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-py/exp.txt",
    "content": "3.14\nfalse\n3\n42\nfalse\n30\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-py/foo.py",
    "content": "def toNull(x):\n    return x\n\ndef isNull(x):\n    return x is None\n\ndef fromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef doubleOptReal(x):\n    if x is None:\n        return None\n    return x * 2.0\n\ndef addOptInt(x, y):\n    if x is None or y is None:\n        return None\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/memory-optional-double-py/main.loc",
    "content": "-- Tests memory alignment of optional types in Python.\n-- ?Real stores a 1-byte tag at offset 0, then an 8-byte double at offset 1.\n-- On strict-alignment platforms, this causes SIGBUS.\n\nmodule main\n  ( testOptRealValue\n  , testOptRealNull\n  , testOptRealRoundTrip\n  , testOptIntValue\n  , testOptIntNull\n  , testOptIntRoundTrip\n  )\n\ntype Py => Bool = \"bool\"\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\n\ntoNull :: a -> ?a\nisNull :: ?a -> Bool\nfromNull :: a -> ?a -> a\ndoubleOptReal :: ?Real -> ?Real\naddOptInt :: ?Int -> ?Int -> ?Int\n\nsource Py from \"foo.py\"\n  ( \"toNull\"\n  , \"isNull\"\n  , \"fromNull\"\n  , \"doubleOptReal\"\n  , \"addOptInt\"\n  )\n\ntestOptRealValue :: ?Real\ntestOptRealValue = toNull 3.14\n\ntestOptRealNull :: Bool\ntestOptRealNull = isNull (toNull 3.14)\n\ntestOptRealRoundTrip :: Real\ntestOptRealRoundTrip = fromNull 0.0 (doubleOptReal (toNull 1.5))\n\ntestOptIntValue :: ?Int\ntestOptIntValue = toNull 42\n\ntestOptIntNull :: Bool\ntestOptIntNull = isNull (toNull 42)\n\ntestOptIntRoundTrip :: Int\ntestOptIntRoundTrip = fromNull 0 (addOptInt (toNull 10) (toNull 20))\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMakePair > obs.txt 2> obs.err\n\t./nexus testGetPairFlag >> obs.txt 2>> obs.err\n\t./nexus testGetPairValue >> obs.txt 2>> obs.err\n\t./nexus testPairRoundTrip >> obs.txt 2>> obs.err\n\t./nexus testMakeTriple >> obs.txt 2>> obs.err\n\t./nexus testGetTripleCount >> obs.txt 2>> obs.err\n\t./nexus testGetTripleValue >> obs.txt 2>> obs.err\n\t./nexus testTripleRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-cpp/exp.txt",
    "content": "{\"flag\":true,\"value\":3.14}\ntrue\n3.14\n{\"flag\":false,\"value\":2.718}\n{\"flag\":true,\"count\":42,\"value\":3.14}\n42\n3.14\n{\"flag\":false,\"count\":99,\"value\":1.618}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-cpp/foo.hpp",
    "content": "#ifndef MORLOC_MEMORY_RECORD_PACK_HPP\n#define MORLOC_MEMORY_RECORD_PACK_HPP\n\nstruct pair_br_t {\n    bool flag;\n    double value;\n};\n\nstruct triple_bir_t {\n    bool flag;\n    int count;\n    double value;\n};\n\npair_br_t makePair(bool flag, double value) {\n    return pair_br_t{flag, value};\n}\n\nbool getPairFlag(const pair_br_t& p) {\n    return p.flag;\n}\n\ndouble getPairValue(const pair_br_t& p) {\n    return p.value;\n}\n\npair_br_t pairRoundTrip(const pair_br_t& p) {\n    return pair_br_t{p.flag, p.value};\n}\n\ntriple_bir_t makeTriple(bool flag, int count, double value) {\n    return triple_bir_t{flag, count, value};\n}\n\nint getTripleCount(const triple_bir_t& t) {\n    return t.count;\n}\n\ndouble getTripleValue(const triple_bir_t& t) {\n    return t.value;\n}\n\ntriple_bir_t tripleRoundTrip(const triple_bir_t& t) {\n    return triple_bir_t{t.flag, t.count, t.value};\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-cpp/main.loc",
    "content": "-- Tests memory alignment of record field packing.\n-- {flag :: Bool, value :: Real} puts Real at offset 1 (needs 8-byte alignment).\n-- {flag :: Bool, count :: Int, value :: Real} puts Int at offset 1, Real at offset 5.\n-- On strict-alignment platforms, misaligned field access causes SIGBUS.\n\nmodule main\n  ( testMakePair\n  , testGetPairFlag\n  , testGetPairValue\n  , testPairRoundTrip\n  , testMakeTriple\n  , testGetTripleCount\n  , testGetTripleValue\n  , testTripleRoundTrip\n  )\n\ntype Cpp => Bool = \"bool\"\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\n\nrecord PairBR where\n  flag :: Bool\n  value :: Real\nrecord Cpp => PairBR = \"pair_br_t\"\n\nrecord TripleBIR where\n  flag :: Bool\n  count :: Int\n  value :: Real\nrecord Cpp => TripleBIR = \"triple_bir_t\"\n\nmakePair :: Bool -> Real -> PairBR\ngetPairFlag :: PairBR -> Bool\ngetPairValue :: PairBR -> Real\npairRoundTrip :: PairBR -> PairBR\n\nmakeTriple :: Bool -> Int -> Real -> TripleBIR\ngetTripleCount :: TripleBIR -> Int\ngetTripleValue :: TripleBIR -> Real\ntripleRoundTrip :: TripleBIR -> TripleBIR\n\nsource Cpp from \"foo.hpp\"\n  ( \"makePair\"\n  , \"getPairFlag\"\n  , \"getPairValue\"\n  , \"pairRoundTrip\"\n  , \"makeTriple\"\n  , \"getTripleCount\"\n  , \"getTripleValue\"\n  , \"tripleRoundTrip\"\n  )\n\ntestMakePair :: PairBR\ntestMakePair = makePair True 3.14\n\ntestGetPairFlag :: Bool\ntestGetPairFlag = getPairFlag (makePair True 3.14)\n\ntestGetPairValue :: Real\ntestGetPairValue = getPairValue (makePair True 3.14)\n\ntestPairRoundTrip :: PairBR\ntestPairRoundTrip = pairRoundTrip (makePair False 2.718)\n\ntestMakeTriple :: TripleBIR\ntestMakeTriple = makeTriple True 42 3.14\n\ntestGetTripleCount :: Int\ntestGetTripleCount = getTripleCount (makeTriple True 42 3.14)\n\ntestGetTripleValue :: Real\ntestGetTripleValue = getTripleValue (makeTriple True 42 3.14)\n\ntestTripleRoundTrip :: TripleBIR\ntestTripleRoundTrip = tripleRoundTrip (makeTriple False 99 1.618)\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMakePair > obs.txt 2> obs.err\n\t./nexus testGetPairFlag >> obs.txt 2>> obs.err\n\t./nexus testGetPairValue >> obs.txt 2>> obs.err\n\t./nexus testPairRoundTrip >> obs.txt 2>> obs.err\n\t./nexus testMakeTriple >> obs.txt 2>> obs.err\n\t./nexus testGetTripleCount >> obs.txt 2>> obs.err\n\t./nexus testGetTripleValue >> obs.txt 2>> obs.err\n\t./nexus testTripleRoundTrip >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-py/exp.txt",
    "content": "{\"flag\":true,\"value\":3.14}\ntrue\n3.14\n{\"flag\":false,\"value\":2.718}\n{\"flag\":true,\"count\":42,\"value\":3.14}\n42\n3.14\n{\"flag\":false,\"count\":99,\"value\":1.618}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-py/foo.py",
    "content": "def makePair(flag, value):\n    return {\"flag\": flag, \"value\": value}\n\ndef getPairFlag(p):\n    return p[\"flag\"]\n\ndef getPairValue(p):\n    return p[\"value\"]\n\ndef pairRoundTrip(p):\n    return {\"flag\": p[\"flag\"], \"value\": p[\"value\"]}\n\ndef makeTriple(flag, count, value):\n    return {\"flag\": flag, \"count\": count, \"value\": value}\n\ndef getTripleCount(t):\n    return t[\"count\"]\n\ndef getTripleValue(t):\n    return t[\"value\"]\n\ndef tripleRoundTrip(t):\n    return {\"flag\": t[\"flag\"], \"count\": t[\"count\"], \"value\": t[\"value\"]}\n"
  },
  {
    "path": "test-suite/golden-tests/memory-record-pack-py/main.loc",
    "content": "-- Tests memory alignment of record field packing in Python.\n-- {flag :: Bool, value :: Real} puts Real at offset 1 (needs 8-byte alignment).\n-- {flag :: Bool, count :: Int, value :: Real} puts Int at offset 1, Real at offset 5.\n-- On strict-alignment platforms, misaligned field access causes SIGBUS.\n\nmodule main\n  ( testMakePair\n  , testGetPairFlag\n  , testGetPairValue\n  , testPairRoundTrip\n  , testMakeTriple\n  , testGetTripleCount\n  , testGetTripleValue\n  , testTripleRoundTrip\n  )\n\ntype Py => Bool = \"bool\"\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\n\nrecord PairBR where\n  flag :: Bool\n  value :: Real\nrecord Py => PairBR = \"dict\"\n\nrecord TripleBIR where\n  flag :: Bool\n  count :: Int\n  value :: Real\nrecord Py => TripleBIR = \"dict\"\n\nmakePair :: Bool -> Real -> PairBR\ngetPairFlag :: PairBR -> Bool\ngetPairValue :: PairBR -> Real\npairRoundTrip :: PairBR -> PairBR\n\nmakeTriple :: Bool -> Int -> Real -> TripleBIR\ngetTripleCount :: TripleBIR -> Int\ngetTripleValue :: TripleBIR -> Real\ntripleRoundTrip :: TripleBIR -> TripleBIR\n\nsource Py from \"foo.py\"\n  ( \"makePair\"\n  , \"getPairFlag\"\n  , \"getPairValue\"\n  , \"pairRoundTrip\"\n  , \"makeTriple\"\n  , \"getTripleCount\"\n  , \"getTripleValue\"\n  , \"tripleRoundTrip\"\n  )\n\ntestMakePair :: PairBR\ntestMakePair = makePair True 3.14\n\ntestGetPairFlag :: Bool\ntestGetPairFlag = getPairFlag (makePair True 3.14)\n\ntestGetPairValue :: Real\ntestGetPairValue = getPairValue (makePair True 3.14)\n\ntestPairRoundTrip :: PairBR\ntestPairRoundTrip = pairRoundTrip (makePair False 2.718)\n\ntestMakeTriple :: TripleBIR\ntestMakeTriple = makeTriple True 42 3.14\n\ntestGetTripleCount :: Int\ntestGetTripleCount = getTripleCount (makeTriple True 42 3.14)\n\ntestGetTripleValue :: Real\ntestGetTripleValue = getTripleValue (makeTriple True 42 3.14)\n\ntestTripleRoundTrip :: TripleBIR\ntestTripleRoundTrip = tripleRoundTrip (makeTriple False 99 1.618)\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testBoolChain > obs.txt 2> obs.err\n\t./nexus testShortStrChain >> obs.txt 2>> obs.err\n\t./nexus testMixedChain >> obs.txt 2>> obs.err\n\t./nexus testMultiHop >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__ *err\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/cppfuncs.loc",
    "content": "module cppfuncs (*)\n\nimport types\n\ncNotBool :: Bool -> Bool\ncAppendStr :: Str -> Str -> Str\ncBoolToStr :: Bool -> Str\ncStrLen :: Str -> Int\n\nsource Cpp from \"foo.hpp\" (\"cNotBool\", \"cAppendStr\", \"cBoolToStr\", \"cStrLen\")\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/exp.txt",
    "content": "false\n\"abcd\"\n\"false\"\n6\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/foo.hpp",
    "content": "#ifndef MORLOC_MEMORY_SPLIT_BLOCK_HPP\n#define MORLOC_MEMORY_SPLIT_BLOCK_HPP\n\n#include <string>\n\nbool cNotBool(bool x) {\n    return !x;\n}\n\nstd::string cAppendStr(const std::string& a, const std::string& b) {\n    return a + b;\n}\n\nstd::string cBoolToStr(bool x) {\n    return x ? \"true\" : \"false\";\n}\n\nint cStrLen(const std::string& s) {\n    return static_cast<int>(s.size());\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/foo.py",
    "content": "def pNotBool(x):\n    return not x\n\ndef pAppendStr(a, b):\n    return a + b\n\ndef pBoolToStr(x):\n    return \"true\" if x else \"false\"\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/main.loc",
    "content": "-- Tests split_block header alignment in shared memory allocator.\n-- When shmalloc(N) where N is not 8-aligned, the next block_header_t\n-- gets placed at a misaligned address. The _Atomic(unsigned int) reference_count\n-- and size_t size fields in the header get misaligned, causing bus errors on\n-- atomic operations on strict-alignment platforms.\n--\n-- Strategy: Chain cross-language calls with odd-sized intermediates (Bool = 1 byte,\n-- short strings) to force multiple shared memory allocations and block splits\n-- at non-8-byte boundaries.\n\nmodule main\n  ( testBoolChain\n  , testShortStrChain\n  , testMixedChain\n  , testMultiHop\n  )\n\nimport types\nimport cppfuncs\nimport pyfuncs\n\n-- Bool is 1 byte; crossing languages forces serialization through shared memory\ntestBoolChain :: Bool\ntestBoolChain = cNotBool (pNotBool (cNotBool True))\n\n-- Short strings have odd byte counts, forcing odd-sized allocations\ntestShortStrChain :: Str\ntestShortStrChain = pAppendStr (cAppendStr \"ab\" \"c\") \"d\"\n\n-- Mix Bool and Str across languages to create varied allocation sizes\ntestMixedChain :: Str\ntestMixedChain = cBoolToStr (pNotBool (cNotBool False))\n\n-- Multiple hops to increase chance of hitting a split_block boundary\ntestMultiHop :: Int\ntestMultiHop = cStrLen (pAppendStr (cAppendStr (pBoolToStr True) \"x\") \"y\")\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/pyfuncs.loc",
    "content": "module pyfuncs (*)\n\nimport types\n\npNotBool :: Bool -> Bool\npAppendStr :: Str -> Str -> Str\npBoolToStr :: Bool -> Str\n\nsource Py from \"foo.py\" (\"pNotBool\", \"pAppendStr\", \"pBoolToStr\")\n"
  },
  {
    "path": "test-suite/golden-tests/memory-split-block-cpp/types.loc",
    "content": "module types (*)\n\ntype Py => Bool = \"bool\"\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Cpp => Bool = \"bool\"\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-00n/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-00n/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-00n/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + (-1 * y)\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-00n/main.loc",
    "content": "module main (foo)\n\n-- 00n - declare n\n\nimport fooroot\n\nfoo x y = x + -1 * y\nfoo x y = 0 + x + -1 * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-011/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-011/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-011/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-011/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-011/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-011/main.loc",
    "content": "module main (foo)\n\n-- 011 - source0, import 1, declare 1\n\nimport fooroot\nimport foopy (foo)\n\nfoo x y = x + -1 * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-01n/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-01n/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-01n/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-01n/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-01n/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-01n/main.loc",
    "content": "module main (foo)\n\n-- 01n - source 0, import 1, declare n\n\nimport fooroot\nimport foopy (foo)\n\nfoo x y = x + -1 * y\nfoo x y = 0 + (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/foo.R",
    "content": "foo <- function(x,y){\n  x + (-1) * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/foor.loc",
    "content": "module foor (foo)\n\nimport fooroot\n\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n0/main.loc",
    "content": "module main (foo)\n\n-- 0n0 - source 0, import n, declare 0\n\nimport fooroot\nimport foopy (foo)\nimport foor (foo)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/foo.R",
    "content": "foo <- function(x,y){\n  x + (-1) * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/foor.loc",
    "content": "module foor (foo)\n\nimport fooroot\n\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + (-1 * y)\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-0n1/main.loc",
    "content": "module main (foo)\n\n-- 0n1 - source 0, import n, declare 1\n\nimport fooroot\nimport foopy (foo)\nimport foor (foo)\n\nfoo x y = x + -1 * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-101/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-101/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-101/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-101/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-101/main.loc",
    "content": "module main (foo)\n\n-- 101 - source 1, import 0, declare 1\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n\nfoo x y = x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-10n/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-10n/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-10n/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-10n/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-10n/main.loc",
    "content": "module main (foo)\n\n-- 10n - source 1, import 0, declare n\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n\nfoo x y = x + -1 * y\nfoo x y = 0 + x + -1 * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-110/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-110/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-110/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-110/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-110/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-110/main.loc",
    "content": "module main (foo)\n\n-- 110 - source 1, import 1, declare 0\n\nimport fooroot\nimport foopy (foo)\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-111/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-111/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-111/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-111/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-111/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + (-1 * y)\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-111/main.loc",
    "content": "module main (foo)\n\n-- 111 - source 1, import 1, declare 1\n\nimport fooroot\nimport foopy (foo)\n\nsource Py from \"foo.py\" (\"foo\")\n\nfoo x y = x + -1 * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus poolsgc __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/foo.R",
    "content": "foo <- function(x,y){\n  x + (-1) * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/foor.loc",
    "content": "module foor (foo)\n\nimport fooroot\n\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-1n0/main.loc",
    "content": "module main (foo)\n\n-- 1n0 - source 1, import n, declare 0\n\nimport fooroot\nimport foopy (foo)\nimport foor (foo)\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus poolsgc __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/foo.R",
    "content": "foo <- function(x,y){\n  x + (-1) * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/foor.loc",
    "content": "module foor (foo)\n\nimport fooroot\n\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n00/main.loc",
    "content": "module main (foo)\n\n-- n00 - source n, import 0, declare 0\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/foo.R",
    "content": "foo <- function(x,y){\n  x + (-1) * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/foor.loc",
    "content": "module foor (foo)\n\nimport fooroot\n\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + -1 * y\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n01/main.loc",
    "content": "module main (foo)\n\n-- n01 - source n, import 0, declare 1\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\nsource Py from \"foo.R\" (\"foo\")\n\nfoo x y = x + -1 * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 7 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/exp.txt",
    "content": "-2\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/foo.R",
    "content": "foo <- function(x,y){\n  x + (-1) * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/foo.py",
    "content": "def foo(x,y):\n  return x + (-1) * y\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/foopy.loc",
    "content": "module foopy (foo)\n\nimport fooroot\n\nsource Py from \"foo.py\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/foor.loc",
    "content": "module foor (foo)\n\nimport fooroot\n\nsource R from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/fooroot.loc",
    "content": "module fooroot (*)\n\nimport root-r\nimport root-py\n\nfoo :: Int -> Int -> Int\nfoo x y = x + (-1 * y)\nfoo x y = 1 * (x + -1 * y)\n"
  },
  {
    "path": "test-suite/golden-tests/module-form-n10/main.loc",
    "content": "module main (foo)\n\n-- n10 - source n, import 1, declare 0\n\nimport fooroot\nimport foopy (foo)\n\nsource Py from \"foo.py\" (\"foo\")\nsource Py from \"foo.R\" (\"foo\")\n"
  },
  {
    "path": "test-suite/golden-tests/multi-lang-mempty-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/multi-lang-mempty-py/exp.txt",
    "content": "[1,2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/multi-lang-mempty-py/main.loc",
    "content": "module main (test)\n\nimport root\nimport root-py\nimport root-cpp\n\ntest :: [Int]\ntest = concat [[1,2],[3,4]]\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus rms [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-c/exp.txt",
    "content": "2.16024689946929\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-c/main.loc",
    "content": "-- NOTE: currently not used, the compiler cannot yet distinguish between sourced functions\n\nmodule main (rms)\n\nimport root-cpp\n\nsource cpp from \"rms.h\"\n  ( \"rms1\" as rms\n  , \"rms2\" as rms\n  )\n\nrms :: [Real] -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-c/rms.h",
    "content": "#ifndef __RMS_H__\n#define __RMS_H__\n\n#include <math.h>\n#include <vector>\n#include <array>\n\ndouble rms1(std::vector<double>);\ndouble rms2(std::vector<double>);\n\ndouble rms1(std::vector<double> xs){\n    double x = 0;\n    for(size_t i = 0; i < xs.size(); i++){\n        x += xs[i] * xs[i];\n    }\n    return sqrt(x / xs.size());\n}\n\n// Only slightly different from rms1\ndouble rms2(std::vector<double> xs){\n    double x = 0;\n    for(size_t i = 0; i < xs.size(); i++){\n        x += pow(xs[i], 2);\n    }\n    return sqrt(x / xs.size());\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus rms [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-py/exp.txt",
    "content": "2.16024689946929\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-py/main.loc",
    "content": "-- NOTE: currently not used, the compiler cannot yet distinguish between sourced functions\n\nmodule main (rms)\n\nimport root-py\n\nsource py from \"rms.py\"\n  ( \"rms1\" as rms\n  , \"rms2\" as rms\n  )\n\nrms :: [Real] -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-py/rms.py",
    "content": "import math\n\ndef rms1(xs):\n    return math.sqrt(sum([x*x for x in xs])/len(xs))\n\ndef rms2(xs):\n    y = 0\n    for x in xs:\n        y += x*x\n    y = y / len(xs)\n    return math.sqrt(y)\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus rms [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-r/exp.txt",
    "content": "2.16024689946929\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-r/main.loc",
    "content": "-- NOTE: currently not used, the compiler cannot yet distinguish between sourced functions\n\nmodule main (rms)\n\nimport root-r\n\nsource r from \"rms.R\"\n  ( \"rms1\" as rms\n  , \"rms2\" as rms\n  )\n\nrms :: [Real] -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-1-r/rms.R",
    "content": "# the smart way\nrms1 <- function(xs){\n  sqrt(mean(xs^2))\n}\n\n# the dumb way\nrms2 <- function(xs){\n  result = 0\n  for (x in xs){\n    result = result + x ^ 2\n  }\n  result = result / length(xs)\n  result = sqrt(result)\n  return(result)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus rms [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-c/exp.txt",
    "content": "2.16024689946929\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-c/main.loc",
    "content": "module main (rms)\n\nimport math-cpp (sqrt)\nimport root-cpp (Functor, Integral)\n\nsource cpp from \"rms.h\"\n ( \"rms1\" as rms\n , \"mean\"\n )\n\nmean :: [Real] -> Real\n\nsquare x = x * x\nrms xs = sqrt (mean (map square xs))\n\nrms :: [Real] -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-c/rms.h",
    "content": "#ifndef __RMS_H__\n#define __RMS_H__\n\n#include <math.h>\n#include <vector>\n#include <array>\n\ndouble rms1(std::vector<double>);\ndouble rms2(std::vector<double>);\n\ndouble rms1(std::vector<double> xs){\n    double x = 0;\n    for(size_t i = 0; i < xs.size(); i++){\n        x += xs[i] * xs[i];\n    }\n    return sqrt(x / xs.size());\n}\n\n// Only slightly different from rms1\ndouble rms2(std::vector<double> xs){\n    double x = 0;\n    for(size_t i = 0; i < xs.size(); i++){\n        x += pow(xs[i], 2);\n    }\n    return sqrt(x / xs.size());\n}\n\ndouble mean(std::vector<double> xs){\n    double s = 0;\n    for(size_t i = 0; i < xs.size(); i++){\n        s += xs[i];\n    }\n    return s / xs.size();\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus rms [2,4,4,8] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-py/exp.txt",
    "content": "5\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-py/main.loc",
    "content": "module main (rms)\n\nimport math-py (sqrt)\nimport root-py\n\nsource py from \"rms.py\"\n  ( \"rms1\" as rms\n  , \"mean\"\n  )\n\nmean :: [Real] -> Real\n\nsquare x = x * x\nrms xs = sqrt (mean (map square xs))\n\nrms :: [Real] -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-py/rms.py",
    "content": "import math\n\ndef rms1(xs):\n    return math.sqrt(sum([x*x for x in xs])/len(xs))\n\ndef rms2(xs):\n    y = 0\n    for x in xs:\n        y += x*x\n    y = y / len(xs)\n    return math.sqrt(y)\n\ndef mean(xs):\n    return sum(xs) / len(xs)\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus rms [2,4,4,8] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-r/exp.txt",
    "content": "5\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-r/main.loc",
    "content": "module main (rms)\n\nimport math-r (sqrt)\nimport root-r (Functor, Integral)\n\nsource r from \"rms.R\"\n  ( \"rms1\" as rms\n  , \"mean_list\" as mean\n  )\n\nmean :: [Real] -> Real\n\nsquare x = x * x\nrms xs = sqrt (mean (map square xs))\n\nrms :: [Real] -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-r/rms.R",
    "content": "# the smart way\nrms1 <- function(xs){\n  sqrt(mean(xs^2))\n}\n\n# the dumb way\nrms2 <- function(xs){\n  result = 0\n  for (x in xs){\n    result = result + x ^ 2\n  }\n  result = result / length(xs)\n  result = sqrt(result)\n  return(result)\n}\n\nmean_list <- function(xs) {\n  mean(xs)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/multiple-instances-2-r/rms.py",
    "content": "import math\n\ndef rms1(xs):\n    return math.sqrt(sum([x*x for x in xs])/len(xs))\n\ndef rms2(xs):\n    y = 0\n    for x in xs:\n        y += x*x\n    y = y / len(xs)\n    return math.sqrt(y)\n"
  },
  {
    "path": "test-suite/golden-tests/multiprocessing-py-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus bar   [1,2,3] > obs.txt 2> obs.err\n\t./nexus foo 5 [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/multiprocessing-py-1/exp.txt",
    "content": "[6,7,8]\n[6,7,8]\n"
  },
  {
    "path": "test-suite/golden-tests/multiprocessing-py-1/foo.py",
    "content": "import multiprocessing as mp\nimport os\n\nn_workers = max(1, os.cpu_count() // 2)\n\ndef pmap(f, xs):\n    with mp.Pool(processes=n_workers) as pool:\n        results = pool.map(f, xs)\n    return results\n\ndef add(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/multiprocessing-py-1/main.loc",
    "content": "module main (bar, foo)\n\nsource Py from \"foo.py\" (\"pmap\", \"add\")\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\n\npmap :: (a -> b) -> [a] -> [b] \nadd :: Int -> Int -> Int\n\nbar   = pmap (add 5)\nfoo x = pmap (add x)\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-basic/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-basic/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-basic/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n\ntriple :: Int -> Int\ntriple x = x + x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-basic/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\ntest :: [Bool]\ntest =\n  [ h.double 5 == 10\n  , h.triple 3 == 9\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-disambiguation/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-disambiguation/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-disambiguation/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-disambiguation/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\n-- Test that h.double (namespace qualified) and .0 (getter) work in same file\ntest :: [Bool]\ntest =\n  [ h.double 5 == 10\n  , (.0 (42, 7)) == 42\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-composition/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-composition/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-composition/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n\ntriple :: Int -> Int\ntriple x = x + x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-composition/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\ntest :: [Bool]\ntest =\n  [ (h.double . h.triple) 2 == 12\n  , (h.triple . h.double) 2 == 12\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-double-import/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-double-import/exp.txt",
    "content": "[true,true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-double-import/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n\ntriple :: Int -> Int\ntriple x = x + x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-double-import/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers\nimport .helpers as h\n\ntest :: [Bool]\ntest =\n  [ double 5 == 10\n  , h.double 5 == 10\n  , triple 3 == 9\n  , h.triple 3 == 9\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-exported/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus myDouble 5 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-exported/exp.txt",
    "content": "10\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-exported/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-exported/main.loc",
    "content": "module main (myDouble)\n\nimport root-py\nimport .helpers as h\n\nmyDouble :: Int -> Int\nmyDouble x = h.double x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-guard/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-guard/exp.txt",
    "content": "[true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-guard/helpers.loc",
    "content": "module (*)\n\nimport root\n\nisPositive :: Int -> Bool\nisPositive x = x > 0\n\nnegate :: Int -> Int\nnegate x = 0 - x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-guard/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\nabsVal :: Int -> Int\nabsVal x\n  ? h.isPositive x = x\n  : h.negate x\n\ntest :: [Bool]\ntest =\n  [ absVal 5 == 5\n  , absVal (-3) == 3\n  , absVal 0 == 0\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-hof/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-hof/exp.txt",
    "content": "[2,4,6]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-hof/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-hof/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\ntest :: [Int]\ntest = map h.double [1, 2, 3]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-let/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-let/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-let/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-let/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\ntest :: [Bool]\ntest =\n  let x = h.double 5\n  in [x == 10]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-multi/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-multi/exp.txt",
    "content": "[true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-multi/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .mod-a as a\nimport .mod-b as b\nimport .mod-c as c\n\ntest :: [Bool]\ntest =\n  [ a.inc 0 == 1\n  , b.inc 0 == 10\n  , c.inc 0 == 100\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-multi/mod-a.loc",
    "content": "module (*)\n\nimport root\n\ninc :: Int -> Int\ninc x = x + 1\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-multi/mod-b.loc",
    "content": "module (*)\n\nimport root\n\ninc :: Int -> Int\ninc x = x + 10\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-multi/mod-c.loc",
    "content": "module (*)\n\nimport root\n\ninc :: Int -> Int\ninc x = x + 100\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-nested-getter/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-nested-getter/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-nested-getter/helpers.loc",
    "content": "module (*)\n\nimport root\n\nmakePair :: Int -> Int -> (Int, Int)\nmakePair a b = (a, b)\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-nested-getter/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\ntest :: [Bool]\ntest =\n  [ (.0 (h.makePair 42 7)) == 42\n  , (.1 (h.makePair 42 7)) == 7\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-reexport/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-reexport/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-reexport/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-reexport/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .middle\n\ntest :: [Bool]\ntest = [quadruple 3 == 12]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-reexport/middle.loc",
    "content": "module (*)\n\nimport root\nimport .helpers as h\n\nquadruple :: Int -> Int\nquadruple x = h.double (h.double x)\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-same-func-name/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-same-func-name/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-same-func-name/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-same-func-name/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as map\n\ninc :: Int -> Int\ninc x = x + 1\n\ntest :: [Bool]\ntest =\n  [ map.double 5 == 10\n  , map inc [1, 2, 3] == [2, 3, 4]\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-shadow/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-shadow/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-shadow/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-shadow/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\nh :: Int -> Int\nh x = x + 1\n\ntest :: [Bool]\ntest =\n  [ h 5 == 6\n  , h.double 5 == 10\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-unqualified/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc > obs.txt 2> build.err; grep -c \"Undefined term\" build.err > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-unqualified/exp.txt",
    "content": "1\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-unqualified/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-ns-unqualified/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h\n\ntest :: Int\ntest = double 5\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-selective/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-selective/exp.txt",
    "content": "[true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-selective/helpers.loc",
    "content": "module (*)\n\nimport root\n\ndouble :: Int -> Int\ndouble x = x + x\n\ntriple :: Int -> Int\ntriple x = x + x + x\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-selective/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .helpers as h (double)\n\ntest :: [Bool]\ntest =\n  [ h.double 5 == 10\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-separate-impls/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-separate-impls/exp.txt",
    "content": "[true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-separate-impls/main.loc",
    "content": "module main (test)\n\nimport root-py\nimport .mod-a as a\nimport .mod-b as b\n\ntest :: [Bool]\ntest =\n  [ a.transform 5 == 15\n  , b.transform 5 == 10\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-separate-impls/mod-a.loc",
    "content": "module (*)\n\nimport root\n\ntransform :: Int -> Int\ntransform x = x + 10\n"
  },
  {
    "path": "test-suite/golden-tests/namespace-separate-impls/mod-b.loc",
    "content": "module (*)\n\nimport root\n\ntransform :: Int -> Int\ntransform x = x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/nat-typecheck/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testAdd ---\" > obs.txt\n\t./nexus testAdd >> obs.txt 2>> obs.err\n\techo \"--- testTranspose ---\" >> obs.txt\n\t./nexus testTranspose >> obs.txt 2>> obs.err\n\techo \"--- testMatmul ---\" >> obs.txt\n\t./nexus testMatmul >> obs.txt 2>> obs.err\n\techo \"--- testTrace ---\" >> obs.txt\n\t./nexus testTrace >> obs.txt 2>> obs.err\n\techo \"--- testDiag ---\" >> obs.txt\n\t./nexus testDiag >> obs.txt 2>> obs.err\n\techo \"--- testVstack ---\" >> obs.txt\n\t./nexus testVstack >> obs.txt 2>> obs.err\n\techo \"--- testHstack ---\" >> obs.txt\n\t./nexus testHstack >> obs.txt 2>> obs.err\n\techo \"--- testOuter ---\" >> obs.txt\n\t./nexus testOuter >> obs.txt 2>> obs.err\n\techo \"--- testDot ---\" >> obs.txt\n\t./nexus testDot >> obs.txt 2>> obs.err\n\techo \"--- testFlatten ---\" >> obs.txt\n\t./nexus testFlatten >> obs.txt 2>> obs.err\n\techo \"--- testKron ---\" >> obs.txt\n\t./nexus testKron >> obs.txt 2>> obs.err\n\techo \"--- testSlice ---\" >> obs.txt\n\t./nexus testSlice >> obs.txt 2>> obs.err\n\techo \"--- testTake ---\" >> obs.txt\n\t./nexus testTake >> obs.txt 2>> obs.err\n\techo \"--- testSliceAdd ---\" >> obs.txt\n\t./nexus testSliceAdd >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/nat-typecheck/exp.txt",
    "content": "--- testAdd ---\n24\n--- testTranspose ---\n12\n--- testMatmul ---\n12\n--- testTrace ---\n5\n--- testDiag ---\n4\n--- testVstack ---\n18\n--- testHstack ---\n18\n--- testOuter ---\n12\n--- testDot ---\n5\n--- testFlatten ---\n12\n--- testKron ---\n24\n--- testSlice ---\n6\n--- testTake ---\n3\n--- testSliceAdd ---\n12\n"
  },
  {
    "path": "test-suite/golden-tests/nat-typecheck/main.loc",
    "content": "-- Comprehensive nat-parameterized type tests.\n-- Tests Category 1 (fully checked), Category 2 (deferred/ground),\n-- and Category 3 (opaque dimensions) from the nat type system.\nmodule main\n  ( testAdd\n  , testTranspose\n  , testMatmul\n  , testTrace\n  , testDiag\n  , testVstack\n  , testHstack\n  , testOuter\n  , testDot\n  , testFlatten\n  , testKron\n  , testSlice\n  , testTake\n  , testSliceAdd\n  )\n\nimport root\nimport root-cpp\n\nsource Cpp from \"src.hpp\"\n  ( \"ones34\"\n  , \"ones23\"\n  , \"ones32\"\n  , \"ones22\"\n  , \"ones43\"\n  , \"ones56\"\n  , \"onesV3\"\n  , \"onesV4\"\n  , \"onesV5\"\n  , \"onesV10\"\n  , \"eye33\"\n  , \"eye55\"\n  , \"add2d\"\n  , \"transpose2d\"\n  , \"matmul2d\"\n  , \"dot1d\"\n  , \"tsum1d\"\n  , \"trace2d\"\n  , \"diag2d\"\n  , \"diagMat1d\"\n  , \"flatten2d\"\n  , \"outer1d\"\n  , \"vstack2d\"\n  , \"hstack2d\"\n  , \"kron2d\"\n  , \"slice2d\"\n  , \"ttake1d\"\n  )\n\n-- -- This is what `ones` SHOULD be, but this would require dependent types:\n-- ones :: i:Int -> j:Int -> Tensor2 i j Real\n\n-- Tensor constructors with specific dimensions\nones34 :: Tensor2 3 4 Real\nones23 :: Tensor2 2 3 Real\nones32 :: Tensor2 3 2 Real\nones22 :: Tensor2 2 2 Real\nones43 :: Tensor2 4 3 Real\nones56 :: Tensor2 5 6 Real\nonesV3 :: Tensor1 3 Real\nonesV4 :: Tensor1 4 Real\nonesV5 :: Tensor1 5 Real\nonesV10 :: Tensor1 10 Real\neye33 :: Tensor2 3 3 Real\neye55 :: Tensor2 5 5 Real\n\n-- Category 1: Fully checked operations (explicit forall via var list)\nadd2d :: Tensor2 m n Real -> Tensor2 m n Real -> Tensor2 m n Real\ntranspose2d :: Tensor2 m n Real -> Tensor2 n m Real\nmatmul2d :: Tensor2 m k Real -> Tensor2 k n Real -> Tensor2 m n Real\ndot1d :: Tensor1 n Real -> Tensor1 n Real -> Real\ntsum1d :: Tensor1 n Real -> Real\ntrace2d :: Tensor2 n n Real -> Real\ndiag2d :: Tensor2 n n Real -> Tensor1 n Real\ndiagMat1d :: Tensor1 n Real -> Tensor2 n n Real\nouter1d :: Tensor1 m Real -> Tensor1 n Real -> Tensor2 m n Real\n\n-- Category 2: Nat arithmetic (deferred with variables, checked at ground)\nflatten2d :: Tensor2 m n Real -> Tensor1 (m * n) Real\nvstack2d :: Tensor2 m n Real -> Tensor2 p n Real -> Tensor2 (m + p) n Real\nhstack2d :: Tensor2 m n Real -> Tensor2 m p Real -> Tensor2 m (n + p) Real\nkron2d :: Tensor2 m n Real -> Tensor2 p q Real -> Tensor2 (m * p) (n * q) Real\n\n-- Category 3: Opaque output dimensions\nslice2d :: Int -> Int -> Tensor2 m n Real -> Tensor2 i j Real\nttake1d :: Int -> Tensor1 n Real -> Tensor1 i Real\n\n-- === Category 1 tests ===\n\n-- add two 3x4 ones matrices -> 3x4 of 2s -> flatten -> sum = 24\ntestAdd :: Real\ntestAdd = tsum1d (flatten2d (add2d ones34 ones34))\n\n-- transpose 3x4 ones -> 4x3 ones -> flatten -> sum = 12\ntestTranspose :: Real\ntestTranspose = tsum1d (flatten2d (transpose2d ones34))\n\n-- matmul I(3x3) * ones(3x4) = ones(3x4) -> flatten -> sum = 12\ntestMatmul :: Real\ntestMatmul = tsum1d (flatten2d (matmul2d eye33 ones34))\n\n-- trace of 5x5 identity = 5\ntestTrace :: Real\ntestTrace = trace2d eye55\n\n-- diagMat(ones(4)) = 4x4 diag matrix, diag extracts [1,1,1,1], sum = 4\ntestDiag :: Real\ntestDiag = tsum1d (diag2d (diagMat1d onesV4))\n\n-- vstack ones(2x3) ones(4x3) = ones(6x3) -> flatten -> sum = 18\ntestVstack :: Real\ntestVstack = tsum1d (flatten2d (vstack2d ones23 ones43))\n\n-- hstack ones(3x2) ones(3x4) = ones(3x6) -> flatten -> sum = 18\ntestHstack :: Real\ntestHstack = tsum1d (flatten2d (hstack2d ones32 ones34))\n\n-- outer [1,1,1] [1,1,1,1] = ones(3x4) -> flatten -> sum = 12\ntestOuter :: Real\ntestOuter = tsum1d (flatten2d (outer1d onesV3 onesV4))\n\n-- dot [1,1,1,1,1] [1,1,1,1,1] = 5\ntestDot :: Real\ntestDot = dot1d onesV5 onesV5\n\n-- === Category 2 tests ===\n\n-- flatten ones(3x4) -> ones(12) -> sum = 12\ntestFlatten :: Real\ntestFlatten = tsum1d (flatten2d ones34)\n\n-- kron ones(2x3) ones(2x2) = ones(4x6) -> flatten -> sum = 24\ntestKron :: Real\ntestKron = tsum1d (flatten2d (kron2d ones23 ones22))\n\n-- === Category 3 tests ===\n\n-- slice 2 3 from ones(5x6) -> ones(2x3) -> flatten -> sum = 6\ntestSlice :: Real\ntestSlice = tsum1d (flatten2d (slice2d 2 3 ones56))\n\n-- take 3 from ones(10) -> ones(3) -> sum = 3\ntestTake :: Real\ntestTake = tsum1d (ttake1d 3 onesV10)\n\n-- slice used twice (same free vars) -> add -> flatten -> sum = 12\ntestSliceAdd :: Real\ntestSliceAdd =\n  let s = slice2d 2 3 ones56\n  in tsum1d (flatten2d (add2d s s))\n"
  },
  {
    "path": "test-suite/golden-tests/nat-typecheck/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n\n#include \"mlc_tensor.hpp\"\n#include <cstring>\n\n// --- Tensor constructors (specific sizes) ---\n\nmlc::Tensor2<double> ones34() {\n    mlc::Tensor2<double> t({3, 4});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> ones23() {\n    mlc::Tensor2<double> t({2, 3});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> ones32() {\n    mlc::Tensor2<double> t({3, 2});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> ones22() {\n    mlc::Tensor2<double> t({2, 2});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> ones43() {\n    mlc::Tensor2<double> t({4, 3});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> ones56() {\n    mlc::Tensor2<double> t({5, 6});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor1<double> onesV3() {\n    mlc::Tensor1<double> t({3});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor1<double> onesV4() {\n    mlc::Tensor1<double> t({4});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor1<double> onesV5() {\n    mlc::Tensor1<double> t({5});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor1<double> onesV10() {\n    mlc::Tensor1<double> t({10});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> eye33() {\n    mlc::Tensor2<double> t({3, 3});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 0.0;\n    for (int i = 0; i < 3; i++) t(i, i) = 1.0;\n    return t;\n}\n\nmlc::Tensor2<double> eye55() {\n    mlc::Tensor2<double> t({5, 5});\n    for (size_t i = 0; i < t.size(); i++) t[i] = 0.0;\n    for (int i = 0; i < 5; i++) t(i, i) = 1.0;\n    return t;\n}\n\n// --- Operations ---\n\nmlc::Tensor2<double> add2d(\n    const mlc::Tensor2<double>& a,\n    const mlc::Tensor2<double>& b\n) {\n    int64_t m = a.shape()[0], n = a.shape()[1];\n    mlc::Tensor2<double> r({m, n});\n    for (size_t i = 0; i < a.size(); i++)\n        r[i] = a.data()[i] + b.data()[i];\n    return r;\n}\n\nmlc::Tensor2<double> transpose2d(const mlc::Tensor2<double>& a) {\n    int64_t m = a.shape()[0], n = a.shape()[1];\n    mlc::Tensor2<double> r({n, m});\n    for (int64_t i = 0; i < m; i++)\n        for (int64_t j = 0; j < n; j++)\n            r(j, i) = a(i, j);\n    return r;\n}\n\nmlc::Tensor2<double> matmul2d(\n    const mlc::Tensor2<double>& a,\n    const mlc::Tensor2<double>& b\n) {\n    int64_t m = a.shape()[0], k = a.shape()[1], n = b.shape()[1];\n    mlc::Tensor2<double> r({m, n});\n    for (size_t i = 0; i < r.size(); i++) r[i] = 0.0;\n    for (int64_t i = 0; i < m; i++)\n        for (int64_t j = 0; j < n; j++)\n            for (int64_t l = 0; l < k; l++)\n                r(i, j) += a(i, l) * b(l, j);\n    return r;\n}\n\ndouble dot1d(\n    const mlc::Tensor1<double>& a,\n    const mlc::Tensor1<double>& b\n) {\n    double s = 0;\n    for (size_t i = 0; i < a.size(); i++)\n        s += a.data()[i] * b.data()[i];\n    return s;\n}\n\ndouble tsum1d(const mlc::Tensor1<double>& a) {\n    double s = 0;\n    for (size_t i = 0; i < a.size(); i++) s += a.data()[i];\n    return s;\n}\n\ndouble trace2d(const mlc::Tensor2<double>& a) {\n    int64_t n = a.shape()[0];\n    double s = 0;\n    for (int64_t i = 0; i < n; i++) s += a(i, i);\n    return s;\n}\n\nmlc::Tensor1<double> diag2d(const mlc::Tensor2<double>& a) {\n    int64_t n = a.shape()[0];\n    mlc::Tensor1<double> r({n});\n    for (int64_t i = 0; i < n; i++) r.data()[i] = a(i, i);\n    return r;\n}\n\nmlc::Tensor2<double> diagMat1d(const mlc::Tensor1<double>& a) {\n    int64_t n = a.shape()[0];\n    mlc::Tensor2<double> r({n, n});\n    for (size_t i = 0; i < r.size(); i++) r[i] = 0.0;\n    for (int64_t i = 0; i < n; i++) r(i, i) = a.data()[i];\n    return r;\n}\n\nmlc::Tensor1<double> flatten2d(const mlc::Tensor2<double>& a) {\n    int64_t total = (int64_t)a.size();\n    mlc::Tensor1<double> r({total});\n    memcpy(r.data(), a.data(), (size_t)total * sizeof(double));\n    return r;\n}\n\nmlc::Tensor2<double> outer1d(\n    const mlc::Tensor1<double>& a,\n    const mlc::Tensor1<double>& b\n) {\n    int64_t m = a.shape()[0], n = b.shape()[0];\n    mlc::Tensor2<double> r({m, n});\n    for (int64_t i = 0; i < m; i++)\n        for (int64_t j = 0; j < n; j++)\n            r(i, j) = a.data()[i] * b.data()[j];\n    return r;\n}\n\nmlc::Tensor2<double> vstack2d(\n    const mlc::Tensor2<double>& a,\n    const mlc::Tensor2<double>& b\n) {\n    int64_t ma = a.shape()[0], mb = b.shape()[0], n = a.shape()[1];\n    mlc::Tensor2<double> r({ma + mb, n});\n    memcpy(r.data(), a.data(), (size_t)(ma * n) * sizeof(double));\n    memcpy(r.data() + ma * n, b.data(), (size_t)(mb * n) * sizeof(double));\n    return r;\n}\n\nmlc::Tensor2<double> hstack2d(\n    const mlc::Tensor2<double>& a,\n    const mlc::Tensor2<double>& b\n) {\n    int64_t m = a.shape()[0], na = a.shape()[1], nb = b.shape()[1];\n    mlc::Tensor2<double> r({m, na + nb});\n    for (int64_t i = 0; i < m; i++) {\n        memcpy(r.data() + i * (na + nb),\n               a.data() + i * na, (size_t)na * sizeof(double));\n        memcpy(r.data() + i * (na + nb) + na,\n               b.data() + i * nb, (size_t)nb * sizeof(double));\n    }\n    return r;\n}\n\nmlc::Tensor2<double> kron2d(\n    const mlc::Tensor2<double>& a,\n    const mlc::Tensor2<double>& b\n) {\n    int64_t ma = a.shape()[0], na = a.shape()[1];\n    int64_t mb = b.shape()[0], nb = b.shape()[1];\n    mlc::Tensor2<double> r({ma * mb, na * nb});\n    for (int64_t ia = 0; ia < ma; ia++)\n        for (int64_t ja = 0; ja < na; ja++)\n            for (int64_t ib = 0; ib < mb; ib++)\n                for (int64_t jb = 0; jb < nb; jb++)\n                    r(ia * mb + ib, ja * nb + jb) = a(ia, ja) * b(ib, jb);\n    return r;\n}\n\nmlc::Tensor2<double> slice2d(\n    int rows, int cols,\n    const mlc::Tensor2<double>& a\n) {\n    mlc::Tensor2<double> r({(int64_t)rows, (int64_t)cols});\n    for (int i = 0; i < rows; i++)\n        for (int j = 0; j < cols; j++)\n            r(i, j) = a(i, j);\n    return r;\n}\n\nmlc::Tensor1<double> ttake1d(int n, const mlc::Tensor1<double>& a) {\n    mlc::Tensor1<double> r({(int64_t)n});\n    memcpy(r.data(), a.data(), (size_t)n * sizeof(double));\n    return r;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-1/exp.txt",
    "content": "[2,[2,2]]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-1/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\n\ntoSnd f x = (x, f x)\n\ndouble x = (x, x)\n\nint :: Int -> Int\nint x = x\n\nfoo = toSnd double . int\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-2/exp.txt",
    "content": "[2,3]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-2/foo.py",
    "content": "def add(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-2/main.loc",
    "content": "module main (foo)\n\ntype Py => Int = \"int\" \ntype Py => Tuple2 a b = \"tuple\" a b\n\nsource Py from \"foo.py\" (\"add\") \nadd :: Int -> Int -> Int\n\ntoSnd :: (a -> b) -> a -> (a, b)\ntoSnd f x = (x, f x)\n\nfoo x = toSnd (add 1) x\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-3/exp.txt",
    "content": "[1,2,3]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-3/foo.py",
    "content": "def mlc_map(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-3/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\n\ntype Py => Int = \"int\" \ntype Py => Tuple2 a b = \"tuple\" a b\ntype Py => List a = \"list\" a\n\nsource Py from \"foo.py\" (\"mlc_map\" as map) \nmap :: (a -> b) -> [a] -> [b]\n\nfoo :: [Int] -> [Int]\nfoo = map (\\x -> x)\n\nweird :: a -> a -> a\nweird x y = y\n\nint :: Int -> Int\nint x = x\n\nfoo = map (int . weird 1)\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-4/exp.txt",
    "content": "[[2,2],[[2,[2,2]],[2,[2,2]]]]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-4/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\n\ntoSnd f z = (double z, f (bar double z))\n\ndouble x = (x, x)\n\nbar f x = (x, f x)\n\nint :: Int -> Int\nint x = x\n\nfoo = toSnd double . int\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus high.loc 2> build.err\n\t./nexus foo [99,2] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-5/exp.txt",
    "content": "99\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-5/high.loc",
    "content": "module main (foo)\n\ntype Py => Int = \"int\"\ntype Py => Tuple2 a b = \"tuple\" a b\n\nsource Py from \"high.py\" (\"fst\", \"snd\")\nfst :: (a, b) -> a\nsnd :: (a, b) -> b\n\nconst :: a -> b -> a\nconst a b = a\n\nuncurry :: (a -> b -> c) -> (a, b) -> c\nuncurry fabc ab = fabc (fst ab) (snd ab) \n\nfoo :: (Int, Int) -> Int\nfoo = uncurry const\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-5/high.py",
    "content": "def fst(x):\n    return x[0]\n\ndef snd(x):\n    return x[1]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-6/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus srcFoo '\"c\"' > obs.txt 2> obs.err\n\t./nexus natFoo '\"e\"' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-6/exp.txt",
    "content": "[\"a\",\"b\",\"c\"]\n[\"c\",\"d\",\"e\"]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-6/foo.py",
    "content": "def morloc_id(x):\n    return x\n\ndef morloc_map(f, xs):\n    return list(map(f, xs))\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-6/main.loc",
    "content": "-- This tests issue #50\n\nmodule main (srcFoo, natFoo)\n\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\nsource Py from \"foo.py\" (\"morloc_map\" as map, \"morloc_id\" as srcId)\nmap :: (a -> b) -> [a] -> [b]\n\nsrcId :: Str -> Str\n\nnatId :: Str -> Str\nnatId x = x\n\nsrcFoo :: Str -> [Str]\nsrcFoo x = map srcId [\"a\",\"b\", x]\n\nnatFoo :: Str -> [Str]\nnatFoo x = map natId [\"c\",\"d\", x]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-7/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-7/exp.txt",
    "content": "[67,true,3.14,6.7,[67,\"Nessie\"],[true,6.7],[6.7,true],[67,true,6.7],67,[[true,42,\"Alice\"],67],[67,true],[67,true,\"Alice\"],420,[420,true],[420,\"Alice\",421,\"Bob\"]]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-7/main.loc",
    "content": "module main (foo)\n\ntest1 = (67,True,3.14)\ntest2 = (67, (True, \"Nessie\", 6.7))\n\nrecord Thing = Thing { a :: Int, b :: (Bool, Int, Str)  }\n\ntest3 :: Thing\ntest3 = { a = 67, b = (True, 42, \"Alice\") }\n\ntest4 :: (Int, Thing)\ntest4 = (42, { a = 67, b = (True, 420, \"Bob\") })\n\ntest5 :: (Thing, Int, Thing)\ntest5 = ({ a = 67, b = (True, 420, \"Alice\") }, 42, { a = 68, b = (False, 421, \"Bob\") })\n\nfoo =\n  ( .0 test1                             -- 67,\n  , .1 test1                             -- true,\n  , .2 test1                             -- 3.14,\n  , .1.2 test2                           -- 6.7,\n  , .(.0, .1.1) test2                    -- [67,\"Nessie\"],\n  , .1.(.0, .2) test2                    -- [true,6.7],\n  , .1.(.2, .0) test2                    -- [6.7,true],\n  , .(.0, .1.(.0, .2)) test2             -- [67,true,6.7],\n  , .a test3                             -- 67,\n  , .(.b, .a) test3                      -- [[true,42,\"Alice\"],67],\n  , .(.a, .b.0) test3                    -- [67,true],\n  , .(.a, .b.(.0, .2)) test3             -- [67,true,\"Alice\"]\n  , .1.b.1 test4                         -- 420\n  , .1.b.(.1,.0) test4                   -- [420,true]\n  , .(.0.b.(.1,.2), .2.b.(.1,.2)) test5  -- [420,\"Alice\",421,\"Bob\"]\n  )\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-8/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-8/exp.txt",
    "content": "[[42,true,3.14],[67,false,3.14],[67,true,6.28],[42,true,6.28],[42,[true,\"Evangeline\",6.7]],[42,[false,\"Nessie\",6.28]],{\"a\":42,\"b\":[true,42,\"Alice\"]},[{\"a\":67,\"b\":[true,210,\"Alice\"]},67,{\"a\":68,\"b\":[false,421,\"Bob\"]}],[42,true,6.28],[68,true,3.14]]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-8/main.loc",
    "content": "module main (foo)\n\ntest1 = (67,True,3.14)\ntest2 = (67, (True, \"Nessie\", 6.7))\n\nrecord Thing = Thing { a :: Int, b :: (Bool, Int, Str)  }\n\ntest3 :: Thing\ntest3 = { a = 67, b = (True, 42, \"Alice\") }\n\ntest4 :: (Thing, Int, Thing)\ntest4 = ({ a = 67, b = (True, 420, \"Alice\") }, 42, { a = 68, b = (False, 421, \"Bob\") })\n\nfoo =\n  ( .(.0 = 42) test1                               -- [42, true, 3.14]\n  , .(.1 = False) test1                            -- [67,false,3.14],\n  , .(.2 = 6.28) test1                             -- [67,true,6.28],\n  , .(.0 = 42, .2 = 6.28) test1                    -- [42,true,6.28],\n  , .(.0 = 42, .1.1 = \"Evangeline\") test2          -- [42,[true,\"Evangeline\",6.7]],\n  , .(.0 = 42, .1.(.0 = False, .2 = 6.28)) test2   -- [42,[false,\"Nessie\",6.28]],\n  , .(.a = 42) test3                               -- {\"a\":42,\"b\":[true,42,\"Alice\"]},\n  , .(.0.b.1 = 210, .1 = 67) test4                 -- [{\"a\":67,\"b\":[true,210,\"Alice\"]},67,{\"a\":68,\"b\":[false,421,\"Bob\"]}]\n  , .(.2 = 6.28, .0 = 42) test1                    -- [42,true,6.28]\n  , .(.0 = .2.a test4) test1\n  )\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-9/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"xxy\"' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-9/exp.txt",
    "content": "[\"xxy+pickle=xxypickle\",\"somewhere in zanzibar\",\"xxy\",\"xxyxxy\",\"<xxy>\",\"xxy>\",\"<xxy\",\"xxy.xxy\"]\n"
  },
  {
    "path": "test-suite/golden-tests/native-morloc-9/main.loc",
    "content": "module main (foo)\n\ntest1 :: Str -> Str -> Str\ntest1 x y = \"#{x}+#{y}=#{x}#{y}\"\n\nfar = (123, \"zanzibar\")\n\nfoo x =\n  [ test1 x \"pickle\"\n  , \"somewhere in #{.1 far}\"\n  , \"#{x}\"\n  , \"#{x}#{x}\"\n  , \"<#{x}>\"\n  , \"#{x}>\"\n  , \"<#{x}\"\n  , \"#{x}.#{x}\"\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/nexus-let-lambda/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- letLambda ---\" > obs.txt\n\t./nexus letLambda >> obs.txt 2> obs.err\n\techo \"--- letLambdaComplex ---\" >> obs.txt\n\t./nexus letLambdaComplex >> obs.txt 2>> obs.err\n\techo \"--- letLambdaMultiUse ---\" >> obs.txt\n\t./nexus letLambdaMultiUse >> obs.txt 2>> obs.err\n\techo \"--- letLambdaNested ---\" >> obs.txt\n\t./nexus letLambdaNested >> obs.txt 2>> obs.err\n\techo \"--- letLambdaCapture ---\" >> obs.txt\n\t./nexus letLambdaCapture >> obs.txt 2>> obs.err\n\techo \"--- letLambdaPartial ---\" >> obs.txt\n\t./nexus letLambdaPartial 2 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/nexus-let-lambda/exp.txt",
    "content": "--- letLambda ---\n42\n--- letLambdaComplex ---\n[5,5]\n--- letLambdaMultiUse ---\n[1,2]\n--- letLambdaNested ---\n42\n--- letLambdaCapture ---\n[1,2]\n--- letLambdaPartial ---\n[1,2]\n"
  },
  {
    "path": "test-suite/golden-tests/nexus-let-lambda/main.loc",
    "content": "-- Tests for let-bound lambdas in the pure nexus evaluator (gAST path).\n-- These require applyLambdas to inline let-bound lambdas before toNexusExpr,\n-- since the nexus evaluator cannot serialize function types.\n\nmodule main\n  ( letLambda\n  , letLambdaComplex\n  , letLambdaMultiUse\n  , letLambdaNested\n  , letLambdaCapture\n  , letLambdaPartial\n  )\n\n-- 1. Simple identity lambda applied in body\nletLambda :: Int\nletLambda = let f = (\\x -> x) in f 42\n\n-- 2. Lambda with compound return type\nletLambdaComplex :: (Int, Int)\nletLambdaComplex = let f = (\\x -> (x, x)) in f 5\n\n-- 3. Let-bound lambda used at multiple call sites\nletLambdaMultiUse :: (Int, Int)\nletLambdaMultiUse = let f = (\\x -> x) in (f 1, f 2)\n\n-- 4. Nested let-bound lambdas: inner lambda calls outer\nletLambdaNested :: Int\nletLambdaNested =\n  let f = (\\x -> x)\n      g = (\\y -> f y)\n  in g 42\n\n-- 5. Lambda that captures an outer let-bound data value\nletLambdaCapture :: (Int, Int)\nletLambdaCapture =\n  let x = 1\n      f = (\\y -> (x, y))\n  in f 2\n\n-- 6. Partial application: let-bound lambda returns a function\nletLambdaPartial :: Int -> (Int, Int)\nletLambdaPartial = let f = (\\x -> \\y -> (x, y)) in f 1\n"
  },
  {
    "path": "test-suite/golden-tests/nexus-let-pure/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- letSimpleInt ---\" > obs.txt\n\t./nexus letSimpleInt >> obs.txt 2> obs.err\n\techo \"--- letSimpleStr ---\" >> obs.txt\n\t./nexus letSimpleStr >> obs.txt 2>> obs.err\n\techo \"--- letSimpleBool ---\" >> obs.txt\n\t./nexus letSimpleBool >> obs.txt 2>> obs.err\n\techo \"--- letSimpleReal ---\" >> obs.txt\n\t./nexus letSimpleReal >> obs.txt 2>> obs.err\n\techo \"--- letSimpleList ---\" >> obs.txt\n\t./nexus letSimpleList >> obs.txt 2>> obs.err\n\techo \"--- letSimpleTuple ---\" >> obs.txt\n\t./nexus letSimpleTuple >> obs.txt 2>> obs.err\n\techo \"--- letNested ---\" >> obs.txt\n\t./nexus letNested >> obs.txt 2>> obs.err\n\techo \"--- letCrossRef ---\" >> obs.txt\n\t./nexus letCrossRef >> obs.txt 2>> obs.err\n\techo \"--- letDeepChain ---\" >> obs.txt\n\t./nexus letDeepChain >> obs.txt 2>> obs.err\n\techo \"--- letMultiRef ---\" >> obs.txt\n\t./nexus letMultiRef >> obs.txt 2>> obs.err\n\techo \"--- letUnused ---\" >> obs.txt\n\t./nexus letUnused >> obs.txt 2>> obs.err\n\techo \"--- letShadow ---\" >> obs.txt\n\t./nexus letShadow >> obs.txt 2>> obs.err\n\techo \"--- letNestedStruct ---\" >> obs.txt\n\t./nexus letNestedStruct >> obs.txt 2>> obs.err\n\techo \"--- letGetter ---\" >> obs.txt\n\t./nexus letGetter >> obs.txt 2>> obs.err\n\techo \"--- letWithArg ---\" >> obs.txt\n\t./nexus letWithArg 42 >> obs.txt 2>> obs.err\n\techo \"--- letSelfRef ---\" >> obs.txt\n\t./nexus letSelfRef >> obs.txt 2>> obs.err\n\techo \"--- letDeepNest ---\" >> obs.txt\n\t./nexus letDeepNest >> obs.txt 2>> obs.err\n\techo \"--- letCrossRefChain ---\" >> obs.txt\n\t./nexus letCrossRefChain >> obs.txt 2>> obs.err\n\techo \"--- letRecord ---\" >> obs.txt\n\t./nexus letRecord >> obs.txt 2>> obs.err\n\techo \"--- letRecordGetter ---\" >> obs.txt\n\t./nexus letRecordGetter >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log\n"
  },
  {
    "path": "test-suite/golden-tests/nexus-let-pure/exp.txt",
    "content": "--- letSimpleInt ---\n42\n--- letSimpleStr ---\n\"hello\"\n--- letSimpleBool ---\ntrue\n--- letSimpleReal ---\n3.14\n--- letSimpleList ---\n[1,2,3]\n--- letSimpleTuple ---\n[42,\"world\"]\n--- letNested ---\n[1,2]\n--- letCrossRef ---\n1\n--- letDeepChain ---\n1\n--- letMultiRef ---\n[42,42,42]\n--- letUnused ---\n42\n--- letShadow ---\n2\n--- letNestedStruct ---\n[1,[2,3],\"hi\"]\n--- letGetter ---\n10\n--- letWithArg ---\n42\n--- letSelfRef ---\n1\n--- letDeepNest ---\n1\n--- letCrossRefChain ---\n100\n--- letRecord ---\n{\"x\":10,\"y\":20}\n--- letRecordGetter ---\n10\n"
  },
  {
    "path": "test-suite/golden-tests/nexus-let-pure/main.loc",
    "content": "-- Tests for let bindings in the pure nexus evaluator (gAST path).\n-- No source declarations, no imports -- all functions are purely nexus-evaluated.\n\nmodule main\n  ( letSimpleInt\n  , letSimpleStr\n  , letSimpleBool\n  , letSimpleReal\n  , letSimpleList\n  , letSimpleTuple\n  , letNested\n  , letCrossRef\n  , letDeepChain\n  , letMultiRef\n  , letUnused\n  , letShadow\n  , letNestedStruct\n  , letGetter\n  , letWithArg\n  , letSelfRef\n  , letDeepNest\n  , letCrossRefChain\n  , letRecord\n  , letRecordGetter\n  )\n\nrecord Point = Point { x :: Int, y :: Int }\n\n-- 1. Simple let with integer\nletSimpleInt :: Int\nletSimpleInt = let x = 42 in x\n\n-- 2. Simple let with string\nletSimpleStr :: Str\nletSimpleStr = let s = \"hello\" in s\n\n-- 3. Simple let with boolean\nletSimpleBool :: Bool\nletSimpleBool = let b = True in b\n\n-- 4. Simple let with real\nletSimpleReal :: Real\nletSimpleReal = let r = 3.14 in r\n\n-- 5. Simple let with list\nletSimpleList :: [Int]\nletSimpleList = let xs = [1, 2, 3] in xs\n\n-- 6. Simple let with tuple\nletSimpleTuple :: (Int, Str)\nletSimpleTuple = let p = (42, \"world\") in p\n\n-- 7. Nested lets returning tuple\nletNested :: (Int, Int)\nletNested = let x = 1 in let y = 2 in (x, y)\n\n-- 8. Cross-referencing let (y uses x)\nletCrossRef :: Int\nletCrossRef = let x = 1 in let y = x in y\n\n-- 9. Deep chain of cross-references (4 levels)\nletDeepChain :: Int\nletDeepChain =\n  let a = 1\n      b = a\n      c = b\n      d = c\n  in d\n\n-- 10. Multiple references to same let-bound variable\nletMultiRef :: (Int, Int, Int)\nletMultiRef = let x = 42 in (x, x, x)\n\n-- 11. Unused let binding\nletUnused :: Int\nletUnused = let x = 999 in 42\n\n-- 12. Let shadowing (inner x shadows outer x, uniquified to x@0 and x@1)\nletShadow :: Int\nletShadow = let x = 1 in let x = 2 in x\n\n-- 13. Nested data structure in let\nletNestedStruct :: (Int, [Int], Str)\nletNestedStruct = let p = (1, [2, 3], \"hi\") in p\n\n-- 14. Pattern getter on let-bound tuple\nletGetter :: Int\nletGetter = let p = (10, 20) in .0 p\n\n-- 15. Let with function argument from CLI\nletWithArg :: Int -> Int\nletWithArg x = let y = x in y\n\n-- 16. Let self-reference: inner let RHS refers to outer x (non-recursive let)\nletSelfRef :: Int\nletSelfRef = let x = 1 in let x = x in x\n\n-- 17. Many let bindings, use first\nletDeepNest :: Int\nletDeepNest =\n  let a = 1\n      b = 2\n      c = 3\n      d = 4\n      e = 5\n  in a\n\n-- 18. Long cross-reference chain\nletCrossRefChain :: Int\nletCrossRefChain =\n  let a = 100\n      b = a\n      c = b\n      d = c\n      e = d\n  in e\n\n-- 19. Record construction in let\nletRecord :: Point\nletRecord = let p = { x = 10, y = 20 } in p\n\n-- 20. Record field access on let-bound record\nletRecordGetter :: Int\nletRecordGetter = let p = { x = 10, y = 20 } in .x p\n"
  },
  {
    "path": "test-suite/golden-tests/null-keyword/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testSafeHead > obs.txt 2> obs.err\n\t./nexus testSafeHeadEmpty >> obs.txt 2>> obs.err\n\t./nexus testFromNull >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/null-keyword/exp.txt",
    "content": "10\nnull\n[0,0,0]\n"
  },
  {
    "path": "test-suite/golden-tests/null-keyword/main.R",
    "content": "fromNull <- function(default_val, x) {\n    if (is.null(x)) return(default_val)\n    return(x)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/null-keyword/main.hpp",
    "content": "#ifndef MORLOC_NULL_KEYWORD_TEST_HPP\n#define MORLOC_NULL_KEYWORD_TEST_HPP\n\n#include <optional>\n\ntemplate <typename T>\nT fromNull(const T& default_val, const std::optional<T>& x) {\n    if (!x.has_value()) return default_val;\n    return *x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/null-keyword/main.loc",
    "content": "module main (testSafeHead, testSafeHeadEmpty, testFromNull)\n\nimport root-py\nimport root-cpp\nimport root-r\n\nsafeHead :: [a] -> ?a\nsafeHead xs\n  ? length xs == 0 = Null\n  : head xs\n\nsource Py from \"main.py\" (\"fromNull\")\nsource R from \"main.R\" (\"fromNull\")\nsource Cpp from \"main.hpp\" (\"fromNull\")\n\nfromNull :: a -> ?a -> a\n\ntestSafeHead :: ?Int\ntestSafeHead = safeHead [10, 20, 30]\n\ntestSafeHeadEmpty :: ?Int\ntestSafeHeadEmpty = safeHead []\n\ntestFromNull :: (Int, Int, Int)\ntestFromNull = ( fromNull 0 (idpy Null)\n               , fromNull 0 (idr Null)\n               , fromNull 0 (idcpp Null)\n               )\n"
  },
  {
    "path": "test-suite/golden-tests/null-keyword/main.py",
    "content": "def fromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/numeric-literals/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/numeric-literals/exp.txt",
    "content": "[[-42,42,42,42,493,493,1,1],[4.2,420000,0.042]]\n"
  },
  {
    "path": "test-suite/golden-tests/numeric-literals/main.loc",
    "content": "module main (test)\n\ntest = (testInts, testFloats)\n\ntestInts = [-42, 42, 0x2a, 0X2a, 0o755, 0O755, 0b0001, 0B0001]\n\ntestFloats = [4.2, 4.2E5, 4.2e-2]\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":34}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-c/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":34}\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-c/main.loc",
    "content": "module main (foo)\n\nimport root\nimport root-cpp\n\nsource Cpp from \"person.h\" (\"PersonObj\")\n\nobject (Person a) = Person {name :: Str, info :: a}\nobject Cpp => (Person a) = \"PersonObj\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-c/person.h",
    "content": "#ifndef __PERSON_H__\n#define __PERSON_H__\n\n#include <vector>\n#include <tuple>\n#include <utility>\n\ntemplate <class T>\nclass PersonObj{\n    public:\n        std::string name;\n        T info; \n\n        PersonObj(std::string name_i, T info_i){\n           name = name_i; \n           info = info_i; \n        }\n\n        PersonObj(T info_i){\n           name = \"anonymous\"; \n           info = info_i; \n        }\n\n        PersonObj(){ }\n\n        std::string initials(){\n            std::string n = \"\";\n            bool in = false;\n            for(size_t i = 0; i < name.size(); i++){\n                if (in && name[i] == ' ') {\n                    in = false;\n                }\n                else if (!in && name[i] != ' ') {\n                    n.push_back(name[i]);\n                    in = true;\n                }\n            }\n            return n;\n        }\n};\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":34}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-py/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":34}\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-py/main.loc",
    "content": "module main (foo)\n\nimport root\nimport root-py\n\nsource Py from \"person.py\" (\"PersonObj\" as Person)\n\nobject (Person a) = Person {name :: Str, info :: a}\nobject Py => (Person a) = \"PersonObj\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-py/person.py",
    "content": "class PersonObj:\n  def __init__(self, name, info):\n    self.name = name \n    self.info = info \n"
  },
  {
    "path": "test-suite/golden-tests/object-1-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":34}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-r/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":34}\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-r/main.loc",
    "content": "module main (foo)\n\nimport root\nimport root-r\n\nsource R from \"person.R\" (\"PersonObj\")\n\nobject (Person a) = Person {name :: Str, info :: a}\nobject R => (Person a) = \"PersonObj\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/object-1-r/person.R",
    "content": "# OK, this isn't really a proper class constructor and eventually I will need\n# to deal with the wonkey variation in field accessors, but for now this will\n# allow testing of the passing to the right constructor.\npersonObj <- function(name, info){\n  list(name=name, info=info)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testCoerceArg > obs.txt 2> obs.err\n\t./nexus testCoerceFromNull >> obs.txt 2>> obs.err\n\t./nexus testCoerceAddOpt >> obs.txt 2>> obs.err\n\t./nexus testCoerceReturn >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-cpp/exp.txt",
    "content": "42\n-1\n7\n99\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-cpp/foo.hpp",
    "content": "#ifndef MORLOC_OPTIONAL_COERCE_TEST_HPP\n#define MORLOC_OPTIONAL_COERCE_TEST_HPP\n\n#include <optional>\n\ntemplate <typename T>\nT fromNull(const T& default_val, const std::optional<T>& x) {\n    if (!x.has_value()) return default_val;\n    return *x;\n}\n\ntemplate <typename T>\nT fromNull(const T& default_val, std::nullopt_t) {\n    return default_val;\n}\n\ntemplate <typename T>\nT fromNull(const T&, const T& x) {\n    return x;\n}\n\nstd::optional<int> addOpt(const std::optional<int>& x, const std::optional<int>& y) {\n    if (!x.has_value() || !y.has_value()) return std::nullopt;\n    return std::optional<int>(*x + *y);\n}\n\nstd::optional<int> identity(const std::optional<int>& x) {\n    return x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-cpp/main.loc",
    "content": "module main\n  ( testCoerceArg\n  , testCoerceFromNull\n  , testCoerceAddOpt\n  , testCoerceReturn\n  )\n\ntype Cpp => Int = \"int\"\n\nfromNull :: a -> ?a -> a\naddOpt :: ?Int -> ?Int -> ?Int\nidentity :: ?Int -> ?Int\n\nsource Cpp from \"foo.hpp\" (\"fromNull\", \"addOpt\", \"identity\")\n\n-- Pass Int where ?Int expected (coerce arg)\ntestCoerceArg :: Int\ntestCoerceArg = fromNull 0 42\n\n-- Pass Int literal where ?Int expected in fromNull default\ntestCoerceFromNull :: Int\ntestCoerceFromNull = fromNull (-1) Null\n\n-- Pass Int where ?Int expected (both args coerced)\ntestCoerceAddOpt :: ?Int\ntestCoerceAddOpt = addOpt 3 4\n\n-- Pass Int where ?Int expected, return ?Int\ntestCoerceReturn :: ?Int\ntestCoerceReturn = identity 99\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testCppIntToPyOpt > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/cfoo.hpp",
    "content": "#ifndef MORLOC_COERCE_INTEROP_HPP\n#define MORLOC_COERCE_INTEROP_HPP\n\nint cAddOne(int x) {\n    return x + 1;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/cppfuncs.loc",
    "content": "module cppfuncs (*)\n\nimport types\n\ncAddOne :: Int -> Int\n\nsource Cpp from \"cfoo.hpp\" (\"cAddOne\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/main.loc",
    "content": "module main\n  ( testCppIntToPyOpt\n  )\n\nimport types\nimport cppfuncs (cAddOne)\nimport pyfuncs (pUnwrapOr)\n\n-- C++ returns Int, Python expects ?Int via coercion at cross-pool boundary.\n-- cAddOne returns Int (=42), pUnwrapOr's 2nd arg expects ?Int.\n-- The serialization schema at the boundary must be ?i4 for correct handling.\ntestCppIntToPyOpt :: Int\ntestCppIntToPyOpt = pUnwrapOr 0 (cAddOne 41)\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/pfoo.py",
    "content": "def pUnwrapOr(default_val, x):\n    if x is None:\n        return default_val\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/pyfuncs.loc",
    "content": "module pyfuncs (*)\n\nimport types\n\npUnwrapOr :: a -> ?a -> a\n\nsource Py from \"pfoo.py\" (\"pUnwrapOr\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-interop/types.loc",
    "content": "module types (*)\n\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testCoerceArg > obs.txt 2> obs.err\n\t./nexus testCoerceFromNull >> obs.txt 2>> obs.err\n\t./nexus testCoerceAddOpt >> obs.txt 2>> obs.err\n\t./nexus testCoerceReturn >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-py/exp.txt",
    "content": "42\n-1\n7\n99\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-py/foo.py",
    "content": "def fromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef addOpt(x, y):\n    if x is None or y is None:\n        return None\n    return x + y\n\ndef identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/optional-coerce-py/main.loc",
    "content": "module main\n  ( testCoerceArg\n  , testCoerceFromNull\n  , testCoerceAddOpt\n  , testCoerceReturn\n  )\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\n\nfromNull :: a -> ?a -> a\naddOpt :: ?Int -> ?Int -> ?Int\nidentity :: ?Int -> ?Int\n\nsource Py from \"foo.py\" (\"fromNull\", \"addOpt\", \"identity\")\n\n-- Pass Int where ?Int expected (coerce arg)\ntestCoerceArg :: Int\ntestCoerceArg = fromNull 0 42\n\n-- Pass Int literal where ?Int expected in fromNull default\ntestCoerceFromNull :: Int\ntestCoerceFromNull = fromNull (-1) Null\n\n-- Pass Int where ?Int expected (both args coerced)\ntestCoerceAddOpt :: ?Int\ntestCoerceAddOpt = addOpt 3 4\n\n-- Pass Int where ?Int expected, return ?Int\ntestCoerceReturn :: ?Int\ntestCoerceReturn = identity 99\n"
  },
  {
    "path": "test-suite/golden-tests/optional-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testNull > obs.txt 2> obs.err\n\t./nexus testValue >> obs.txt 2>> obs.err\n\t./nexus testFromNull >> obs.txt 2>> obs.err\n\t./nexus testSafeHead >> obs.txt 2>> obs.err\n\t./nexus testSafeHeadEmpty >> obs.txt 2>> obs.err\n\t./nexus testOptionalAdd >> obs.txt 2>> obs.err\n\t./nexus testOptionalAddNull >> obs.txt 2>> obs.err\n\t./nexus testOptionalList >> obs.txt 2>> obs.err\n\t./nexus testCountNulls >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-cpp/exp.txt",
    "content": "null\n42\n0\n10\nnull\n7\nnull\n[1,null,3,null,5]\n2\n"
  },
  {
    "path": "test-suite/golden-tests/optional-cpp/foo.hpp",
    "content": "#ifndef MORLOC_OPTIONAL_TEST_HPP\n#define MORLOC_OPTIONAL_TEST_HPP\n\n#include <optional>\n#include <vector>\n#include <string>\n\ntemplate <typename T>\nbool isNull(const std::optional<T>& x) {\n    return !x.has_value();\n}\n\ntemplate <typename T>\nT fromNull(const T& default_val, const std::optional<T>& x) {\n    if (!x.has_value()) return default_val;\n    return *x;\n}\n\ntemplate <typename T>\nT fromNull(const T& default_val, std::nullopt_t) {\n    return default_val;\n}\n\ntemplate <typename T>\nstd::optional<T> toNull(const T& x) {\n    return std::optional<T>(x);\n}\n\nstd::optional<int> safeHead(const std::vector<int>& xs) {\n    if (xs.empty()) return std::nullopt;\n    return std::optional<int>(xs[0]);\n}\n\nstd::optional<int> optionalAdd(const std::optional<int>& x, const std::optional<int>& y) {\n    if (!x.has_value() || !y.has_value()) return std::nullopt;\n    return std::optional<int>(*x + *y);\n}\n\nstd::vector<std::optional<int>> optionalList(const std::vector<int>& xs) {\n    std::vector<std::optional<int>> result;\n    result.reserve(xs.size());\n    for (const auto& x : xs) {\n        if (x < 0) result.push_back(std::nullopt);\n        else result.push_back(std::optional<int>(x));\n    }\n    return result;\n}\n\nint countNulls(const std::vector<std::optional<int>>& xs) {\n    int count = 0;\n    for (const auto& x : xs) {\n        if (!x.has_value()) count++;\n    }\n    return count;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/optional-cpp/main.loc",
    "content": "module main\n  ( testNull\n  , testValue\n  , testFromNull\n  , testSafeHead\n  , testSafeHeadEmpty\n  , testOptionalAdd\n  , testOptionalAddNull\n  , testOptionalList\n  , testCountNulls\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Bool = \"bool\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\nisNull :: ?a -> Bool\nfromNull :: a -> ?a -> a\ntoNull :: a -> ?a\nsafeHead :: [Int] -> ?Int\noptionalAdd :: ?Int -> ?Int -> ?Int\noptionalList :: [Int] -> [?Int]\ncountNulls :: [?Int] -> Int\n\nsource Cpp from \"foo.hpp\"\n  ( \"isNull\"\n  , \"fromNull\"\n  , \"toNull\"\n  , \"safeHead\"\n  , \"optionalAdd\"\n  , \"optionalList\"\n  , \"countNulls\"\n  )\n\ntestNull :: ?Int\ntestNull = Null\n\ntestValue :: ?Int\ntestValue = toNull 42\n\ntestFromNull :: Int\ntestFromNull = fromNull 0 Null\n\ntestSafeHead :: ?Int\ntestSafeHead = safeHead [10, 20, 30]\n\ntestSafeHeadEmpty :: ?Int\ntestSafeHeadEmpty = safeHead []\n\ntestOptionalAdd :: ?Int\ntestOptionalAdd = optionalAdd (toNull 3) (toNull 4)\n\ntestOptionalAddNull :: ?Int\ntestOptionalAddNull = optionalAdd Null (toNull 4)\n\ntestOptionalList :: [?Int]\ntestOptionalList = optionalList [1, -2, 3, -4, 5]\n\ntestCountNulls :: Int\ntestCountNulls = countNulls (optionalList [1, -2, 3, -4, 5])\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testCppToPy > obs.txt 2> obs.err\n\t./nexus testCppToPyNull >> obs.txt 2>> obs.err\n\t./nexus testPyToCpp >> obs.txt 2>> obs.err\n\t./nexus testPyToCppNull >> obs.txt 2>> obs.err\n\t./nexus testChainCppPy >> obs.txt 2>> obs.err\n\t./nexus testChainPyCpp >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/cppfuncs.loc",
    "content": "module cppfuncs (*)\n\nimport types\n\ncSafeDiv :: Int -> Int -> ?Int\ncFromNull :: Int -> ?Int -> Int\ncDouble :: ?Int -> ?Int\n\nsource Cpp from \"foo.hpp\" (\"cSafeDiv\", \"cFromNull\", \"cDouble\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/exp.txt",
    "content": "3\n-1\n5\n-1\n6\n10\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/foo.hpp",
    "content": "#ifndef MORLOC_OPTIONAL_INTEROP_HPP\n#define MORLOC_OPTIONAL_INTEROP_HPP\n\n#include <optional>\n\nstd::optional<int> cSafeDiv(int x, int y) {\n    if (y == 0) return std::nullopt;\n    return std::optional<int>(x / y);\n}\n\nint cFromNull(int default_val, const std::optional<int>& x) {\n    if (!x.has_value()) return default_val;\n    return *x;\n}\n\nstd::optional<int> cDouble(const std::optional<int>& x) {\n    if (!x.has_value()) return std::nullopt;\n    return std::optional<int>(*x * 2);\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/foo.py",
    "content": "def pSafeDiv(x, y):\n    if y == 0:\n        return None\n    return x // y\n\ndef pFromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef pDouble(x):\n    if x is None:\n        return None\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/main.loc",
    "content": "module main\n  ( testCppToPy\n  , testCppToPyNull\n  , testPyToCpp\n  , testPyToCppNull\n  , testChainCppPy\n  , testChainPyCpp\n  )\n\nimport types\nimport cppfuncs (cSafeDiv, cFromNull, cDouble)\nimport pyfuncs (pSafeDiv, pFromNull, pDouble)\n\n-- C++ produces optional, Python consumes it\ntestCppToPy :: Int\ntestCppToPy = pFromNull (-1) (cSafeDiv 10 3)\n\n-- C++ produces null, Python consumes it\ntestCppToPyNull :: Int\ntestCppToPyNull = pFromNull (-1) (cSafeDiv 10 0)\n\n-- Python produces optional, C++ consumes it\ntestPyToCpp :: Int\ntestPyToCpp = cFromNull (-1) (pSafeDiv 10 2)\n\n-- Python produces null, C++ consumes it\ntestPyToCppNull :: Int\ntestPyToCppNull = cFromNull (-1) (pSafeDiv 10 0)\n\n-- Chain: C++ safeDiv -> Python double -> C++ fromNull\ntestChainCppPy :: Int\ntestChainCppPy = cFromNull 0 (pDouble (cSafeDiv 10 3))\n\n-- Chain: Python safeDiv -> C++ double -> Python fromNull\ntestChainPyCpp :: Int\ntestChainPyCpp = pFromNull 0 (cDouble (pSafeDiv 10 2))\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/pyfuncs.loc",
    "content": "module pyfuncs (*)\n\nimport types\n\npSafeDiv :: Int -> Int -> ?Int\npFromNull :: Int -> ?Int -> Int\npDouble :: ?Int -> ?Int\n\nsource Py from \"foo.py\" (\"pSafeDiv\", \"pFromNull\", \"pDouble\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-cp/types.loc",
    "content": "module types (*)\n\ntype Py => Int = \"int\"\ntype Cpp => Int = \"int\"\ntype Py => List a = \"list\" a\ntype Cpp => List a = \"std::vector<$1>\" a\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testPyToR > obs.txt 2> obs.err\n\t./nexus testPyToRNull >> obs.txt 2>> obs.err\n\t./nexus testRToPy >> obs.txt 2>> obs.err\n\t./nexus testRToPyNull >> obs.txt 2>> obs.err\n\t./nexus testChainPyR >> obs.txt 2>> obs.err\n\t./nexus testChainRPy >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/exp.txt",
    "content": "3\n-1\n5\n-1\n6\n10\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/foo.R",
    "content": "rSafeDiv <- function(x, y) {\n    if (y == 0) return(NULL)\n    return(as.integer(x %/% y))\n}\n\nrFromNull <- function(default_val, x) {\n    if (is.null(x)) return(default_val)\n    return(x)\n}\n\nrDouble <- function(x) {\n    if (is.null(x)) return(NULL)\n    return(as.integer(x * 2L))\n}\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/foo.py",
    "content": "def pSafeDiv(x, y):\n    if y == 0:\n        return None\n    return x // y\n\ndef pFromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef pDouble(x):\n    if x is None:\n        return None\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/main.loc",
    "content": "module main\n  ( testPyToR\n  , testPyToRNull\n  , testRToPy\n  , testRToPyNull\n  , testChainPyR\n  , testChainRPy\n  )\n\nimport types\nimport pyfuncs (pSafeDiv, pFromNull, pDouble)\nimport rfuncs (rSafeDiv, rFromNull, rDouble)\n\ntestPyToR :: Int\ntestPyToR = rFromNull (-1) (pSafeDiv 10 3)\n\ntestPyToRNull :: Int\ntestPyToRNull = rFromNull (-1) (pSafeDiv 10 0)\n\ntestRToPy :: Int\ntestRToPy = pFromNull (-1) (rSafeDiv 10 2)\n\ntestRToPyNull :: Int\ntestRToPyNull = pFromNull (-1) (rSafeDiv 10 0)\n\ntestChainPyR :: Int\ntestChainPyR = rFromNull 0 (rDouble (pSafeDiv 10 3))\n\ntestChainRPy :: Int\ntestChainRPy = pFromNull 0 (pDouble (rSafeDiv 10 2))\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/pyfuncs.loc",
    "content": "module pyfuncs (*)\n\nimport types\n\npSafeDiv :: Int -> Int -> ?Int\npFromNull :: Int -> ?Int -> Int\npDouble :: ?Int -> ?Int\n\nsource Py from \"foo.py\" (\"pSafeDiv\", \"pFromNull\", \"pDouble\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/rfuncs.loc",
    "content": "module rfuncs (*)\n\nimport types\n\nrSafeDiv :: Int -> Int -> ?Int\nrFromNull :: Int -> ?Int -> Int\nrDouble :: ?Int -> ?Int\n\nsource R from \"foo.R\" (\"rSafeDiv\", \"rFromNull\", \"rDouble\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-pr/types.loc",
    "content": "module types (*)\n\ntype Py => Int = \"int\"\ntype R => Int = \"integer\"\ntype Py => List a = \"list\" a\ntype R => List a = \"list\" a\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testRToCpp > obs.txt 2> obs.err\n\t./nexus testRToCppNull >> obs.txt 2>> obs.err\n\t./nexus testCppToR >> obs.txt 2>> obs.err\n\t./nexus testCppToRNull >> obs.txt 2>> obs.err\n\t./nexus testChainRCpp >> obs.txt 2>> obs.err\n\t./nexus testChainCppR >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/cppfuncs.loc",
    "content": "module cppfuncs (*)\n\nimport types\n\ncSafeDiv :: Int -> Int -> ?Int\ncFromNull :: Int -> ?Int -> Int\ncDouble :: ?Int -> ?Int\n\nsource Cpp from \"foo.hpp\" (\"cSafeDiv\", \"cFromNull\", \"cDouble\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/exp.txt",
    "content": "5\n-1\n3\n-1\n10\n6\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/foo.R",
    "content": "rSafeDiv <- function(x, y) {\n    if (y == 0) return(NULL)\n    return(as.integer(x %/% y))\n}\n\nrFromNull <- function(default_val, x) {\n    if (is.null(x)) return(default_val)\n    return(x)\n}\n\nrDouble <- function(x) {\n    if (is.null(x)) return(NULL)\n    return(as.integer(x * 2L))\n}\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/foo.hpp",
    "content": "#ifndef MORLOC_OPTIONAL_INTEROP_RC_HPP\n#define MORLOC_OPTIONAL_INTEROP_RC_HPP\n\n#include <optional>\n\nstd::optional<int> cSafeDiv(int x, int y) {\n    if (y == 0) return std::nullopt;\n    return std::optional<int>(x / y);\n}\n\nint cFromNull(int default_val, const std::optional<int>& x) {\n    if (!x.has_value()) return default_val;\n    return *x;\n}\n\nstd::optional<int> cDouble(const std::optional<int>& x) {\n    if (!x.has_value()) return std::nullopt;\n    return std::optional<int>(*x * 2);\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/main.loc",
    "content": "module main\n  ( testRToCpp\n  , testRToCppNull\n  , testCppToR\n  , testCppToRNull\n  , testChainRCpp\n  , testChainCppR\n  )\n\nimport types\nimport cppfuncs (cSafeDiv, cFromNull, cDouble)\nimport rfuncs (rSafeDiv, rFromNull, rDouble)\n\ntestRToCpp :: Int\ntestRToCpp = cFromNull (-1) (rSafeDiv 10 2)\n\ntestRToCppNull :: Int\ntestRToCppNull = cFromNull (-1) (rSafeDiv 10 0)\n\ntestCppToR :: Int\ntestCppToR = rFromNull (-1) (cSafeDiv 10 3)\n\ntestCppToRNull :: Int\ntestCppToRNull = rFromNull (-1) (cSafeDiv 10 0)\n\ntestChainRCpp :: Int\ntestChainRCpp = cFromNull 0 (cDouble (rSafeDiv 10 2))\n\ntestChainCppR :: Int\ntestChainCppR = rFromNull 0 (rDouble (cSafeDiv 10 3))\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/rfuncs.loc",
    "content": "module rfuncs (*)\n\nimport types\n\nrSafeDiv :: Int -> Int -> ?Int\nrFromNull :: Int -> ?Int -> Int\nrDouble :: ?Int -> ?Int\n\nsource R from \"foo.R\" (\"rSafeDiv\", \"rFromNull\", \"rDouble\")\n"
  },
  {
    "path": "test-suite/golden-tests/optional-interop-rc/types.loc",
    "content": "module types (*)\n\ntype Cpp => Int = \"int\"\ntype R => Int = \"integer\"\ntype Cpp => List a = \"std::vector<$1>\" a\ntype R => List a = \"list\" a\n"
  },
  {
    "path": "test-suite/golden-tests/optional-json/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus echoOptInt null > obs.txt 2> obs.err\n\t./nexus echoOptInt 42 >> obs.txt 2>> obs.err\n\t./nexus echoOptStr null >> obs.txt 2>> obs.err\n\t./nexus echoOptStr '\"hello\"' >> obs.txt 2>> obs.err\n\t./nexus echoOptList null >> obs.txt 2>> obs.err\n\t./nexus echoOptList '[1,2,3]' >> obs.txt 2>> obs.err\n\t./nexus fromNullInt null >> obs.txt 2>> obs.err\n\t./nexus fromNullInt 99 >> obs.txt 2>> obs.err\n\t./nexus isNullInt null >> obs.txt 2>> obs.err\n\t./nexus isNullInt 5 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-json/exp.txt",
    "content": "null\n42\nnull\n\"hello\"\nnull\n[1,2,3]\n0\n99\ntrue\nfalse\n"
  },
  {
    "path": "test-suite/golden-tests/optional-json/foo.py",
    "content": "def identity(x):\n    return x\n\ndef fromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef isNull(x):\n    return x is None\n"
  },
  {
    "path": "test-suite/golden-tests/optional-json/main.loc",
    "content": "module main\n  ( echoOptInt\n  , echoOptStr\n  , echoOptList\n  , fromNullInt\n  , isNullInt\n  )\n\ntype Py => Int = \"int\"\ntype Py => Bool = \"bool\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\nidentity :: a -> a\nfromNull :: a -> ?a -> a\nisNull :: ?a -> Bool\n\nsource Py from \"foo.py\" (\"identity\", \"fromNull\", \"isNull\")\n\nechoOptInt :: ?Int -> ?Int\nechoOptInt x = identity x\n\nechoOptStr :: ?Str -> ?Str\nechoOptStr x = identity x\n\nechoOptList :: ?[Int] -> ?[Int]\nechoOptList x = identity x\n\nfromNullInt :: ?Int -> Int\nfromNullInt x = fromNull 0 x\n\nisNullInt :: ?Int -> Bool\nisNullInt x = isNull x\n"
  },
  {
    "path": "test-suite/golden-tests/optional-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testNull > obs.txt 2> obs.err\n\t./nexus testValue >> obs.txt 2>> obs.err\n\t./nexus testFromNull >> obs.txt 2>> obs.err\n\t./nexus testSafeHead >> obs.txt 2>> obs.err\n\t./nexus testSafeHeadEmpty >> obs.txt 2>> obs.err\n\t./nexus testOptionalAdd >> obs.txt 2>> obs.err\n\t./nexus testOptionalAddNull >> obs.txt 2>> obs.err\n\t./nexus testOptionalList >> obs.txt 2>> obs.err\n\t./nexus testCountNulls >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-py/exp.txt",
    "content": "null\n42\n0\n10\nnull\n7\nnull\n[1,null,3,null,5]\n2\n"
  },
  {
    "path": "test-suite/golden-tests/optional-py/foo.py",
    "content": "def isNull(x):\n    return x is None\n\ndef fromNull(default_val, x):\n    if x is None:\n        return default_val\n    return x\n\ndef toNull(x):\n    return x\n\ndef safeHead(xs):\n    if len(xs) == 0:\n        return None\n    return xs[0]\n\ndef optionalAdd(x, y):\n    if x is None or y is None:\n        return None\n    return x + y\n\ndef optionalList(xs):\n    return [None if x < 0 else x for x in xs]\n\ndef countNulls(xs):\n    return sum(1 for x in xs if x is None)\n"
  },
  {
    "path": "test-suite/golden-tests/optional-py/main.loc",
    "content": "module main\n  ( testNull\n  , testValue\n  , testFromNull\n  , testSafeHead\n  , testSafeHeadEmpty\n  , testOptionalAdd\n  , testOptionalAddNull\n  , testOptionalList\n  , testCountNulls\n  )\n\ntype Py => Int = \"int\"\ntype Py => Bool = \"bool\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\nisNull :: ?a -> Bool\nfromNull :: a -> ?a -> a\ntoNull :: a -> ?a\nsafeHead :: [Int] -> ?Int\noptionalAdd :: ?Int -> ?Int -> ?Int\noptionalList :: [Int] -> [?Int]\ncountNulls :: [?Int] -> Int\n\nsource Py from \"foo.py\"\n  ( \"isNull\"\n  , \"fromNull\"\n  , \"toNull\"\n  , \"safeHead\"\n  , \"optionalAdd\"\n  , \"optionalList\"\n  , \"countNulls\"\n  )\n\ntestNull :: ?Int\ntestNull = Null\n\ntestValue :: ?Int\ntestValue = toNull 42\n\ntestFromNull :: Int\ntestFromNull = fromNull 0 Null\n\ntestSafeHead :: ?Int\ntestSafeHead = safeHead [10, 20, 30]\n\ntestSafeHeadEmpty :: ?Int\ntestSafeHeadEmpty = safeHead []\n\ntestOptionalAdd :: ?Int\ntestOptionalAdd = optionalAdd (toNull 3) (toNull 4)\n\ntestOptionalAddNull :: ?Int\ntestOptionalAddNull = optionalAdd Null (toNull 4)\n\ntestOptionalList :: [?Int]\ntestOptionalList = optionalList [1, -2, 3, -4, 5]\n\ntestCountNulls :: Int\ntestCountNulls = countNulls (optionalList [1, -2, 3, -4, 5])\n"
  },
  {
    "path": "test-suite/golden-tests/optional-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testNull > obs.txt 2> obs.err\n\t./nexus testValue >> obs.txt 2>> obs.err\n\t./nexus testFromNull >> obs.txt 2>> obs.err\n\t./nexus testSafeHead >> obs.txt 2>> obs.err\n\t./nexus testSafeHeadEmpty >> obs.txt 2>> obs.err\n\t./nexus testOptionalAdd >> obs.txt 2>> obs.err\n\t./nexus testOptionalAddNull >> obs.txt 2>> obs.err\n\t./nexus testOptionalList >> obs.txt 2>> obs.err\n\t./nexus testCountNulls >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-r/exp.txt",
    "content": "null\n42\n0\n10\nnull\n7\nnull\n[1,null,3,null,5]\n2\n"
  },
  {
    "path": "test-suite/golden-tests/optional-r/foo.R",
    "content": "isNull <- function(x) {\n    is.null(x)\n}\n\nfromNull <- function(default_val, x) {\n    if (is.null(x)) return(default_val)\n    return(x)\n}\n\ntoNull <- function(x) {\n    return(x)\n}\n\nsafeHead <- function(xs) {\n    if (length(xs) == 0) return(NULL)\n    return(xs[[1]])\n}\n\noptionalAdd <- function(x, y) {\n    if (is.null(x) || is.null(y)) return(NULL)\n    return(x + y)\n}\n\noptionalList <- function(xs) {\n    lapply(xs, function(x) if (x < 0) NULL else x)\n}\n\ncountNulls <- function(xs) {\n    sum(sapply(xs, is.null))\n}\n"
  },
  {
    "path": "test-suite/golden-tests/optional-r/main.loc",
    "content": "module main\n  ( testNull\n  , testValue\n  , testFromNull\n  , testSafeHead\n  , testSafeHeadEmpty\n  , testOptionalAdd\n  , testOptionalAddNull\n  , testOptionalList\n  , testCountNulls\n  )\n\ntype R => Int = \"integer\"\ntype R => Bool = \"logical\"\ntype R => Str = \"character\"\ntype R => List a = \"list\" a\n\nisNull :: ?a -> Bool\nfromNull :: a -> ?a -> a\ntoNull :: a -> ?a\nsafeHead :: [Int] -> ?Int\noptionalAdd :: ?Int -> ?Int -> ?Int\noptionalList :: [Int] -> [?Int]\ncountNulls :: [?Int] -> Int\n\nsource R from \"foo.R\"\n  ( \"isNull\"\n  , \"fromNull\"\n  , \"toNull\"\n  , \"safeHead\"\n  , \"optionalAdd\"\n  , \"optionalList\"\n  , \"countNulls\"\n  )\n\ntestNull :: ?Int\ntestNull = Null\n\ntestValue :: ?Int\ntestValue = toNull 42\n\ntestFromNull :: Int\ntestFromNull = fromNull 0 Null\n\ntestSafeHead :: ?Int\ntestSafeHead = safeHead [10, 20, 30]\n\ntestSafeHeadEmpty :: ?Int\ntestSafeHeadEmpty = safeHead []\n\ntestOptionalAdd :: ?Int\ntestOptionalAdd = optionalAdd (toNull 3) (toNull 4)\n\ntestOptionalAddNull :: ?Int\ntestOptionalAddNull = optionalAdd Null (toNull 4)\n\ntestOptionalList :: [?Int]\ntestOptionalList = optionalList [1, -2, 3, -4, 5]\n\ntestCountNulls :: Int\ntestCountNulls = countNulls (optionalList [1, -2, 3, -4, 5])\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMakePerson > obs.txt 2> obs.err\n\t./nexus testOptionalField >> obs.txt 2>> obs.err\n\t./nexus testFindPerson >> obs.txt 2>> obs.err\n\t./nexus testFindPersonMissing >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-cpp/exp.txt",
    "content": "{\"name\":\"Alice\",\"age\":30}\nnull\n{\"name\":\"Alice\",\"age\":30}\nnull\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-cpp/foo.hpp",
    "content": "#ifndef MORLOC_OPTIONAL_RECORDS_HPP\n#define MORLOC_OPTIONAL_RECORDS_HPP\n\n#include <optional>\n#include <string>\n#include <vector>\n\nstruct person_t {\n    std::string name;\n    std::optional<int> age;\n};\n\nperson_t makePerson(const std::string& name, const std::optional<int>& age) {\n    return person_t{name, age};\n}\n\nstd::string getName(const person_t& p) {\n    return p.name;\n}\n\nstd::optional<int> getAge(const person_t& p) {\n    return p.age;\n}\n\ntemplate <typename T>\nstd::optional<T> toNull(const T& x) {\n    return std::optional<T>(x);\n}\n\nstd::optional<person_t> findPerson(const std::string& name, const std::vector<person_t>& people) {\n    for (const auto& p : people) {\n        if (p.name == name) return std::optional<person_t>(p);\n    }\n    return std::nullopt;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-cpp/main.loc",
    "content": "module main\n  ( testMakePerson\n  , testOptionalField\n  , testFindPerson\n  , testFindPersonMissing\n  )\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\nrecord Person where\n  name :: Str\n  age :: ?Int\nrecord Cpp => Person = \"person_t\"\n\nmakePerson :: Str -> ?Int -> Person\ngetName :: Person -> Str\ngetAge :: Person -> ?Int\ntoNull :: a -> ?a\nfindPerson :: Str -> [Person] -> ?Person\n\nsource Cpp from \"foo.hpp\" (\"makePerson\", \"getName\", \"getAge\", \"toNull\", \"findPerson\")\n\nalice :: Person\nalice = makePerson \"Alice\" (toNull 30)\n\nbob :: Person\nbob = makePerson \"Bob\" Null\n\ntestMakePerson :: Person\ntestMakePerson = alice\n\ntestOptionalField :: ?Int\ntestOptionalField = getAge bob\n\ntestFindPerson :: ?Person\ntestFindPerson = findPerson \"Alice\" [alice, bob]\n\ntestFindPersonMissing :: ?Person\ntestFindPersonMissing = findPerson \"Charlie\" [alice, bob]\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMakePerson > obs.txt 2> obs.err\n\t./nexus testOptionalField >> obs.txt 2>> obs.err\n\t./nexus testFindPerson >> obs.txt 2>> obs.err\n\t./nexus testFindPersonMissing >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-py/exp.txt",
    "content": "{\"name\":\"Alice\",\"age\":30}\nnull\n{\"name\":\"Alice\",\"age\":30}\nnull\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-py/foo.py",
    "content": "def makePerson(name, age):\n    return {\"name\": name, \"age\": age}\n\ndef getName(p):\n    return p[\"name\"]\n\ndef getAge(p):\n    return p[\"age\"]\n\ndef toNull(x):\n    return x\n\ndef findPerson(name, people):\n    for p in people:\n        if p[\"name\"] == name:\n            return p\n    return None\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-py/main.loc",
    "content": "module main\n  ( testMakePerson\n  , testOptionalField\n  , testFindPerson\n  , testFindPersonMissing\n  )\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\nrecord Person where\n  name :: Str\n  age :: ?Int\nrecord Py => Person = \"dict\"\n\nmakePerson :: Str -> ?Int -> Person\ngetName :: Person -> Str\ngetAge :: Person -> ?Int\ntoNull :: a -> ?a\nfindPerson :: Str -> [Person] -> ?Person\n\nsource Py from \"foo.py\" (\"makePerson\", \"getName\", \"getAge\", \"toNull\", \"findPerson\")\n\nalice :: Person\nalice = makePerson \"Alice\" (toNull 30)\n\nbob :: Person\nbob = makePerson \"Bob\" Null\n\ntestMakePerson :: Person\ntestMakePerson = alice\n\ntestOptionalField :: ?Int\ntestOptionalField = getAge bob\n\ntestFindPerson :: ?Person\ntestFindPerson = findPerson \"Alice\" [alice, bob]\n\ntestFindPersonMissing :: ?Person\ntestFindPersonMissing = findPerson \"Charlie\" [alice, bob]\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testMakePerson > obs.txt 2> obs.err\n\t./nexus testOptionalField >> obs.txt 2>> obs.err\n\t./nexus testFindPerson >> obs.txt 2>> obs.err\n\t./nexus testFindPersonMissing >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-r/exp.txt",
    "content": "{\"name\":\"Alice\",\"age\":30}\nnull\n{\"name\":\"Alice\",\"age\":30}\nnull\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-r/foo.R",
    "content": "makePerson <- function(name, age) {\n    list(name = name, age = age)\n}\n\ngetName <- function(p) {\n    p$name\n}\n\ngetAge <- function(p) {\n    p$age\n}\n\ntoNull <- function(x) {\n    return(x)\n}\n\nfindPerson <- function(name, people) {\n    for (p in people) {\n        if (p$name == name) return(p)\n    }\n    return(NULL)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/optional-records-r/main.loc",
    "content": "module main\n  ( testMakePerson\n  , testOptionalField\n  , testFindPerson\n  , testFindPersonMissing\n  )\n\ntype R => Int = \"integer\"\ntype R => Str = \"character\"\ntype R => List a = \"list\" a\n\nrecord Person where\n  name :: Str\n  age :: ?Int\nrecord R => Person = \"list\"\n\nmakePerson :: Str -> ?Int -> Person\ngetName :: Person -> Str\ngetAge :: Person -> ?Int\ntoNull :: a -> ?a\nfindPerson :: Str -> [Person] -> ?Person\n\nsource R from \"foo.R\" (\"makePerson\", \"getName\", \"getAge\", \"toNull\", \"findPerson\")\n\nalice :: Person\nalice = makePerson \"Alice\" (toNull 30)\n\nbob :: Person\nbob = makePerson \"Bob\" Null\n\ntestMakePerson :: Person\ntestMakePerson = alice\n\ntestOptionalField :: ?Int\ntestOptionalField = getAge bob\n\ntestFindPerson :: ?Person\ntestFindPerson = findPerson \"Alice\" [alice, bob]\n\ntestFindPersonMissing :: ?Person\ntestFindPersonMissing = findPerson \"Charlie\" [alice, bob]\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"alice\"' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-1/exp.txt",
    "content": "\"{\\\"name\\\": \\\"alice\\\", \\\"hobby\\\": [\\\"phishing\\\", \\\"SIM swapping\\\"]}\"\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-1/foo.py",
    "content": "import json\n\ndef foo(name):\n    return dict(name = name, hobby = [\"phishing\", \"SIM swapping\"])\n\n#  packJsonObj   Py :: pack   => \"str\" -> \"dict\"\ndef packJsonObj(json_str):\n    return json.loads(json_str)\n\n#  unpackJsonObj Py :: unpack => \"dict\" -> \"str\"\ndef unpackJsonObj(json_obj):\n    return json.dumps(json_obj)\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-1/main.loc",
    "content": "module main (foo)\n\nsource Py from \"foo.py\" (\"foo\")\n\nfoo :: Str -> JsonObj\n\ntype Py => Str = \"str\"\ntype Py => JsonObj = \"dict\"\n\nclass Packable a b where\n  pack :: a -> b\n  unpack :: b -> a\n\ninstance Packable (Str) JsonObj where\n  source Py from \"foo.py\"\n    ( \"packJsonObj\" as pack\n    , \"unpackJsonObj\" as unpack\n    )\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"alice\"' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-2/exp.txt",
    "content": "\"{\\\"name\\\": \\\"alice\\\", \\\"hobby\\\": [\\\"phishing\\\", \\\"SIM swapping\\\"]}\"\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-2/foo.py",
    "content": "def foo(name):\n    return dict(name = name, hobby = [\"phishing\", \"SIM swapping\"])\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-2/lib/json/json.py",
    "content": "import json\n\n#  packJsonObj   Py :: pack   => \"str\" -> \"dict\"\ndef packJsonObj(json_str):\n    return json.loads(json_str)\n\n#  unpackJsonObj Py :: unpack => \"dict\" -> \"str\"\ndef unpackJsonObj(json_obj):\n    return json.dumps(json_obj)\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-2/lib/json/main.loc",
    "content": "module lib.json (JsonObj, Str)\n\ntype Py => Str = \"str\"\ntype Py => JsonObj = \"dict\"\n\nclass Packable a b where\n  pack :: a -> b\n  unpack :: b -> a\n\ninstance Packable (Str) JsonObj where\n  source Py from \"json.py\"\n    ( \"packJsonObj\" as pack\n    , \"unpackJsonObj\" as unpack\n    )\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-2/main.loc",
    "content": "module main (foo)\n\nimport lib.json (JsonObj, Str)\n\nsource Py from \"foo.py\" (\"foo\")\n\nfoo :: Str -> JsonObj\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"Alice\"' > obs.txt 2> obs.err\n\n.PHONY: clean\nclean:\n\trm -rf nexus pools *pdf *rda __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-3/dumby.hpp",
    "content": "#ifndef __MORLOC_DUMBY_HPP__\n#define __MORLOC_DUMBY_HPP__\n\n#include <string>\n#include <functional>\n\n// h :: (a -> b) -> Str -> Real\ntemplate <class F>\ndouble h(F f, std::string x){\n  return 4.2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-3/dumby.py",
    "content": "#  f :: Real -> Real\ndef f(x): \n    return (2 * x)\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-3/exp.txt",
    "content": "4.2\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-3/main.loc",
    "content": "module main (foo)\n\ntype Cpp => Real = \"double\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => (Tuple2 a b) = \"std::tuple<$1,$2>\" a b\n\ntype Py => Real = \"float\"\ntype Py => Str = \"str\"\ntype Py => (Tuple2 a b) = \"tuple\" a b\n\nsource Cpp from \"dumby.hpp\" (\"h\")\nh :: (a -> b) -> Str -> Real\n\nsource Py from \"dumby.py\" (\"f\")\nf :: Real -> Real\n\ng :: (Real -> Real) -> Str -> Real\ng f' = h (\\l -> (\"ladida\", f' l))\n\nfoo :: Str -> Real\nfoo = g f\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 > obs.txt 2> obs.err\n\n.PHONY: clean\nclean:\n\trm -rf nexus pools *pdf *rda __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-4/dumby.hpp",
    "content": "#ifndef __MORLOC_DUMBY_HPP__\n#define __MORLOC_DUMBY_HPP__\n\n// fcpp :: Real -> Real\ndouble fcpp(double x){\n  return (2*x);\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-4/dumby.py",
    "content": "#  fpy :: Real -> Real\ndef fpy(x): \n    return (2 * x)\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-4/exp.txt",
    "content": "[84,84]\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-4/main.loc",
    "content": "-- This fails\nmodule main (foo)\n\ntype Cpp => Real = \"double\"\ntype Cpp => (Tuple2 a b) = \"std::tuple<$1,$2>\" a b\n\ntype Py => Real = \"float\"\ntype Py => (Tuple2 a b) = \"tuple\" a b\n\nsource Cpp from \"dumby.hpp\" (\"fcpp\")\nfcpp :: Real -> Real\n\nsource Py from \"dumby.py\" (\"fpy\")\nfpy :: Real -> Real\n\nfoo x = (fcpp x, fpy x)\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 10 20 > obs.txt 2> obs.err\n\n.PHONY: clean\nclean:\n\trm -rf nexus pools *pdf *rda __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-5/exp.txt",
    "content": "[[\"x\",\"y\"],[10,20]]\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-5/main.loc",
    "content": "module main (foo)\n\ntype Cpp => Map a b = \"std::map<$1,$2>\" a b\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Cpp => Tuple2 a b = \"std::tuple<$1,$2>\" a b\ntype Cpp => Str = \"std::string\"\ntype Cpp => Int = \"int\"\n\ntype Py => Map a b = \"dict\" a b\ntype Py => List a = \"list\" a\ntype Py => Tuple2 a b = \"tuple\" a b\ntype Py => Str = \"str\"\ntype Py => Int = \"int\"\n\nclass Packable a b where\n  pack :: a -> b\n  unpack :: b -> a\n\ninstance Packable ([Str],[val]) (Map Str val) where\n  source Py from \"map.py\"\n    ( \"packMap\" as pack\n    , \"unpackMap\" as unpack\n    )\n\n  source Cpp from \"map.hpp\"\n    ( \"morloc_packMap\" as pack\n    , \"morloc_unpackMap\" as unpack\n    )\n\nsource Cpp from \"map.hpp\" (\"insert\")\ninsert :: Map a b -> a -> b -> Map a b\n\nsource Py from \"map.py\" (\"singleton\")\nsingleton :: Str -> a -> Map Str a\n\nfoo :: Int -> Int -> Map Str Int\nfoo x y = insert (singleton \"x\" x) \"y\" y\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-5/map.hpp",
    "content": "#ifndef __MORLOC_PACKER_TEST_5_HPP__\n#define __MORLOC_PACKER_TEST_5_HPP__\n\n#include <vector>\n#include <map>\n#include <utility>\n#include <cassert>\n\ntemplate <class A, class B>\nstd::map<A,B> morloc_packMap(std::tuple<std::vector<A>,std::vector<B>> items){\n    std::map<A,B> m;\n    std::vector<A> a = std::get<0>(items);\n    std::vector<B> b = std::get<1>(items);\n    assert(a.size() == b.size());\n    for(std::size_t i = 0; i < a.size(); i++){\n        m[a[i]] = b[i];\n    }\n    return m;\n}\n\ntemplate <class A, class B>\nstd::tuple<std::vector<A>,std::vector<B>> morloc_unpackMap(std::map<A,B> m){\n    std::vector<A> a;\n    std::vector<B> b;\n    for (auto tuple : m) {\n        a.push_back(std::get<0>(tuple));\n        b.push_back(std::get<1>(tuple));\n    }\n    return std::make_tuple(a, b);\n}\n\ntemplate <class A, class B>\nstd::map<A,B> insert(std::map<A,B> m, A a, B b){\n  m[a] = b;\n  return m;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/packer-definitions-5/map.py",
    "content": "def packMap(xs):\n    return dict(zip(xs[0], xs[1]))\n\ndef unpackMap(d):\n    return (list(d.keys()), list(d.values()))\n\ndef singleton(k, v):\n    return {k : v}\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/.gitignore",
    "content": "test-data.*\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t# below buffer interop\n\tpython3 make_test_data.py 4065 > test-data.json\n\t./nexus pfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t# above buffer interop\n\tpython3 make_test_data.py 4066 > test-data.json\n\t./nexus pfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t# at mesg size\n\tpython3 make_test_data.py 65536 > test-data.json\n\t./nexus pfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t# at mesg size + 1\n\tpython3 make_test_data.py 65537 > test-data.json\n\t./nexus pfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t# above mesg size\n\tpython3 make_test_data.py 1048577 > test-data.json\n\t./nexus pfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rfoo test-data.json | wc -c 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__ data* test-data*\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/exp.txt",
    "content": "4068\n4068\n4068\n4069\n4069\n4069\n65539\n65539\n65539\n65540\n65540\n65540\n1048580\n1048580\n1048580\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/foo.R",
    "content": "rid <- function(x) x\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/foo.h",
    "content": "template <class A>\nA cid(A x){\n  return x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/foo.py",
    "content": "def pid(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/main.loc",
    "content": "module main (cid, pid, rid, pfoo, rfoo, cfoo)\n\nimport root ((.))\n\nsource Cpp from \"foo.h\" (\"cid\")\nsource Py from \"foo.py\" (\"pid\")\nsource R from \"foo.R\" (\"rid\")\n\n\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\n\ncid :: Str -> Str\npid :: Str -> Str\nrid :: Str -> Str\n\ncfoo :: Str -> Str\ncfoo = cid . pid . cid\n\npfoo :: Str -> Str\npfoo = pid . cid . pid\n\nrfoo :: Str -> Str\nrfoo = rid . pid . rid\n"
  },
  {
    "path": "test-suite/golden-tests/packets-interop/make_test_data.py",
    "content": "import sys\nprint('\"' + \"x\" * int(sys.argv[1]) + '\"')\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/.gitignore",
    "content": "test-data.json\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t# below buffer\n\tpython3 make_test_data.py 4065 > test-data.json\n\t./nexus pid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t# above buffer\n\tpython3 make_test_data.py 4066 > test-data.json\n\t./nexus pid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t# above mesg size\n\tpython3 make_test_data.py 1048577 > test-data.json\n\t./nexus pid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus cid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\t./nexus rid test-data.json | wc -c 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__ data* \n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/exp.txt",
    "content": "4068\n4068\n4068\n4069\n4069\n4069\n1048580\n1048580\n1048580\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/foo.R",
    "content": "rid <- function(x) x\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/foo.h",
    "content": "template <class A>\nA cid(A x){\n  return x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/foo.py",
    "content": "def pid(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/main.loc",
    "content": "module main (cid, pid, rid, pfoo, rfoo, cfoo)\n\nimport root ((.))\n\nsource Cpp from \"foo.h\" (\"cid\")\nsource Py from \"foo.py\" (\"pid\")\nsource R from \"foo.R\" (\"rid\")\n\n\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\n\ncid :: Str -> Str\npid :: Str -> Str\nrid :: Str -> Str\n\ncfoo :: Str -> Str\ncfoo = cid . pid . cid\n\npfoo :: Str -> Str\npfoo = pid . cid . pid\n\nrfoo :: Str -> Str\nrfoo = rid . pid . rid\n"
  },
  {
    "path": "test-suite/golden-tests/packets-large/make_test_data.py",
    "content": "import sys\nprint('\"' + \"x\" * int(sys.argv[1]) + '\"')\n"
  },
  {
    "path": "test-suite/golden-tests/parser-stress/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testPrecedence       > obs.txt 2> obs.err\n\t./nexus testParens          2>> obs.err  >> obs.txt\n\t./nexus testNegatives       2>> obs.err  >> obs.txt\n\t./nexus testNumericLiterals 2>> obs.err  >> obs.txt\n\t./nexus testMixedArith      2>> obs.err  >> obs.txt\n\t./nexus testGetterArith     2>> obs.err  >> obs.txt\n\t./nexus testNestedParens    2>> obs.err  >> obs.txt\n\t./nexus testAssociativity   2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/parser-stress/exp.txt",
    "content": "[true,true,true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true,true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/parser-stress/main.loc",
    "content": "module main\n  ( testPrecedence\n  , testParens\n  , testNegatives\n  , testNumericLiterals\n  , testMixedArith\n  , testGetterArith\n  , testNestedParens\n  , testAssociativity\n  )\n\nimport root\nimport root-py\n\n-- ================================================================\n-- Test 1: Basic operator precedence (no parens)\n-- Fixity from root:\n--   infixl 6 (+) (-)\n--   infixl 7 (*) (//) (%)\n--   infixr 8 (**)\n-- ================================================================\ntestPrecedence :: [Bool]\ntestPrecedence =\n  [ 2 + 3 * 4 == 14              -- * before +\n  , 10 - 2 * 3 == 4              -- * before -\n  , 1 + 2 + 3 == 6               -- left assoc +\n  , 10 - 3 - 2 == 5              -- left assoc -\n  , 2 * 3 + 4 * 5 == 26          -- two *'s, then +\n  , 10 - 2 * 3 + 1 == 5          -- mixed - and + with *\n  , 100 // 10 + 5 == 15          -- // before +\n  , 100 // 10 * 2 == 20          -- // and * same prec, left assoc\n  , 7 % 3 + 1 == 2               -- % before +\n  , 2 + 3 * 4 + 5 == 19          -- sandwich: + * +\n  ]\n\n-- ================================================================\n-- Test 2: Parenthesized expressions override precedence\n-- THIS IS THE CORE TEST for the CParenE/ParenE fix\n-- ================================================================\ntestParens :: [Bool]\ntestParens =\n  [ (2 + 3) * 4 == 20            -- parens force + before *\n  , 2 * (3 + 4) == 14            -- parens on right\n  , (2 + 3) * (4 + 5) == 45      -- both sides\n  , (10 - 2) * 3 == 24           -- parens force - before *\n  , 10 - (2 * 3) == 4            -- parens around natural precedence (no change)\n  , 2 * (3 + 4) != 2 * 3 + 4    -- 14 != 10, parens matter!\n  , (1 + 1) * (1 + 1) == 4       -- simple both-sides\n  , ((1 + 2)) == 3               -- double parens\n  , (((1 + 2) * 3)) == 9         -- triple nesting\n  , 100 // (2 + 3) == 20         -- parens with integer division\n  , (100 // 2) + 3 == 53         -- parens not needed but present\n  , (10 % 3) * 2 == 2            -- parens with modulo\n  , 10 % (3 * 2) == 4            -- parens force * inside %\n  ]\n\n-- ================================================================\n-- Test 3: Negative numbers and unary minus\n-- ================================================================\ntestNegatives :: [Bool]\ntestNegatives =\n  [ -1 + 2 == 1                  -- negative literal\n  , -3 * -2 == 6                 -- two negatives\n  , 5 + -3 == 2                  -- negative on right of +\n  , 5 * -2 == -10                -- negative on right of *\n  , -1 * -1 == 1                 -- double negative\n  , (-1) * (-1) == 1             -- parens around negatives\n  , (0 - 1) * 2 == -2             -- subtraction instead of unary minus on expr\n  , -10 + 5 * 2 == 0             -- precedence with negative: -10 + (5*2)\n  , (-10 + 5) * 2 == -10         -- parens override: (-5) * 2\n  , 0 - 1 == -1                  -- subtraction vs negative\n  ]\n\n-- ================================================================\n-- Test 4: Numeric literal formats\n-- ================================================================\ntestNumericLiterals :: [Bool]\ntestNumericLiterals =\n  [ 0xff == 255                  -- hex lowercase\n  , 0xFF == 255                  -- hex uppercase\n  , 0XFF == 255                  -- hex with capital X\n  , 0x0 == 0                    -- hex zero\n  , 0o77 == 63                   -- octal\n  , 0O77 == 63                   -- octal capital O\n  , 0o10 == 8                    -- octal 10 = decimal 8\n  , 0b1010 == 10                 -- binary\n  , 0B1111 == 15                 -- binary capital B\n  , 0b0 == 0                    -- binary zero\n  , 0b11111111 == 255            -- binary 255\n  , 0xff + 1 == 256              -- hex in arithmetic\n  , 0b1010 * 2 == 20             -- binary in arithmetic\n  , 0o10 + 0b10 == 10            -- octal + binary\n  , (0xff + 1) * 2 == 512        -- parens with hex\n  , 0xff * (0b10 + 1) == 765     -- mixed literal formats with parens\n  ]\n\n-- ================================================================\n-- Test 5: Mixed arithmetic with all operators\n-- ================================================================\ntestMixedArith :: [Bool]\ntestMixedArith =\n  [ 2 + 3 * 4 - 1 == 13                 -- + * - chain\n  , (2 + 3) * (4 - 1) == 15             -- parens both sides\n  , 100 // 10 + 100 % 10 == 10          -- // and % then +\n  , (100 + 50) // 10 == 15              -- parens with //\n  , 10 * 2 + 3 * 4 - 5 == 27            -- multi-term\n  , (10 * 2 + 3) * (4 - 5) == -23       -- complex parens\n  , 2 * 3 + 4 * 5 + 6 * 7 == 68         -- three products summed\n  , (2 * 3 + 4) * (5 + 6 * 7) == 470    -- nested precedence\n  , 1 + 2 * 3 + 4 * 5 + 6 == 33         -- long chain\n  , (1 + 2) * (3 + 4) * (5 + 6) == 231  -- three parenthesized groups\n  ]\n\n-- ================================================================\n-- Test 6: Getters/accessors mixed with arithmetic\n-- ================================================================\ntestGetterArith :: [Bool]\ntestGetterArith =\n  [ (.0 (10, 20)) + 5 == 15             -- getter result in arithmetic\n  , (.1 (10, 20)) * 2 == 40             -- getter then multiply\n  , (.0 (3, 7)) * (.1 (3, 7)) == 21     -- two getters multiplied\n  , (.0 (10, 20)) + (.1 (10, 20)) == 30 -- two getters added\n  , (.0 (2, 3)) * (.1 (2, 3)) + 1 == 7  -- getters with precedence\n  , ((.0 (2, 3)) + (.1 (2, 3))) * 2 == 10  -- parens around getter arithmetic\n  , (.0 (100, 200)) // (.1 (5, 10)) == 10  -- getters with integer division\n  ]\n\n-- ================================================================\n-- Test 7: Deeply nested parenthesization\n-- ================================================================\ntestNestedParens :: [Bool]\ntestNestedParens =\n  [ ((((1 + 2)))) == 3                           -- quad nesting\n  , (((2 + 3)) * ((4 + 5))) == 45                -- deep both sides\n  , ((1 + 2) * (3 + 4)) + ((5 + 6) * (7 + 8)) == 186  -- two complex products\n  , (((1 + 1) + 1) + 1) + 1 == 5                 -- left-nested\n  , 1 + (1 + (1 + (1 + 1))) == 5                 -- right-nested\n  , ((2) * ((3) + (4))) == 14                     -- redundant parens everywhere\n  , (1 + (2 * (3 + (4 * 5)))) == 47              -- alternating + and * nesting\n  , ((1 + 2 * 3) + (4 * 5 + 6)) == 33            -- parens grouping subexpressions\n  ]\n\n-- ================================================================\n-- Test 8: Associativity stress tests\n-- ================================================================\ntestAssociativity :: [Bool]\ntestAssociativity =\n  [ -- Addition is associative, these should all be equal\n    (1 + 2) + 3 == 1 + (2 + 3)\n  , (10 + 20) + 30 == 10 + (20 + 30)\n    -- Subtraction is NOT associative\n  , (10 - 3) - 2 == 5                 -- left assoc: (10-3)-2 = 5\n  , 10 - (3 - 2) == 9                 -- right grouping: 10-(3-2) = 9\n  , (10 - 3) - 2 != 10 - (3 - 2)     -- proves non-associativity\n    -- Multiplication is associative\n  , (2 * 3) * 4 == 2 * (3 * 4)\n    -- Integer division is NOT associative\n  , (100 // 10) // 2 == 5             -- left assoc: (100//10)//2 = 5\n  , 100 // (10 // 2) == 20            -- right grouping: 100//(10//2) = 20\n  , (100 // 10) // 2 != 100 // (10 // 2)  -- proves non-associativity\n    -- Mixed precedence chains\n  , 2 + 3 * 4 == (2 + (3 * 4))       -- natural precedence = explicit right grouping of *\n  , 2 * 3 + 4 == ((2 * 3) + 4)       -- natural precedence = explicit left grouping of *\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/bar/main.hpp",
    "content": "#ifndef __BAR_MAIN_HPP__\n#define __BAR_MAIN_HPP__\n\ndouble add (double x, double y){\n    return x + y;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/bar/main.loc",
    "content": "module bar (*)\n\nsource Cpp from \"main.hpp\" (\"add\")\n\ntype Cpp => Real = \"double\"\n\nadd :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/baz/main.hpp",
    "content": "#ifndef __BAZ_MAIN_HPP__\n#define __BAZ_MAIN_HPP__\n\ndouble mul (double x, double y){\n    return x * y;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/baz/main.loc",
    "content": "module baz (*)\n\nsource Cpp from \"main.hpp\" (\"mul\")\n\ntype Cpp => Real = \"double\"\n\nmul :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-c/main.loc",
    "content": "module main (foo)\n\ntype Cpp => Real = \"double\"\n\nimport bar (add)\nimport baz (mul)\n\nfoo x = add x (mul 2.0 20.0)\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/bar/main.loc",
    "content": "module (*)\n\nsource Py from \"main.py\" (\"add\")\n\ntype Py => Real = \"float\"\n\nadd :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/bar/main.py",
    "content": "def add (x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/baz/main.loc",
    "content": "module (*)\n\nsource Py from \"main.py\" (\"mul\")\n\ntype Py => Real = \"float\"\n\nmul :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/baz/main.py",
    "content": "def mul (x, y):\n    return x * y\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-py/main.loc",
    "content": "module main (foo)\n\ntype Py => Real = \"float\"\n\nimport .bar (add)\nimport .baz (mul)\n\nfoo x = add x (mul 2.0 20.0)\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/bar/main.R",
    "content": "add <- function(x, y){\n    x + y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/bar/main.loc",
    "content": "module bar (*)\n\nsource R from \"main.R\" (\"add\")\n\ntype R => Real = \"numeric\"\n\nadd :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/baz/main.R",
    "content": "mul <- function(x, y){\n    x * y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/baz/main.loc",
    "content": "module baz (*)\n\nsource R from \"main.R\" (\"mul\")\n\ntype R => Real = \"numeric\"\n\nmul :: Real -> Real -> Real\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/path-shadowing-r/main.loc",
    "content": "module main (foo)\n\ntype R => Real = \"numeric\"\n\nimport bar (add)\nimport baz (mul)\n\nfoo x = add x (mul 2.0 20.0)\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-getters/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testr   >  obs.txt\n\t./nexus testpy  2>> obs.err  >> obs.txt\n\t./nexus testcpp 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-getters/exp.txt",
    "content": "[true,true,true,true,true]\n[true,true,true,true,true]\n[true,true,true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-getters/main.loc",
    "content": "module main (testpy, testr, testcpp)\n\nimport root\nimport root-py\nimport root-r\nimport root-cpp\n\n-- module main (foo)\n-- foo = idr ( (==) (map .1 [(1,True), (2,False)]) [True,False] )\n\ntestr = idr\n  [ (==) ((.1 (42, \"Jim\")))\n       \"Jim\"\n  , (==) (.1.(.0,.2) (42, (\"Jim\", True, 69)))\n       (\"Jim\", 69)\n  -- , (==) (map .1 [(1,2), (2,3)]) [2,3]\n  , True -- for now, I'm ignoring this case, there is an issue with list versus vector generation\n  , (==) (.(.1, .0) (1, 2))\n       (2,1)\n  , (==) (.(.1) (1, 2))\n       2\n  ]\n\ntestpy = idpy -- force into python\n  [ (==) (.1 (42, \"Jim\"))\n       \"Jim\"\n  , (==) (.1.(.0,.2) (42, (\"Jim\", True, 69)))\n       (\"Jim\", 69)\n  , (==) (map .1 [(1,2), (2,3)])\n       [2,3]\n  , (==) (.(.1, .0) (1, 2))\n       (2,1)\n  , (==) (.(.1) (1, 2))\n       2\n  ]\n\ntestcpp = idcpp\n  [ (==) (.1 (42, \"Jim\"))\n       \"Jim\"\n  , (==) (.1.(.0,.2) (42, (\"Jim\", True, 69)))\n       (\"Jim\", 69)\n  , (==) (map .1 [(1,2), (2,3)]) [2,3]\n  , (==) (.(.1, .0) (1, 2))\n       (2,1)\n  , (==) (.(.1) (1, 2))\n       2\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/.gitignore",
    "content": "nexus*\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus-py main-py.loc 2> build.err\n\tmorloc make -o nexus-r main-r.loc 2> build.err\n\tmorloc make -o nexus-cpp main-cpp.loc 2> build.err\n\t./nexus-py test > obs.txt 2> obs.err\n\t./nexus-r test 2>> obs.err  >> obs.txt\n\t./nexus-cpp test 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/exp.txt",
    "content": "[true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true]\n[true,true,true,true,true,true,true,true]\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/main-cpp.loc",
    "content": "module testcpp (test)\n\nsource Cpp from \"types.hpp\" (\"location_t\", \"worker_t\")\n\nimport tests\nimport root-cpp\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/main-py.loc",
    "content": "module testpy (test)\n\nimport tests\nimport root-py\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/main-r.loc",
    "content": "module testr (test)\n\nimport tests\nimport root-r\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/tests.loc",
    "content": "module tests (test)\n\nimport root\n\nrecord Location = Location\n  { latitude :: Real\n  , longitude :: Real\n  , altitude :: Real\n  }\nrecord Py => Location = \"dict\"\nrecord R => Location = \"list\"\nrecord Cpp => Location = \"location_t\"\n\nrecord Worker = Worker\n  { home :: Location\n  , age :: Int\n  , job :: (Bool, Location)\n  }\nrecord Py => Worker = \"dict\"\nrecord R => Worker = \"list\"\nrecord Cpp => Worker = \"worker_t\"\n\ndefaultLocation :: Location\ndefaultLocation =\n  { latitude = 42.03\n  , longitude = -93.62\n  , altitude = 942.0\n  } \n\ndefaultWorker :: Worker\ndefaultWorker =\n  { home = defaultLocation\n  , age = 12\n  , job = (True, defaultLocation)\n  }\n\ntest = -- map .(.1 = 9) [(1,2), (2,3)]\n  [\n    -- getters\n    (==) (.(.1 = \"Sally\") (42, \"Jim\"))\n       (42, \"Sally\")\n  , (==) (.1.(.0 = \"Sally\", .2 = 70) (42, (\"Jim\", True, 69)))\n       (42, (\"Sally\", True, 70))\n  , (==) (map .(.1 = 9) [(1,2), (2,3)])\n       [(1,9), (2,9)]\n  , (==) (.latitude defaultLocation)\n       (42.03 :: Real)\n  , (==) (.(.age, .job.1.altitude, .age) defaultWorker)\n       (12, 942.0, 12)\n    -- setters\n  , (==) (.latitude ((.latitude = 0.7) defaultLocation)) 0.7\n  , (==) (.(.latitude, .longitude) (.(.latitude = 0.7, .longitude = -100.0) defaultLocation))\n       ( 0.7, -100.0 )\n  , (==) (   .(.home.altitude,         .job.1.altitude)\n         ( .(.home.altitude = 936.0, .job.1.altitude = 666.0) defaultWorker ))\n       (936.0, 666.0)\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/pattern-setters/types.hpp",
    "content": "#include <utility>\n\nstruct location_t {\n  double latitude;\n  double longitude;\n  double altitude;\n};\n\nstruct worker_t {\n  location_t home;\n  int age;\n  std::tuple<bool, location_t> job;\n};\n"
  },
  {
    "path": "test-suite/golden-tests/poly-list-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testListLiteral > obs.txt 2> obs.err\n\t./nexus testListMempty >> obs.txt 2>> obs.err\n\t./nexus testListAppend >> obs.txt 2>> obs.err\n\t./nexus testListCons >> obs.txt 2>> obs.err\n\t./nexus testListUncons >> obs.txt 2>> obs.err\n\t./nexus testListSnoc >> obs.txt 2>> obs.err\n\t./nexus testListUnsnoc >> obs.txt 2>> obs.err\n\t./nexus testListAt >> obs.txt 2>> obs.err\n\t./nexus testDequeLiteral >> obs.txt 2>> obs.err\n\t./nexus testDequeMempty >> obs.txt 2>> obs.err\n\t./nexus testDequeAppend >> obs.txt 2>> obs.err\n\t./nexus testDequeCons >> obs.txt 2>> obs.err\n\t./nexus testDequeUncons >> obs.txt 2>> obs.err\n\t./nexus testDequeSnoc >> obs.txt 2>> obs.err\n\t./nexus testDequeUnsnoc >> obs.txt 2>> obs.err\n\t./nexus testVectorLiteral >> obs.txt 2>> obs.err\n\t./nexus testVectorAppend >> obs.txt 2>> obs.err\n\t./nexus testVectorCons >> obs.txt 2>> obs.err\n\t./nexus testVectorUncons >> obs.txt 2>> obs.err\n\t./nexus testVectorSnoc >> obs.txt 2>> obs.err\n\t./nexus testVectorUnsnoc >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/poly-list-1/exp.txt",
    "content": "[1,2,3]\n[1,2,3]\n[1,2,3,4]\n[0,1,2,3]\n[1,[2,3]]\n[1,2,3,4]\n[[1,2],3]\n20\n[10,20,30]\n[4,5,6]\n[1,2,3,4]\n[0,1,2,3]\n[1,[2,3]]\n[1,2,3,4]\n[[1,2],3]\n[7,8,9]\n[1,2,3,4]\n[0,1,2,3]\n[1,[2,3]]\n[1,2,3,4]\n[[1,2],3]\n"
  },
  {
    "path": "test-suite/golden-tests/poly-list-1/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <vector>\n#include <deque>\n#include <tuple>\n\n// --- List (std::vector) operations ---\n\ntemplate <typename T>\nstd::vector<T> appendList(std::vector<T> a, std::vector<T> b) {\n    a.insert(a.end(), b.begin(), b.end());\n    return a;\n}\n\ntemplate <typename T>\nT listAt(int i, std::vector<T> xs) {\n    return xs[i];\n}\n\ntemplate <typename T>\nstd::vector<T> listCons(T x, std::vector<T> xs) {\n    xs.insert(xs.begin(), x);\n    return xs;\n}\n\ntemplate <typename T>\nstd::tuple<T, std::vector<T>> listUncons(std::vector<T> xs) {\n    T head = xs.front();\n    std::vector<T> tail(xs.begin() + 1, xs.end());\n    return {head, tail};\n}\n\ntemplate <typename T>\nstd::vector<T> listSnoc(std::vector<T> xs, T x) {\n    xs.push_back(x);\n    return xs;\n}\n\ntemplate <typename T>\nstd::tuple<std::vector<T>, T> listUnsnoc(std::vector<T> xs) {\n    T last = xs.back();\n    xs.pop_back();\n    return {xs, last};\n}\n\n// --- Deque (std::deque) operations ---\n\ntemplate <typename T>\nstd::deque<T> appendDeque(std::deque<T> a, std::deque<T> b) {\n    a.insert(a.end(), b.begin(), b.end());\n    return a;\n}\n\ntemplate <typename T>\nstd::deque<T> dequeCons(T x, std::deque<T> xs) {\n    xs.push_front(x);\n    return xs;\n}\n\ntemplate <typename T>\nstd::tuple<T, std::deque<T>> dequeUncons(std::deque<T> xs) {\n    T head = xs.front();\n    xs.pop_front();\n    return {head, xs};\n}\n\ntemplate <typename T>\nstd::deque<T> dequeSnoc(std::deque<T> xs, T x) {\n    xs.push_back(x);\n    return xs;\n}\n\ntemplate <typename T>\nstd::tuple<std::deque<T>, T> dequeUnsnoc(std::deque<T> xs) {\n    T last = xs.back();\n    xs.pop_back();\n    return {xs, last};\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/poly-list-1/main.loc",
    "content": "module main (testListLiteral, testListMempty, testListAppend,\n             testListCons, testListUncons,\n             testListSnoc, testListUnsnoc,\n             testListAt,\n             testDequeLiteral, testDequeMempty, testDequeAppend,\n             testDequeCons, testDequeUncons,\n             testDequeSnoc, testDequeUnsnoc,\n             testVectorLiteral, testVectorAppend,\n             testVectorCons, testVectorUncons,\n             testVectorSnoc, testVectorUnsnoc)\n\n-- Primitive types (standalone, no root import)\ntype Cpp => Int = \"int\"\n\n-- Container types\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Cpp => Deque a = \"std::deque<$1>\" a\ntype Cpp => Vector a = \"std::vector<$1>\" a\n\n-- Tuple for uncons/unsnoc return types\ntype Cpp => Tuple2 a b = \"std::tuple<$1,$2>\" a b\n\n-- General type definitions\ntype List a\ntype Deque a = List a\ntype Vector a = List a\n\n-- Typeclasses\nclass Semigroup a where\n  append :: a -> a -> a\n\nclass Semigroup a => Monoid a where\n  mempty :: a\n\nclass Indexed f where\n  at :: Int -> f a -> a\n\nclass Stack f where\n  cons :: a -> f a -> f a\n  uncons :: f a -> (a, f a)\n\nclass Queue f where\n  snoc :: f a -> a -> f a\n  unsnoc :: f a -> (f a, a)\n\n-- List instances\ninstance Semigroup (List a) where\n  source Cpp from \"foo.hpp\" (\"appendList\" as append)\n\ninstance Monoid (List a) where\n  mempty = []\n\ninstance Indexed List where\n  source Cpp from \"foo.hpp\" (\"listAt\" as at)\n\ninstance Stack List where\n  source Cpp from \"foo.hpp\" (\"listCons\" as cons)\n  source Cpp from \"foo.hpp\" (\"listUncons\" as uncons)\n\ninstance Queue List where\n  source Cpp from \"foo.hpp\" (\"listSnoc\" as snoc)\n  source Cpp from \"foo.hpp\" (\"listUnsnoc\" as unsnoc)\n\n-- Deque instances (maps to std::deque in C++)\ninstance Semigroup (Deque a) where\n  source Cpp from \"foo.hpp\" (\"appendDeque\" as append)\n\ninstance Monoid (Deque a) where\n  mempty = []\n\ninstance Stack Deque where\n  source Cpp from \"foo.hpp\" (\"dequeCons\" as cons)\n  source Cpp from \"foo.hpp\" (\"dequeUncons\" as uncons)\n\ninstance Queue Deque where\n  source Cpp from \"foo.hpp\" (\"dequeSnoc\" as snoc)\n  source Cpp from \"foo.hpp\" (\"dequeUnsnoc\" as unsnoc)\n\n-- No Vector instances needed (reduces to List in C++)\n\n-- List tests\ntestListLiteral :: [Int]\ntestListLiteral = [1, 2, 3]\n\ntestListMempty :: [Int]\ntestListMempty = append mempty [1, 2, 3]\n\ntestListAppend :: [Int]\ntestListAppend = append [1, 2] [3, 4]\n\ntestListCons :: [Int]\ntestListCons = cons 0 [1, 2, 3]\n\ntestListUncons :: (Int, [Int])\ntestListUncons = uncons [1, 2, 3]\n\ntestListSnoc :: [Int]\ntestListSnoc = snoc [1, 2, 3] 4\n\ntestListUnsnoc :: ([Int], Int)\ntestListUnsnoc = unsnoc [1, 2, 3]\n\ntestListAt :: Int\ntestListAt = at 1 [10, 20, 30]\n\n-- Deque tests (uses Deque-specific instances with std::deque)\ntestDequeLiteral :: Deque Int\ntestDequeLiteral = [10, 20, 30]\n\ntestDequeMempty :: Deque Int\ntestDequeMempty = append mempty [4, 5, 6]\n\ntestDequeAppend :: Deque Int\ntestDequeAppend = append [1, 2] [3, 4]\n\ntestDequeCons :: Deque Int\ntestDequeCons = cons 0 [1, 2, 3]\n\ntestDequeUncons :: (Int, Deque Int)\ntestDequeUncons = uncons [1, 2, 3]\n\ntestDequeSnoc :: Deque Int\ntestDequeSnoc = snoc [1, 2, 3] 4\n\ntestDequeUnsnoc :: (Deque Int, Int)\ntestDequeUnsnoc = unsnoc [1, 2, 3]\n\n-- Vector tests (uses List instances via alias, both map to std::vector)\ntestVectorLiteral :: Vector Int\ntestVectorLiteral = [7, 8, 9]\n\ntestVectorAppend :: Vector Int\ntestVectorAppend = append [1, 2] [3, 4]\n\ntestVectorCons :: Vector Int\ntestVectorCons = cons 0 [1, 2, 3]\n\ntestVectorUncons :: (Int, Vector Int)\ntestVectorUncons = uncons [1, 2, 3]\n\ntestVectorSnoc :: Vector Int\ntestVectorSnoc = snoc [1, 2, 3] 4\n\ntestVectorUnsnoc :: (Vector Int, Int)\ntestVectorUnsnoc = unsnoc [1, 2, 3]\n"
  },
  {
    "path": "test-suite/golden-tests/record-docstrings/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc > /dev/null 2> build.err\n\techo foo > obs.txt 2> obs.err\n\t./nexus foo --alg-config=algconf.json [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo --alg-config=algconf.json -m 0 -n 0 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo -m 0 -n 0 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo -m 5 -n 0 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo -m 5 -n 2 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo -n 2 -m 5 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo -m 5 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus foo [1,2,3] 2>> obs.err  >> obs.txt\n\techo bar 2>> obs.err  >> obs.txt\n\t./nexus bar [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus bar -y a [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus bar -y a -t 5 -m 6 [1,2,3] 2>> obs.err  >> obs.txt\n\t./nexus bar -y f -t 5 -m 6 --alg-conf=algconf.json [1,2,3] 2>> obs.err  >> obs.txt\n\techo travelTime 2>> obs.err  >> obs.txt\n\t./nexus travelTime -w 2 3 4 2>> obs.err  >> obs.txt\n\techo baz 2>> obs.err  >> obs.txt\n\t./nexus baz foo 2>> obs.err  >> obs.txt\n\t./nexus baz \"foo bar\" 2>> obs.err  >> obs.txt\n\techo bif 2>> obs.err  >> obs.txt\n\t./nexus bif 2>> obs.err  >> obs.txt\n\t./nexus bif --clean 2>> obs.err  >> obs.txt\n\t./nexus bif --sys-config sysconf.json --clean 2>> obs.err  >> obs.txt\n\t./nexus bif --sys-config sysconf.json 2>> obs.err  >> obs.txt\n\t./nexus bif --sys-config sysconf.json --no-clean 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/record-docstrings/algconf.json",
    "content": "{\"m\":100,\"n\":1000}\n"
  },
  {
    "path": "test-suite/golden-tests/record-docstrings/exp.txt",
    "content": "foo\n100106\n6\n6\n11\n211\n211\n11\n6\nbar\n110\n107\n6507\n6507\ntravelTime\n24\nbaz\n[\"foo\",\"foo\"]\n[\"foo bar\",\"foo bar\"]\nbif\n\"False\"\n\"True\"\n\"True\"\n\"True\"\n\"False\"\n"
  },
  {
    "path": "test-suite/golden-tests/record-docstrings/foo.py",
    "content": "def fooFun(algconf, xs):\n    return sum(xs) + algconf[\"m\"] + algconf[\"n\"] * 100\n\ndef barFun(sysconf, algconf, yolo, xs):\n    return sum(xs) + len(yolo) + 100 * sysconf[\"numThreads\"] + 1000 * algconf[\"m\"]\n\ndef travelTime(v,n,w,b,d):\n    return w * b * d + 5 * v + n\n\ndef bif(algconf):\n    return str(algconf[\"removeCaches\"])\n"
  },
  {
    "path": "test-suite/golden-tests/record-docstrings/main.loc",
    "content": "-- A module main doing foo\n--\n-- Here is a multi-lined\n-- doc about this thing\n--\n-- author: Weena\n-- email: weena@nowhere.com\n-- github: ...\n-- bugs: ...\n-- website: ...\n-- maintainer: ...\nmodule main (\n  -- name: fun\n  -- desc: A fun group of functions\n  fooFun,\n  barFun,\n  -- name: biffy\n  -- desc: A biffy group of functions\n  baz,\n  travelTime,\n  bif\n  )\n\nimport root-py\n\n--' System configuration\n--' metavar: SYS_CONFIG\n--' unroll: true\n--' arg: --sys-config\nrecord SysConfig where\n  --' tmp directory\n  --' arg: --tmp-dir\n  --' metavar: FILE\n  --' default: \"/tmp\"\n  --' literal: true\n  tmpDir :: Str\n\n  --' number of threads to use\n  --' arg: -t/--threads\n  --' metavar: INT\n  --' default: 1\n  numThreads :: Int\n\n  --' clean out caches before running?\n  --' true:  --clean\n  --' false: --no-clean\n  --' default: false\n  removeCaches :: Bool\n\n\n--' Algorithm configuration\n--' metavar: ALG_CONFIG\n--' unroll: true\n--' arg: --alg-config\nrecord AlgConfig where\n  --' some hyper parameter\n  --' arg: -m\n  --' default: 0\n  m :: Int\n\n  --' some other hyper parameter\n  --' arg: -n/--nosy\n  --' default: 0\n  n :: Int\n\nrecord Py => SysConfig = \"dict\"\nrecord Py => AlgConfig = \"dict\"\n\nbif :: SysConfig -> Str\n\n--' Do some foo stuff\n--'\n--' Lalala more info\n--'\n--' name: foo\nfooFun ::\n    AlgConfig ->\n    --' large vector of integers\n    --' metavar: DATA\n    [Int] ->\n    --' the final metric thingy we wanted\n    --' metavar: RESULT\n    Int\n\n--' Do some bar stuff\n--'\n--' details details details\n--' more details details\n--'\n--' name: bar\nbarFun ::\n    SysConfig ->\n    AlgConfig ->\n    --' the lone parameters\n    --' arg: -y/--yolo\n    --' literal: true\n    --' default: \"yolo\"\n    Str ->\n    --' larger vector of integers\n    --' metavar: DATA\n    [Int] ->\n    --' the final metric thingy we wanted\n    --' metavar: RESULT\n    Int\n\nsource Py from \"foo.py\" (\"fooFun\", \"barFun\", \"travelTime\", \"bif\")\n\n--' baz is the dope\nbaz ::\n  --' this is the\n  --' literal: true\n  Str ->\n  --' thing i like\n  [Str]\nbaz x = [x,x]\n\n\n--' Speed of wind [m/s]\n--' arg: -w/--wind-speed\n--' default: 0\ntype WindSpeed = Real\n\n--' Speed under ideal conditions on flat surface [m/s]\ntype BaseSpeed = Real\n\n--' Final distance traveled in meters\ntype Distance = Real\n\n--' Time in seconds\ntype Seconds = Real\n\n--' Calculate travel time\n--' return: Number of seconds in the air\ntravelTime ::\n  --' add five to the result\n  --' true: --add-five\n  Bool ->\n  --' arg: --nothin\n  --' default: 0\n  Int ->\n  WindSpeed ->\n  BaseSpeed ->\n  Distance ->\n  Seconds\n"
  },
  {
    "path": "test-suite/golden-tests/record-docstrings/sysconf.json",
    "content": "{\"tmpDir\":\"tmp\",\"numThreads\":1,\"removeCaches\":true}\n"
  },
  {
    "path": "test-suite/golden-tests/records-alias/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test 42 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/records-alias/exp.txt",
    "content": "{\"x\":42}\n"
  },
  {
    "path": "test-suite/golden-tests/records-alias/foo.py",
    "content": "def make_bar(n):\n    return {\"x\": n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-alias/lib/main.loc",
    "content": "module lib (Foo)\n\nimport root-py (Int)\n\nrecord Foo where\n  x :: Int\n\nrecord Py => Foo = \"dict\"\n"
  },
  {
    "path": "test-suite/golden-tests/records-alias/main.loc",
    "content": "module main (test)\n\nimport root-py (Int)\nimport lib (Foo as Bar)\n\nsource Py from \"foo.py\" (\"make_bar\" as makeBar)\nmakeBar :: Int -> Bar\n\ntest :: Int -> Bar\ntest = makeBar\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 > obs.txt 2> obs.err\n\t./nexus bar 42 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-1/exp.txt",
    "content": "{\"shitlist\":[[\"campers who play load music\",\"earwigs\",\"tollroads\",\"nextflow\"],[0,2,1,3]],\"things\":[\"spiders\"],\"size\":42}\n[[[\"aa\"],[42]],[\"b\"],42]\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-1/foo.R",
    "content": "addShit <- function(name, level_of_shit, foo){\n    foo$shitlist[[name]] <- level_of_shit\n    foo\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-1/foo.hpp",
    "content": "#pragma once\n\n#include <string>\n#include <vector>\n\nstruct foo_t {\n    std::map<std::string, int> shitlist;\n    std::vector<std::string> things;\n    int size;\n};\n\nfoo_t addShit(std::string name, int level_of_shit, foo_t foo){\n    foo.shitlist.insert({name, level_of_shit});\n    return foo;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-1/foo.py",
    "content": "def addShit(name, level_of_shit, foo):\n    foo[\"shitlist\"][name] = level_of_shit\n    return foo\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-1/main.loc",
    "content": "-- Tests passing records of primitives across languages and accessing fields\n\nmodule main (foo, bar)\n\nimport map-py\nimport map-r\nimport map-cpp\n\nrecord Foo = Foo\n  { shitlist :: Map Str Int\n  , things :: [Str]\n  , size :: Int\n  }\nrecord Cpp => Foo = \"foo_t\"\nrecord Py  => Foo = \"dict\"\nrecord R   => Foo = \"list\"\n\nsource Py from \"foo.py\" (\"addShit\" as addShitPy)\nsource Cpp from \"foo.hpp\" (\"addShit\" as addShitCpp)\nsource R from \"foo.R\" (\"addShit\" as addShitR)\naddShitCpp :: Str -> Int -> Foo -> Foo\naddShitPy  :: Str -> Int -> Foo -> Foo\naddShitR   :: Str -> Int -> Foo -> Foo\n\nemptyMap :: Map Str Int\nemptyMap = pack ([],[])\n\n-- test interop\nfoo size =\n  ( addShitPy  \"nextflow\" 3\n  . addShitCpp \"earwigs\" 2\n  . addShitR   \"tollroads\" 1\n  . addShitPy  \"campers who play load music\" 0\n  ) { shitlist = emptyMap\n    , things = [\"spiders\"]\n    , size = size\n    }\n\n-- test access\nbar size = (.shitlist fpy, .things fcpp, .size fr) where\n    fpy = addShitPy \"aa\" 42\n      { shitlist = emptyMap\n      , things = [\"a\"]\n      , size = size\n      }\n\n    fcpp = addShitPy \"bb\" 42\n      { shitlist = emptyMap\n      , things = [\"b\"]\n      , size = size\n      }\n\n    fr = addShitR \"cc\" 42\n      { shitlist = emptyMap\n      , things = [\"c\"]\n      , size = size\n      }\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 > obs.txt 2> obs.err\n\t./nexus bar 42 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-2/exp.txt",
    "content": "{\"shitlists\":[[\"campers who play load music\",\"earwigs\",\"tollroads\",\"nextflow\"],[[[\"yolo\"],[0]],[[\"yolo\"],[2]],[[\"yolo\"],[1]],[[\"yolo\"],[3]]]],\"things\":[\"spiders\"],\"size\":42}\n[[[\"aa\"],[[[\"yolo\"],[42]]]],[\"b\"],42]\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-2/foo.R",
    "content": "addShit <- function(name, level_of_shit, foo){\n    foo$shitlists[[name]] <- list(yolo = level_of_shit)\n    foo\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-2/foo.hpp",
    "content": "#pragma once\n\n#include <string>\n#include <vector>\n\nstruct foo_t {\n    std::map<std::string, std::map<std::string,int>> shitlists;\n    std::vector<std::string> things;\n    int size;\n};\n\nfoo_t addShit(std::string name, int level_of_shit, foo_t foo){\n    std::map<std::string,int> x;\n    x.insert({\"yolo\", level_of_shit});\n    foo.shitlists.insert({name, x});\n    return foo;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-2/foo.py",
    "content": "def addShit(name, level_of_shit, foo):\n    foo[\"shitlists\"][name] = {\"yolo\" : level_of_shit}\n    return foo\n"
  },
  {
    "path": "test-suite/golden-tests/records-complex-2/main.loc",
    "content": "-- Tests passing records of primitives across languages and accessing fields\n\nmodule main (foo, bar)\n\nimport map-r\nimport map-py\nimport map-cpp\n\nrecord Foo = Foo\n  { shitlists :: Map Str (Map Str Int)\n  , things :: [Str]\n  , size :: Int\n  }\nrecord Cpp => Foo = \"foo_t\"\nrecord Py  => Foo = \"dict\"\nrecord R   => Foo = \"list\"\n\nsource Py from \"foo.py\" (\"addShit\" as addShitPy)\nsource Cpp from \"foo.hpp\" (\"addShit\" as addShitCpp)\nsource R from \"foo.R\" (\"addShit\" as addShitR)\naddShitCpp :: Str -> Int -> Foo -> Foo\naddShitPy  :: Str -> Int -> Foo -> Foo\naddShitR   :: Str -> Int -> Foo -> Foo\n\nemptyMap :: Map Str (Map Str Int)\nemptyMap = pack ([],[])\n\n-- test interop\nfoo size =\n  ( addShitPy  \"nextflow\" 3\n  . addShitCpp \"earwigs\" 2\n  . addShitR   \"tollroads\" 1\n  . addShitPy  \"campers who play load music\" 0\n  ) { shitlists = emptyMap\n    , things = [\"spiders\"]\n    , size = size\n    }\n\n-- test access\nbar size = (.shitlists fpy, .things fcpp, .size fr) where\n    fpy = addShitPy \"aa\" 42\n      { shitlists = emptyMap\n      , things = [\"a\"]\n      , size = size\n      }\n\n    fcpp = addShitPy \"bb\" 42\n      { shitlists = emptyMap\n      , things = [\"b\"]\n      , size = size\n      }\n\n    fr = addShitR \"cc\" 42\n      { shitlists = emptyMap\n      , things = [\"c\"]\n      , size = size\n      }\n"
  },
  {
    "path": "test-suite/golden-tests/records-nested/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 > obs.txt 2> obs.err\n\t./nexus bar 42 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/records-nested/exp.txt",
    "content": "{\"bars\":[{\"things\":[],\"size\":0},{\"things\":[],\"size\":1},{\"things\":[],\"size\":2},{\"things\":[],\"size\":3}],\"things\":[\"spiders\"],\"size\":42}\n[[{\"things\":[\"carrot\"],\"size\":1}],[\"b\"],42]\n"
  },
  {
    "path": "test-suite/golden-tests/records-nested/foo.R",
    "content": "addBar <- function(bar, foo){\n    foo$bars[[length(foo$bars) + 1]] <- bar\n    foo\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-nested/foo.hpp",
    "content": "#pragma once\n\n#include <string>\n#include <vector>\n\nstruct bar_t {\n    std::vector<std::string> things;\n    int size;\n};\n\nstruct foo_t {\n    std::vector<bar_t> bars;\n    std::vector<std::string> things;\n    int size;\n};\n\n\nfoo_t addBar(bar_t bar, foo_t foo){\n    foo.bars.push_back(bar);\n    return foo;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-nested/foo.py",
    "content": "def addBar(bar, foo):\n    foo[\"bars\"].append(bar)\n    return foo\n"
  },
  {
    "path": "test-suite/golden-tests/records-nested/main.loc",
    "content": "-- Tests passing records of primitives across languages and accessing fields\n\nmodule main (foo, bar)\n\nimport root\nimport root-r\nimport root-py\nimport root-cpp\n\nrecord Foo where\n  bars :: [Bar]\n  things :: [Str]\n  size :: Int\n\nrecord Cpp => Foo = \"foo_t\"\nrecord Py  => Foo = \"dict\"\nrecord R   => Foo = \"list\"\n\nrecord Bar where\n  things :: [Str] -- note that record names CAN be reused (not like in Haskell)\n  size :: Int\n\nrecord Cpp => Bar = \"bar_t\"\nrecord Py  => Bar = \"dict\"\nrecord R   => Bar = \"list\"\n\nsource Py from \"foo.py\" (\"addBar\" as addBarPy)\nsource Cpp from \"foo.hpp\" (\"addBar\" as addBarCpp)\nsource R from \"foo.R\" (\"addBar\" as addBarR)\naddBarCpp :: Bar -> Foo -> Foo\naddBarPy  :: Bar -> Foo -> Foo\naddBarR   :: Bar -> Foo -> Foo\n\n\n-- test interop\nfoo size =\n  ( addBarPy  { things = [], size = 3}\n  . addBarCpp { things = [], size = 2}\n  . addBarR   { things = [], size = 1}\n  . addBarPy  { things = [], size = 0}\n  ) { bars = []\n    , things = [\"spiders\"]\n    , size = size\n    }\n\n-- test access\nbar size = (.bars fpy, .things fcpp, .size fr) where\n    fpy = addBarPy { things = [\"carrot\"], size = 1}\n      { bars = []\n      , things = [\"a\"]\n      , size = size\n      }\n\n    fcpp = addBarCpp { things = [\"stick\"], size = 2}\n      { bars = []\n      , things = [\"b\"]\n      , size = size\n      }\n\n    fr = addBarR { things = [\"winch\"], size = 3}\n      { bars = []\n      , things = [\"c\"]\n      , size = size\n      }\n"
  },
  {
    "path": "test-suite/golden-tests/records-primitive/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 > obs.txt 2> obs.err\n\t./nexus bar 42 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/records-primitive/exp.txt",
    "content": "{\"flooz\":[0,1,2,3],\"things\":[\"spiders\"],\"size\":42}\n[[42],42]\n"
  },
  {
    "path": "test-suite/golden-tests/records-primitive/foo.R",
    "content": "addFlooz <- function(flooz, foo){\n    foo$flooz <- append(foo$flooz, flooz)\n    foo\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-primitive/foo.hpp",
    "content": "#pragma once\n\n#include <string>\n#include <vector>\n\nstruct foo_t {\n    std::vector<int> flooz;\n    std::vector<std::string> things;\n    int size;\n};\n\nfoo_t addFlooz(int flooz, foo_t foo){\n    foo.flooz.push_back(flooz);\n    return foo;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/records-primitive/foo.py",
    "content": "def addFlooz(flooz, foo):\n    foo[\"flooz\"].append(flooz)\n    return foo\n"
  },
  {
    "path": "test-suite/golden-tests/records-primitive/main.loc",
    "content": "-- Tests passing records of primitives across languages and accessing fields\n\nmodule main (foo, bar)\n\nimport root ((.))\nimport root-py  (Str, Int, Tuple3, List)\nimport root-cpp (Str, Int, Tuple3, List)\nimport root-r   (Str, Int, Tuple3, List)\n\nrecord Foo where\n  flooz :: [Int]\n  things :: [Str]\n  size :: Int\nrecord Cpp => Foo = \"foo_t\"\nrecord R   => Foo = \"list\"\nrecord Py  => Foo = \"dict\"\n\nsource Py from \"foo.py\" (\"addFlooz\" as addFloozPy)\nsource Cpp from \"foo.hpp\" (\"addFlooz\" as addFloozCpp)\nsource R from \"foo.R\" (\"addFlooz\" as addFloozR)\naddFloozCpp :: Int -> Foo -> Foo\naddFloozPy  :: Int -> Foo -> Foo\naddFloozR   :: Int -> Foo -> Foo\n\n-- test interop\nfoo size =\n  ( addFloozPy  3\n  . addFloozCpp 2\n  . addFloozR   1\n  . addFloozPy  0\n  ) { flooz = []\n    , things = [\"spiders\"]\n    , size = size\n    }\n\n-- test access\nbar size = (.flooz fpy, .size fr) where\n    fpy = addFloozPy 42\n      { flooz = []\n      , things = [\"a\"]\n      , size = size\n      }\n\n    fr = addFloozR 42\n      { flooz = []\n      , things = [\"c\"]\n      , size = size\n      }\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-py-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-py-cpp/cpp_helpers.hpp",
    "content": "int cpp_sub(int a, int b) {\n    return a - b;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-py-cpp/exp.txt",
    "content": "1\n1\n120\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-py-cpp/main.loc",
    "content": "module main (fact)\n\nimport root-py\nimport root-cpp\n\nsource Py from \"py_helpers.py\" (\"py_mul\" as mul)\nmul :: Int -> Int -> Int\n\nsource Cpp from \"cpp_helpers.hpp\" (\"cpp_sub\" as sub)\nsub :: Int -> Int -> Int\n\nfact :: Int -> Int\nfact n\n  ? n == 0 = 1\n  : mul n (fact (sub n 1))\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-py-cpp/py_helpers.py",
    "content": "def py_mul(a, b):\n    return a * b\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-r-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-r-cpp/cpp_helpers.hpp",
    "content": "int cpp_sub(int a, int b) {\n    return a - b;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-r-cpp/exp.txt",
    "content": "1\n1\n120\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-r-cpp/main.loc",
    "content": "module main (fact)\n\nimport root-r\nimport root-cpp\n\nsource R from \"r_helpers.R\" (\"r_mul\" as mul)\nmul :: Int -> Int -> Int\n\nsource Cpp from \"cpp_helpers.hpp\" (\"cpp_sub\" as sub)\nsub :: Int -> Int -> Int\n\nfact :: Int -> Int\nfact n\n  ? n == 0 = 1\n  : mul n (fact (sub n 1))\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-cross-r-cpp/r_helpers.R",
    "content": "r_mul <- function(a, b) {\n    a * b\n}\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\t./nexus fact 10 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-cpp/exp.txt",
    "content": "1\n1\n120\n3628800\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-cpp/main.loc",
    "content": "module main (fact)\n\nimport root-cpp\n\nfact :: Int -> Int\nfact n\n  ? n == 0 = 1\n  : n * fact (n - 1)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\t./nexus fact 10 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-py/exp.txt",
    "content": "1\n1\n120\n3628800\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-py/main.loc",
    "content": "module main (fact)\n\nimport root-py\n\nfact :: Int -> Int\nfact n\n  ? n == 0 = 1\n  : n * fact (n - 1)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\t./nexus fact 10 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-r/exp.txt",
    "content": "1\n1\n120\n3628800\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-direct-r/main.loc",
    "content": "module main (fact)\n\nimport root-r\n\nfact :: Int -> Int\nfact n\n  ? n == 0 = 1\n  : n * fact (n - 1)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-helper-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\t./nexus fact 10 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-helper-cpp/exp.txt",
    "content": "1\n1\n120\n3628800\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-helper-cpp/main.loc",
    "content": "module main (fact)\n\nimport root-cpp\n\n-- exported wrapper delegates to non-exported recursive helper\nfact :: Int -> Int\nfact n = factHelper n 1\n\nfactHelper :: Int -> Int -> Int\nfactHelper n acc\n  ? n == 0 = acc\n  : factHelper (n - 1) (acc * n)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-helper-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus fact 0 > obs.txt 2> obs.err\n\t./nexus fact 1 >> obs.txt 2>> obs.err\n\t./nexus fact 5 >> obs.txt 2>> obs.err\n\t./nexus fact 10 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-helper-py/exp.txt",
    "content": "1\n1\n120\n3628800\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-helper-py/main.loc",
    "content": "module main (fact)\n\nimport root-py\n\n-- exported wrapper delegates to non-exported recursive helper\nfact :: Int -> Int\nfact n = factHelper n 1\n\nfactHelper :: Int -> Int -> Int\nfactHelper n acc\n  ? n == 0 = acc\n  : factHelper (n - 1) (acc * n)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus isEven 0 > obs.txt 2> obs.err\n\t./nexus isEven 1 >> obs.txt 2>> obs.err\n\t./nexus isEven 4 >> obs.txt 2>> obs.err\n\t./nexus isOdd 0 >> obs.txt 2>> obs.err\n\t./nexus isOdd 3 >> obs.txt 2>> obs.err\n\t./nexus isOdd 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-cpp/exp.txt",
    "content": "true\nfalse\ntrue\nfalse\ntrue\nfalse\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-cpp/main.loc",
    "content": "module main (isEven, isOdd)\n\nimport root-cpp\n\nisEven :: Int -> Bool\nisEven n\n  ? n == 0 = True\n  : isOdd (n - 1)\n\nisOdd :: Int -> Bool\nisOdd n\n  ? n == 0 = False\n  : isEven (n - 1)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus isEven 0 > obs.txt 2> obs.err\n\t./nexus isEven 1 >> obs.txt 2>> obs.err\n\t./nexus isEven 4 >> obs.txt 2>> obs.err\n\t./nexus isOdd 0 >> obs.txt 2>> obs.err\n\t./nexus isOdd 3 >> obs.txt 2>> obs.err\n\t./nexus isOdd 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-py/exp.txt",
    "content": "true\nfalse\ntrue\nfalse\ntrue\nfalse\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-py/main.loc",
    "content": "module main (isEven, isOdd)\n\nimport root-py\n\nisEven :: Int -> Bool\nisEven n\n  ? n == 0 = True\n  : isOdd (n - 1)\n\nisOdd :: Int -> Bool\nisOdd n\n  ? n == 0 = False\n  : isEven (n - 1)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus isEven 0 > obs.txt 2> obs.err\n\t./nexus isEven 1 >> obs.txt 2>> obs.err\n\t./nexus isEven 4 >> obs.txt 2>> obs.err\n\t./nexus isOdd 0 >> obs.txt 2>> obs.err\n\t./nexus isOdd 3 >> obs.txt 2>> obs.err\n\t./nexus isOdd 4 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-r/exp.txt",
    "content": "true\nfalse\ntrue\nfalse\ntrue\nfalse\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-mutual-r/main.loc",
    "content": "module main (isEven, isOdd)\n\nimport root-r\n\nisEven :: Int -> Bool\nisEven n\n  ? n == 0 = True\n  : isOdd (n - 1)\n\nisOdd :: Int -> Bool\nisOdd n\n  ? n == 0 = False\n  : isEven (n - 1)\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-helper-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus sumDown 0 > obs.txt 2> obs.err\n\t./nexus sumDown 1 >> obs.txt 2>> obs.err\n\t./nexus sumDown 3 >> obs.txt 2>> obs.err\n\t./nexus sumDown 5 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-helper-cpp/exp.txt",
    "content": "0\n2\n12\n30\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-helper-cpp/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\nint double_val(int x) {\n    return x * 2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-helper-cpp/main.loc",
    "content": "-- Test non-exported recursive helper returning an effect type.\n-- sumDown n = 0 + 1 + 2 + ... + n (each term doubled by sourced fn)\n\nmodule main (sumDown)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"double_val\")\n\ntype Cpp => Int = \"int\"\n\ndouble_val :: Int -> <IO> Int\n\n-- Exported wrapper delegates to non-exported recursive helper\nsumDown :: Int -> <IO> Int\nsumDown n = helper n\n\n-- Non-exported recursive helper returning <IO> Int\nhelper :: Int -> <IO> Int\nhelper n\n  ? n <= 0 = double_val 0\n  : do\n      x <- double_val n\n      rest <- helper (n - 1)\n      x + rest\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus sumDown 0 > obs.txt 2> obs.err\n\t./nexus sumDown 1 >> obs.txt 2>> obs.err\n\t./nexus sumDown 3 >> obs.txt 2>> obs.err\n\t./nexus sumDown 5 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-py/exp.txt",
    "content": "0\n2\n12\n30\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-py/foo.py",
    "content": "def double(x):\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/recursion-thunk-py/main.loc",
    "content": "-- Test exported recursive function returning an effect type.\n-- sumDown n = 0 + 1 + 2 + ... + n (each term doubled by sourced fn)\n\nmodule main (sumDown)\n\nimport root-py\n\nsource Py from \"foo.py\" (\"double\")\n\ndouble :: Int -> <IO> Int\n\nsumDown :: Int -> <IO> Int\nsumDown n\n  ? n <= 0 = double 0\n  : do\n      x <- double n\n      rest <- sumDown (n - 1)\n      x + rest\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-1/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-1/w.loc",
    "content": "module main (f)\n\nk = 42\n\n-- a function can use terms from the global scope\nf = k\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-10/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus high.loc 2> build.err\n\t./nexus foo [99,2] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-10/exp.txt",
    "content": "99\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-10/high.loc",
    "content": "module high (foo)\n\nimport low\n\nsource Py from \"high.py\" (\"fst\", \"snd\")\n\ntype Py => Int = \"int\"\ntype Py => Tuple2 a b = \"tuple\" a b\n\nfoo :: (Int, Int) -> Int\nfoo = uncurry const\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-10/high.py",
    "content": "def fst(x):\n    return x[0]\n\ndef snd(x):\n    return x[1]\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-10/low.loc",
    "content": "module low (*)\n\nfst :: (a, b) -> a\nsnd :: (a, b) -> b\n\nconst :: a -> b -> a\nconst a b = a\n\nuncurry :: (a -> b -> c) -> (a, b) -> c\nuncurry fabc ab = fabc (fst ab) (snd ab) \n"
  },
  {
    "path": "test-suite/golden-tests/scoping-11/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-11/exp.txt",
    "content": "\"hellosh\"\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-11/main.loc",
    "content": "module second (foo)\nfoo = \"hellosh\"  \n\n\nmodule main (*)\nimport second\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-12/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-12/exp.txt",
    "content": "4\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-12/foo.py",
    "content": "def morloc_add (x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-12/main.loc",
    "content": "-- base module defines a general typeclass and function using it\nmodule foo (*)\nclass Addable a where\n    add :: a -> a -> a\ninc :: Int -> Int\ninc = add 1\n\n-- language-specific module adds an implementation\nmodule foo-py (*)\nimport foo \ntype Py => Int = \"int\"\ninstance Addable Int where\n    source Py from \"foo.py\" (\"morloc_add\" as add)\n\n-- 3rd party module uses a function depending on add but does NOT explicitly use add\nmodule main (foo)\nimport foo-py\nfoo x = inc (inc x)\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/core.hpp",
    "content": "#ifndef __MORLOC__CORE_HPP__\n#define __MORLOC__CORE_HPP__\n\n#include <utility>\n\n// fst      :: forall a b . (a, b) -> a ;\ntemplate <class A, class B>\nA morloc_fst(std::tuple<A,B> x){\n    return(std::get<0>(x));\n}\n\ntemplate <class A>\nbool morloc_eq(A x, A y){\n   return x == y;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/exp.txt",
    "content": "true\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/foo/main.loc",
    "content": "module foo (*)\n\nclass Eq a where\n  eq :: a -> a -> Bool\n\nfst :: (a, b) -> a\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/foo/test/bar.py",
    "content": "import sys\n\ndef runTest(xs):\n    if(all(xs)):\n        print(\"success\", file=sys.stderr)\n        return True\n    else:\n        print(\"fail\", file=sys.stderr)\n        return False\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/foo/test/main.loc",
    "content": "module foo.test (test)\n\n-- import everything from foo (functions this module tests)\nimport foo\n\ntype Py => Int = \"int\"\ntype Py => Bool = \"bool\"\ntype Py => List a = \"list\" a\ntype Py => Tuple2 a b = \"tuple\" a b\n\nsource Py from \"bar.py\" (\"runTest\")\nrunTest :: [Bool] -> Bool\n\ntest :: Bool\ntest = runTest\n  [ eq 0 (fst (0,1))\n  , eq 1 (fst (1,0))\n  ]\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/foo-cpp.loc",
    "content": "module foo-cpp (*)\n\nimport foo \n\ntype Cpp => Filename = \"std::string\"\ntype Cpp => Unit = \"mlc::Unit\" -- this is an enum with a single element, NOT `void`, which corresponds to bottom\ntype Cpp => Real = \"double\"\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => Bool = \"bool\"\ntype Cpp => (Map a b) = \"std::map<$1,$2>\" a b\ntype Cpp => (List a) = \"std::vector<$1>\" a\ntype Cpp => (Tuple2 a b) = \"std::tuple<$1,$2>\" a b\ntype Cpp => (Tuple3 a b c) = \"std::tuple<$1,$2,$3>\" a b c\n\nsource Cpp from \"core.hpp\"\n    ( \"morloc_fst\" as fst\n    )\n\ninstance Eq a where\n  source Cpp from \"core.hpp\" (\"morloc_eq\" as eq)\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-13/main.loc",
    "content": "module main (test)\n\nimport foo-cpp\nimport foo.test (test)\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f 42 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-2/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-2/w.loc",
    "content": "module main (f)\n\nm = 20\n\n-- a function's parameters may shadow terms from the global scope\nf :: Int -> Int\nf m = m\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-3/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-3/w.loc",
    "content": "module main (f)\n\n-- a function may use terms from its where-scope\nf = x where\n    x = 42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-4/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-4/w.loc",
    "content": "module main (f)\n\nimport root-py\n\n-- terms in the function's where-scope are unordered and share scope\nf = x where\n    a = 1.0\n    x = a + b\n    b = 41.0\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-5/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-5/w.loc",
    "content": "module main (f)\n\nimport root-py\n\n-- where scopes may be nested\nf = x where\n    x = y where\n        y = 42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-6/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-6/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-6/w.loc",
    "content": "module main (f)\n\nimport root-py\n\n-- where-scopes inherit the scope of their parent\nf = x where\n   x = y where\n       y = a + b\n       a = 1.0\n   b = 41.0\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-7/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-7/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-7/w.loc",
    "content": "module main (f)\n\nimport root-py\n\n-- where-scopes inherit the scope of all their ancestors\nf = x where\n   x = y where\n       y = z where\n           z = c + a + b\n           a = 1.0\n       b = 2.0\n   c = 39.0\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-8/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-8/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-8/w.loc",
    "content": "module main (f)\n\nimport root-py\n\n-- cousins do not interfere with one another\nf = x where\n   x = a + b\n   a = 19.0 + c where\n       c = 1.0\n   b = 1.0 + c where\n       c = 21.0\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-9/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus w.loc 2> build.err\n\t./nexus f > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-9/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/scoping-9/w.loc",
    "content": "module main (f)\n\nimport root-py\n\n-- functions with or without signatures may be defined in the where-scope\nf = bar (bif 2.0) where\n\n   bif x = x + 10.0\n\n   bar :: Real -> Real\n   bar x = x + 30.0\n"
  },
  {
    "path": "test-suite/golden-tests/selection-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 1 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/selection-1/exp.txt",
    "content": "45\n"
  },
  {
    "path": "test-suite/golden-tests/selection-1/main.loc",
    "content": "module main (foo)\n\n-- A function may be imported from multiple modules. The compiler is\n-- responsible for deciding which of the available implementations to use.\n--\n-- In the case below, C++ should be selected for both `add` instances, since\n-- C++ is the faster language.\n\nimport root-py\nimport root-cpp\n\nfoo x y = x + y + 42.0\n"
  },
  {
    "path": "test-suite/golden-tests/selection-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 1 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/selection-2/arithmetic/main.loc",
    "content": "module arithmetic (*)\n\n-- Imports multiple instances and exports all of them. I expect\n-- this will be a very common pattern.\n\nimport root-py (Integral, Numeric)\nimport root-cpp (Integral, Numeric)\nimport root-r (Integral, Numeric)\n"
  },
  {
    "path": "test-suite/golden-tests/selection-2/exp.txt",
    "content": "45\n"
  },
  {
    "path": "test-suite/golden-tests/selection-2/main.loc",
    "content": "module main (foo)\n\n-- This should compile to exactly the same code as selection-1.\n-- The only difference is that it uses an extra module, Arithmetic, to abstract\n-- away the concrete language imports, this is likely to be a common pattern.\n\nimport arithmetic\n\nfoo x y = x + y + 42.0\n"
  },
  {
    "path": "test-suite/golden-tests/selection-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,3,6] [7,7,7] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/selection-3/exp.txt",
    "content": "10.9157800414902\n"
  },
  {
    "path": "test-suite/golden-tests/selection-3/main.loc",
    "content": "module main (foo)\n\ntype R => Real = \"numeric\"\ntype R => List a = \"list\" a\n\n-- NOTE: previously I also included the rms2 function, however, the compiler\n-- does not yet have a way to choose which to use.\nsource R from \"rms.R\" (\"rms1\" as rms, \"add\") \nrms :: [Real] -> Real\nadd :: Real -> Real -> Real\n\nfoo xs ys = add (rms xs) (rms ys)\n"
  },
  {
    "path": "test-suite/golden-tests/selection-3/rms.R",
    "content": "rms1 <- function(xs){\n  sqrt ( sum(xs ^ 2) / length(xs) )\n}\n\nrms2 <- function(xs){\n  sqrt (mean (xs ^ 2))\n}\n\nadd <- function(x, y){\n  x + y\n}\n"
  },
  {
    "path": "test-suite/golden-tests/selection-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 1 2 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/selection-4/exp.txt",
    "content": "45\n"
  },
  {
    "path": "test-suite/golden-tests/selection-4/main.loc",
    "content": "-- This should compile to exactly the same code as selection-2. Expect the\n-- Arithmetic module is in the same file.\n\nmodule arithmetic (*)\nimport root (Real, Int)\nimport root-r (Integral)\nimport root-py (Integral)\nimport root-cpp (Integral)\n\nmodule main (foo)\nimport arithmetic (Integral)\nfoo x y = x + y + 42.0\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"Bob\",\"info\":\"charming\"}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-c/exp.txt",
    "content": "{\"name\":\"Bob\",\"info\":\"charming\"}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp (idcpp, Str)\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord cpp => (Person a) = \"struct\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Str -> Person Str\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"Bob\",\"info\":\"charming\"}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-py/exp.txt",
    "content": "{\"name\":\"Bob\",\"info\":\"charming\"}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord py => (Person a) = \"dict\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Str -> Person Str\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"Bob\",\"info\":\"charming\"}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-r/exp.txt",
    "content": "{\"name\":\"Bob\",\"info\":\"charming\"}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-10-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord R => (Person a) = \"list\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Str -> Person Str\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":[[\"a\",\"b\"],[1,2]]}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-c/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":[[\"a\",\"b\"],[1,2]]}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-c/main.loc",
    "content": "module main (foo)\n\nimport map-cpp (idcpp, Map, Str, Int, Tuple2, List)\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord cpp => (Person a) = \"struct\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person (Map Str Int) -> Person (Map Str Int)\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":[[\"a\",\"b\"],[1,2]]}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-py/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":[[\"a\",\"b\"],[1,2]]}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-py/main.loc",
    "content": "module main (foo)\n\nimport map-py\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord py => (Person a) = \"dict\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person (Map Str Int) -> Person (Map Str Int)\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":[[\"a\",\"b\"],[1,2]]}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-r/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":[[\"a\",\"b\"],[1,2]]}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-11-r/main.loc",
    "content": "module main (foo)\n\nimport map-r\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord R => (Person a) = \"list\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person (Map Str Int) -> Person (Map Str Int)\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":{\"name\":\"bob\",\"info\":42}}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-c/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":{\"name\":\"bob\",\"info\":42}}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp (idcpp, Int, Str)\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord cpp => (Person a) = \"struct\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person (Person Int) -> Person (Person Int)\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":{\"name\":\"bob\",\"info\":42}}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-py/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":{\"name\":\"bob\",\"info\":42}}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord py => (Person a) = \"dict\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person (Person Int) -> Person (Person Int)\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":\"alice\",\"info\":{\"name\":\"bob\",\"info\":42}}' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-r/exp.txt",
    "content": "{\"name\":\"alice\",\"info\":{\"name\":\"bob\",\"info\":42}}\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-12-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord R => (Person a) = \"list\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person (Person Int) -> Person (Person Int)\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"b\",\"a\"],[55,42]]' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-c/exp.txt",
    "content": "[[\"a\",\"b\"],[42,55]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-c/main.loc",
    "content": "module main (foo)\n\nimport map-cpp\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str Int -> Map Str Int\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"b\",\"a\"],[55,42]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-py/exp.txt",
    "content": "[[\"b\",\"a\"],[55,42]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-py/main.loc",
    "content": "module main (foo)\n\nimport map-py\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str Int -> Map Str Int\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"b\",\"a\"],[55,42]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-r/exp.txt",
    "content": "[[\"b\",\"a\"],[55,42]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-2-r/main.loc",
    "content": "module main (foo)\n\nimport map-r\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str Int -> Map Str Int\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"b\",55],[\"a\",42]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-c/exp.txt",
    "content": "[[\"b\",55],[\"a\",42]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp (idcpp, Int, Str, List, Tuple2)\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo     :: [(Str, Int)] -> [(Str, Int)]\nfoo     :: [(Str, Int)] -> [(Str, Int)]\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"b\",55],[\"a\",42]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-py/exp.txt",
    "content": "[[\"b\",55],[\"a\",42]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [(Str, Int)] -> [(Str, Int)]\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"b\",55],[\"a\",42]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-r/exp.txt",
    "content": "[[\"b\",55],[\"a\",42]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-4-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [(Str, Int)] -> [(Str, Int)]\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[[\"a\",\"b\"],[1,5]],[[\"c\",\"d\"],[2,6]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-c/exp.txt",
    "content": "[[[\"a\",\"b\"],[1,5]],[[\"c\",\"d\"],[2,6]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-c/main.loc",
    "content": "module main (foo)\n\nimport map-cpp (idcpp, Map, Int, Str, List, Tuple2)\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [Map Str Int] -> [Map Str Int]\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[[\"a\",\"b\"],[1,5]],[[\"c\",\"d\"],[2,6]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-py/exp.txt",
    "content": "[[[\"a\",\"b\"],[1,5]],[[\"c\",\"d\"],[2,6]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-py/main.loc",
    "content": "module main (foo)\n\nimport map-py\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [Map Str Int] -> [Map Str Int]\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[[\"a\",\"b\"],[1,5]],[[\"c\",\"d\"],[2,6]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-r/exp.txt",
    "content": "[[[\"a\",\"b\"],[1,5]],[[\"c\",\"d\"],[2,6]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-5-r/main.loc",
    "content": "module main (foo)\n\nimport map-r\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [Map Str Int] -> [Map Str Int]\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[{\"name\":\"alice\",\"info\":42},{\"name\":\"bob\",\"info\":43}]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-c/exp.txt",
    "content": "[{\"name\":\"alice\",\"info\":42},{\"name\":\"bob\",\"info\":43}]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp (idcpp, Str, Int, List)\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord Cpp => Person a = \"struct\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [Person Int] -> [Person Int]\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[{\"name\":\"alice\",\"info\":42},{\"name\":\"bob\",\"info\":43}]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-py/exp.txt",
    "content": "[{\"name\":\"alice\",\"info\":42},{\"name\":\"bob\",\"info\":43}]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord Py  => Person a = \"dict\"   {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [Person Int] -> [Person Int]\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[{\"name\":\"alice\",\"info\":42},{\"name\":\"bob\",\"info\":43}]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-r/exp.txt",
    "content": "[{\"name\":\"alice\",\"info\":42},{\"name\":\"bob\",\"info\":43}]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-6-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord R   => Person a = \"list\"   {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: [Person Int] -> [Person Int]\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[[1,2,3],[4,5,6]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-c/exp.txt",
    "content": "[[\"a\",\"b\"],[[1,2,3],[4,5,6]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-c/main.loc",
    "content": "module main (foo)\n\nimport map-cpp (idcpp, Map, Str, List, Int, Tuple2)\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str [Int] -> Map Str [Int]\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[[1,2,3],[4,5,6]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-py/exp.txt",
    "content": "[[\"a\",\"b\"],[[1,2,3],[4,5,6]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-py/main.loc",
    "content": "module main (foo)\n\nimport map-py\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str [Int] -> Map Str [Int]\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[[1,2,3],[4,5,6]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-r/exp.txt",
    "content": "[[\"a\",\"b\"],[[1,2,3],[4,5,6]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-7-r/main.loc",
    "content": "module main (foo)\n\nimport map-r\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str [Int] -> Map Str [Int]\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[[[3,4],[2.48,1.2]],[[1,2],[1.2,2.48]]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-c/exp.txt",
    "content": "[[\"a\",\"b\"],[[[3,4],[2.48,1.2]],[[1,2],[1.2,2.48]]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-c/main.loc",
    "content": "module main (foo)\n\nimport map-cpp (idcpp, Map, Str, Int, Real, Tuple2, List)\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str (Map Int Real) -> Map Str (Map Int Real)\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[[[3,4],[2.48,1.2]],[[1,2],[1.2,2.48]]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-py/exp.txt",
    "content": "[[\"a\",\"b\"],[[[3,4],[2.48,1.2]],[[1,2],[1.2,2.48]]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-py/main.loc",
    "content": "module main (foo)\n\nimport map-py\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str (Map Int Real) -> Map Str (Map Int Real)\n\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[[[\"3\",\"4\"],[2.48,1.2]],[[\"1\",\"2\"],[1.2,2.48]]]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-r/exp.txt",
    "content": "[[\"a\",\"b\"],[[[\"3\",\"4\"],[2.48,1.2]],[[\"1\",\"2\"],[1.2,2.48]]]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-8-r/main.loc",
    "content": "module main (foo)\n\nimport map-r\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str (Map Str Real) -> Map Str (Map Str Real)\n\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[{\"name\":\"alice\",\"info\":2},{\"name\":\"bob\",\"info\":3}]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-c/exp.txt",
    "content": "[[\"a\",\"b\"],[{\"name\":\"alice\",\"info\":2},{\"name\":\"bob\",\"info\":3}]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-c/main.loc",
    "content": "module main (foo)\n\nimport map-cpp (idcpp, Map, Int, Str, List, Tuple2)\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord cpp => (Person a) = \"struct\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str (Person Int) -> Map Str (Person Int)\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[{\"name\":\"alice\",\"info\":2},{\"name\":\"bob\",\"info\":3}]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-py/exp.txt",
    "content": "[[\"a\",\"b\"],[{\"name\":\"alice\",\"info\":2},{\"name\":\"bob\",\"info\":3}]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-py/main.loc",
    "content": "module main (foo)\n\nimport map-py\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord py => (Person a) = \"dict\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str (Person Int) -> Map Str (Person Int)\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[[\"a\",\"b\"],[{\"name\":\"alice\",\"info\":2},{\"name\":\"bob\",\"info\":3}]]' | sed 's/ //g' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-r/exp.txt",
    "content": "[[\"a\",\"b\"],[{\"name\":\"alice\",\"info\":2},{\"name\":\"bob\",\"info\":3}]]\n"
  },
  {
    "path": "test-suite/golden-tests/serial-form-9-r/main.loc",
    "content": "module main (foo)\n\nimport map-r\n\nrecord (Person a) = Person {name :: Str, info :: a}\nrecord R => (Person a) = \"list\" {name :: Str, info :: a}\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Map Str (Person Int) -> Map Str (Person Int)\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/slurm-label-codegen/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 5 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/slurm-label-codegen/bar.py",
    "content": "def bar(x):\n    return x + 1\n"
  },
  {
    "path": "test-suite/golden-tests/slurm-label-codegen/exp.txt",
    "content": "6\n"
  },
  {
    "path": "test-suite/golden-tests/slurm-label-codegen/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nsource Py from \"bar.py\" (\"bar\")\n\nbar :: Int -> Int\n\nfoo :: Int -> Int\nfoo x = heavy:bar x\n"
  },
  {
    "path": "test-suite/golden-tests/slurm-label-codegen/main.yaml",
    "content": "labeled-groups:\n  heavy:\n    cache: false\n    benchmark: false\n"
  },
  {
    "path": "test-suite/golden-tests/source-old-op-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test_add 3 4 > obs.txt 2> obs.err\n\t./nexus test_sub 10 3 >> obs.txt 2>> obs.err\n\t./nexus test_mul 5 6 >> obs.txt 2>> obs.err\n\t./nexus test_neg 42 >> obs.txt 2>> obs.err\n\t./nexus test_expr >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/source-old-op-py/exp.txt",
    "content": "7\n7\n30\n-42\n13\n"
  },
  {
    "path": "test-suite/golden-tests/source-old-op-py/foo.py",
    "content": "def neg(x):\n    return -x\n\ndef abs_val(x):\n    return abs(x)\n"
  },
  {
    "path": "test-suite/golden-tests/source-old-op-py/main.loc",
    "content": "-- Test bare operators in old-style source declarations\nmodule main (test_add, test_sub, test_mul, test_neg, test_expr)\n\ntype Py => Int = \"int\"\n\nclass Arith a where\n  neg :: a -> a\n  (+) :: a -> a -> a\n  (-) :: a -> a -> a\n  (*) :: a -> a -> a\n\ninfixl 6 +, -\ninfixl 7 *\n\n-- Test: bare operators (+), (-), (*) and op-to-name alias (+) as add_fn\ninstance Arith Int where\n  %inline source Py from \"foo.py\"\n    ( \"neg\" as neg\n    , (+), (-), (*)\n    )\n\ntest_add :: Int -> Int -> Int\ntest_add x y = x + y\n\ntest_sub :: Int -> Int -> Int\ntest_sub x y = x - y\n\ntest_mul :: Int -> Int -> Int\ntest_mul x y = x * y\n\ntest_neg :: Int -> Int\ntest_neg x = neg x\n\n-- (3 + 4) * 2 - 1 = 13\ntest_expr :: Int\ntest_expr = (3 + 4) * 2 - 1\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-c/exp.txt",
    "content": "[1,2,3,10,12,14,16,18]\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-c/foo.hpp",
    "content": "#include <stack>\n#include <list>\n#include <forward_list>\n#include <deque>\n#include <queue>\n#include <vector>\n#include <algorithm>\n\n\n// Stack to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::stack<T>& xs) {\n    std::vector<T> result;\n    std::stack<T> temp = xs;  // Create a temporary stack to preserve the original\n    result.reserve(temp.size());\n    while (!temp.empty()) {\n        result.push_back(temp.top());\n        temp.pop();\n    }\n    std::reverse(result.begin(), result.end());  // Reverse to maintain original order\n    return result;\n}\n\n// List to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::list<T>& xs) {\n    return std::vector<T>(xs.begin(), xs.end());\n}\n\n// Forward List to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::forward_list<T>& xs) {\n    return std::vector<T>(xs.begin(), xs.end());\n}\n\n// Deque to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::deque<T>& xs) {\n    return std::vector<T>(xs.begin(), xs.end());\n}\n\n// Queue to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(std::queue<T> xs) {  // Note: pass by value to allow modification\n    std::vector<T> result;\n    result.reserve(xs.size());\n    while (!xs.empty()) {\n        result.push_back(std::move(xs.front()));\n        xs.pop();\n    }\n    return result;\n}\n\n\n\n\ntemplate <typename T>\nstd::stack<T> vectorToStack(const std::vector<T>& xs) {\n    return std::stack<T>(std::deque<T>(xs.begin(), xs.end()));\n}\n\ntemplate <typename T>\nstd::list<T> vectorToList(const std::vector<T>& xs) {\n    return std::list<T>(xs.begin(), xs.end());\n}\n\ntemplate <typename T>\nstd::forward_list<T> vectorToForwardList(const std::vector<T>& xs) {\n    return std::forward_list<T>(xs.begin(), xs.end());\n}\n\ntemplate <typename T>\nstd::deque<T> vectorToDeque(const std::vector<T>& xs) {\n    return std::deque<T>(xs.begin(), xs.end());\n}\n\ntemplate <typename T>\nstd::queue<T> vectorToQueue(const std::vector<T>& xs) {\n    return std::queue<T>(std::deque<T>(xs.begin(), xs.end()));\n}\n\n\ntemplate <typename T>\nT& id(const T& x) {\n    return x;\n}\n\n// Vector\ntemplate <typename T>\nstd::vector<T> append(T element, std::vector<T> container) {\n    container.push_back(element);\n    return container;\n}\n\n// List\ntemplate <typename T>\nstd::list<T> append(T element, std::list<T> container) {\n    container.push_back(element);\n    return container;\n}\n\n// Forward List - a horribly dodgy thing to do in a forward list\ntemplate <typename T>\nstd::forward_list<T> append(T element, std::forward_list<T> container) {\n    if (container.empty()) {\n        container.push_front(std::move(element));\n    } else {\n        auto it = container.before_begin();\n        auto end = container.end();\n        while (std::next(it) != end) {\n            ++it;\n        }\n        container.insert_after(it, std::move(element));\n    }\n    return container;\n}\n\n// Deque\ntemplate <typename T>\nstd::deque<T> append(T element, std::deque<T> container) {\n    container.push_back(element);\n    return container;\n}\n\n// Stack\ntemplate <typename T>\nstd::stack<T> append(T element, std::stack<T> container) {\n    container.push(element);\n    return container;\n}\n\n// Queue\ntemplate <typename T>\nstd::queue<T> append(T element, std::queue<T> container) {\n    container.push(element);\n    return container;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp ((.), Int, List)\n\n\n-- aliases at the general level\ntype Stack'       a = List a\ntype List'        a = List a\ntype ForwardList' a = List a\ntype Deque'       a = List a\ntype Queue'       a = List a\ntype Vector'      a = List a\n\n\n-- define a C++ specialization for each type\ntype Cpp => Stack' a = \"std::stack<$1>\" a\ntype Cpp => List' a = \"std::list<$1>\" a\ntype Cpp => ForwardList' a = \"std::forward_list<$1>\" a\ntype Cpp => Deque' a = \"std::deque<$1>\" a\ntype Cpp => Queue' a = \"std::queue<$1>\" a\n\n\n-- a typeclass for orderd containers\nclass Listlike f where\n    -- add a new element to the end of a list\n    append :: a -> f a -> f a\n\n    -- convert to the canonical list type\n    toList :: f a -> List a\n\n\n-- define instances for each data structure, `append` and `toVector` have overloaded C++ definitions\ninstance Listlike Stack' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike List' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike ForwardList' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike Deque' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike Queue' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike Vector' where\n    source Cpp from \"foo.hpp\" (\"append\", \"id\" as toList)\n\n\nsource Cpp from \"foo.hpp\"\n    ( \"vectorToDeque\"\n    , \"vectorToForwardList\"\n    , \"vectorToList\"\n    , \"vectorToQueue\"\n    , \"vectorToStack\"\n    )\n\nvectorToDeque :: Vector' a -> Deque' a\nvectorToForwardList :: Vector' a -> ForwardList' a\nvectorToList :: Vector' a -> List' a\nvectorToQueue :: Vector' a -> Queue' a\nvectorToStack :: Vector' a -> Stack' a\n\nfoo = toList\n    . append 18\n    . vectorToList\n    . toList\n    . append 16\n    . vectorToQueue\n    . toList\n    . append 14\n    . vectorToDeque\n    . toList\n    . append 12\n    . vectorToStack\n    . toList\n    . append 10\n    . vectorToForwardList\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo [1,2,3] > obs.txt 2> obs.err\n\t./nexus square [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-py/exp.txt",
    "content": "[1,4,9]\n[1,4,9]\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-py/foo.R",
    "content": "rid <- function(x) x\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-py/foo.py",
    "content": "import numpy as np\nfrom typing import List, Union, TypeVar\n\nT = TypeVar('T', int, float, bool)\n\ndef numpy2list(xs: np.ndarray) -> List[Union[int, float, bool]]:\n    \"\"\"\n    Convert numpy array xs to a normal python list\n    \"\"\"\n    return xs.tolist()\n\ndef list2numpy(xs: List[T]) -> np.ndarray:\n    \"\"\"\n    Convert pure python list xs to numpy array\n    \"\"\"\n    return np.array(xs)\n\ndef square(x: Union[int, float, np.ndarray]) -> Union[int, float, np.ndarray]:\n    \"\"\"\n    Square the input value or array\n    \"\"\"\n    return x ** 2\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-py/main.loc",
    "content": "module main (foo, square)\n\ntype Py => Numpy1D a = \"numpy.ndarray\" a\ntype Py => List a = \"list\" a\ntype Py => Int64 = \"int64\"\n\n-- No, this is not an error.\n\n-- The R \"double\" type is a vector of IEEE 64 bit floats, but it is also how R\n-- stores integers larger than 32 bits. These still cannot truly represent 64 bit\n-- integers because 11 bits used in the exponent. So in reality this is a 53 bit\n-- integer.\ntype R => List Int64 = \"double\" Int64\ntype R => Int64 = \"double\"\n\ntype Numpy1D a = List a\n\nsource Py from \"foo.py\" (\"list2numpy\", \"numpy2list\", \"square\")\nlist2numpy :: List a -> Numpy1D a\nnumpy2list :: Numpy1D a -> List a\nsquare :: Numpy1D Int64 -> Numpy1D Int64\n\nsource R from \"foo.R\" (\"rid\")\nrid :: a -> a\n\nfoo :: List Int64 -> List Int64\nfoo = numpy2list . square . list2numpy\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus length '\"yolo my fomo\"' > obs.txt 2> obs.err\n\t./nexus upper '\"yolo my fomo\"' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-r/exp.txt",
    "content": "12\n\"YOLO MY FOMO\"\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-r/foo.R",
    "content": "upper <- function(raw_vec) {\n  # Create a mask for lowercase ASCII values (a-z)\n  mask <- raw_vec >= as.raw(0x61) & raw_vec <= as.raw(0x7A)\n  \n  # Apply the bitwise XOR operation only to lowercase letters\n  raw_vec[mask] <- as.raw(bitwXor(as.integer(raw_vec[mask]), as.integer(0x20)))\n  \n  raw_vec\n}\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-1-r/main.loc",
    "content": "module main (length, upper)\n\ntype Bytes = Str\n\ntype R => Str = \"character\"\ntype R => Bytes = \"raw\" \ntype R => Int = \"integer\" \n\nsource R from \"foo.R\" (\"length\", \"upper\")\n\nlength :: Bytes -> Int \nupper :: Bytes -> Bytes\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t# g++ -O3 --std=c++17 -ocpp.out pool.cpp -std=c++17 -I/home/z/.morloc/include -I.\n\t./nexus foo [7,8,9] > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-c/exp.txt",
    "content": "[7,8,9,10,12,14,16,18]\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-c/foo.hpp",
    "content": "#include <stack>\n#include <list>\n#include <forward_list>\n#include <deque>\n#include <queue>\n#include <vector>\n#include <algorithm>\n\n\n// Stack to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::stack<T>& xs) {\n    std::vector<T> result;\n    std::stack<T> temp = xs;  // Create a temporary stack to preserve the original\n    result.reserve(temp.size());\n    while (!temp.empty()) {\n        result.push_back(temp.top());\n        temp.pop();\n    }\n    std::reverse(result.begin(), result.end());  // Reverse to maintain original order\n    return result;\n}\n\n// List to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::list<T>& xs) {\n    return std::vector<T>(xs.begin(), xs.end());\n}\n\n// Forward List to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::forward_list<T>& xs) {\n    return std::vector<T>(xs.begin(), xs.end());\n}\n\n// Deque to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(const std::deque<T>& xs) {\n    return std::vector<T>(xs.begin(), xs.end());\n}\n\n// Queue to Vector and back\ntemplate <typename T>\nstd::vector<T> toVector(std::queue<T> xs) {  // Note: pass by value to allow modification\n    std::vector<T> result;\n    result.reserve(xs.size());\n    while (!xs.empty()) {\n        result.push_back(std::move(xs.front()));\n        xs.pop();\n    }\n    return result;\n}\n\n\n\ntemplate <typename T>\nstd::stack<T> vectorToStack(const std::vector<T>& xs) {\n    std::stack<T> result;\n    for (auto it = xs.rbegin(); it != xs.rend(); ++it) {\n        result.push(*it);\n    }\n    return result;\n}\n\ntemplate <typename T>\nstd::list<T> vectorToList(const std::vector<T>& xs) {\n    return std::list<T>(xs.begin(), xs.end());\n}\n\ntemplate <typename T>\nstd::forward_list<T> vectorToForwardList(const std::vector<T>& xs) {\n    return std::forward_list<T>(xs.begin(), xs.end());\n}\n\ntemplate <typename T>\nstd::deque<T> vectorToDeque(const std::vector<T>& xs) {\n    return std::deque<T>(xs.begin(), xs.end());\n}\n\ntemplate <typename T>\nstd::queue<T> vectorToQueue(const std::vector<T>& xs) {\n    return std::queue<T>(std::deque<T>(xs.begin(), xs.end()));\n}\n\n\ntemplate <typename T>\nT id(const T& x) {\n    return x;\n}\n\n// Vector\ntemplate <typename T>\nstd::vector<T> append(T element, std::vector<T> container) {\n    container.push_back(element);\n    return container;\n}\n\n// List\ntemplate <typename T>\nstd::list<T> append(T element, std::list<T> container) {\n    container.push_back(element);\n    return container;\n}\n\n// Deque\ntemplate <typename T>\nstd::deque<T> append(T element, std::deque<T> container) {\n    container.push_back(element);\n    return container;\n}\n\n// Queue - WARNING: O(n)\ntemplate <typename T>\nstd::queue<T> append(T element, std::queue<T> container) {\n    std::queue<T> temp_queue;\n\n    // Copy elements from original queue to temp_queue\n    while (!container.empty()) {\n        temp_queue.push(container.front());\n        container.pop();\n    }\n\n    // Push the new element onto the end of the queue\n    temp_queue.push(element);\n\n    // Copy elements back from temp_queue to original queue\n    while (!temp_queue.empty()) {\n        container.push(temp_queue.front());\n        temp_queue.pop();\n    }\n\n    return container;\n}\n\n// Forward List - WARNING: O(n)\ntemplate <typename T>\nstd::forward_list<T> append(T element, std::forward_list<T> container) {\n    std::forward_list<T> new_container;\n    new_container.push_front(element); // Add the new element to the *front* of the new list.\n\n    // Reverse the new list\n    new_container.reverse();\n\n    // Concatenate the new element with the original list\n    container.reverse();\n    container.splice_after(container.before_begin(), new_container);\n    container.reverse();\n    \n    return container;\n}\n\n// Stack - WARNING: O(n)\ntemplate <typename T>\nstd::stack<T> append(T element, std::stack<T> container) {\n    std::stack<T> temp_stack;\n\n    // Transfer elements from original stack to temp_stack in reverse order\n    while (!container.empty()) {\n        temp_stack.push(container.top());\n        container.pop();\n    }\n\n    // Push the new element onto the now-empty original stack\n    container.push(element);\n\n    // Push the elements back from temp_stack to original stack\n    while (!temp_stack.empty()) {\n        container.push(temp_stack.top());\n        temp_stack.pop();\n    }\n\n    return container;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-c/foo.py",
    "content": "def ident(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-c/main.loc",
    "content": "module main (foo)\n\n-- base types\ntype Cpp => Int = \"int\"\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Cpp => Tuple2 a b = \"std::tuple<$1,$2>\" a b\n\ntype Py => Int = \"int\"\ntype Py => List a = \"list\" a\ntype Py => Tuple2 a b = \"tuple\" a b\n\n\n-- aliases at the general level\ntype Stack'       a = List a\ntype List'        a = List a\ntype ForwardList' a = List a\ntype Deque'       a = List a\ntype Queue'       a = List a\ntype Vector'      a = List a\n\n\n-- define a C++ specialization for each type\ntype Cpp => Stack' a = \"std::stack<$1>\" a\ntype Cpp => List' a = \"std::list<$1>\" a\ntype Cpp => ForwardList' a = \"std::forward_list<$1>\" a\ntype Cpp => Deque' a = \"std::deque<$1>\" a\ntype Cpp => Queue' a = \"std::queue<$1>\" a\n-- note Vector' does not need a definition, it resolves to List and then \"std::vector\"\n\n\n-- a typeclass for orderd containers\nclass Listlike f where\n    -- add a new element to the end of a list\n    append f a :: a -> f a -> f a\n\n    -- convert to the canonical list type\n    toList f a :: f a -> List a\n\n\n-- define instances for each data structure, `append` and `toVector` have overloaded C++ definitions\ninstance Listlike Stack' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike List' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike ForwardList' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike Deque' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike Queue' where\n    source Cpp from \"foo.hpp\" (\"append\", \"toVector\" as toList)\n\ninstance Listlike List where\n    source Cpp from \"foo.hpp\" (\"append\", \"id\" as toList)\n\n\nsource Cpp from \"foo.hpp\"\n    ( \"vectorToDeque\"\n    , \"vectorToForwardList\"\n    , \"vectorToList\"\n    , \"vectorToQueue\"\n    , \"vectorToStack\"\n    )\n\nsource Py from \"foo.py\" (\"ident\" as id)\nid :: a -> a \n\nvectorToDeque :: List a -> Deque' a\nvectorToForwardList :: List a -> ForwardList' a\nvectorToList :: List a -> List' a\nvectorToQueue :: List a -> Queue' a\nvectorToStack :: List a -> Stack' a\n\nfoo = id\n    . append 18\n    . vectorToList\n    . toList\n    . id\n    . append 16\n    . vectorToQueue\n    . toList\n    . id\n    . append 14\n    . vectorToDeque\n    . toList\n    . id\n    . append 12\n    . vectorToStack\n    . toList\n    . id\n    . append 10\n    . vectorToForwardList\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus dobytes '\"asdf\"' > obs.txt 2> obs.err\n\t./nexus doarray '\"asdf\"' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-py/exp.txt",
    "content": "\"asdf!\"\n\"asdf!\"\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-py/foo.R",
    "content": "rid <- function(x) x\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-py/foo.py",
    "content": "def pid(x):\n    return x\n\ndef changebytes(x):\n    if isinstance(x, bytes):\n        return x + b'!'\n    else:\n        raise TypeError(f\"Expected bytes, found {type(x)!s}\")\n\ndef changebytearray(x):\n    if isinstance(x, bytearray):\n        return x + b'!'\n    else:\n        raise TypeError(f\"Expected bytearray, found {type(x)!s}\")\n"
  },
  {
    "path": "test-suite/golden-tests/specialization-2-py/main.loc",
    "content": "module main (doarray, dobytes)\n\ntype Py => ByteArray = \"bytearray\"\ntype Py => Bytes = \"bytes\"\n\ntype ByteArray = Str\ntype Bytes = Str\n\ntype R => ByteArray = \"raw\"\ntype R => Bytes = \"raw\"\n\nsource Py from \"foo.py\" (\"pid\", \"changebytearray\", \"changebytes\")\npid :: a -> a\nchangebytearray :: ByteArray -> ByteArray\nchangebytes :: Bytes -> Bytes\n\nsource R from \"foo.R\" (\"rid\")\nrid :: a -> a\n\ndoarray :: ByteArray -> ByteArray\ndoarray = pid . changebytearray . rid . pid\n\ndobytes :: Bytes -> Bytes\ndobytes = pid . changebytes . rid . pid\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus pfoo '\"wena\"' 2> obs.txt 2>> obs.err  >> obs.txt\n\t./nexus rfoo '\"wena\"' 2>> obs.txt 2>> obs.err  >> obs.txt\n\t./nexus cfoo '\"wena\"' 2>> obs.txt 2>> obs.err  >> obs.txt\n\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/exp.txt",
    "content": "STDERR Hello wena\nSTDOUT Hello wena\nSTDERR Hello wena \nSTDOUT Hello wena \nSTDERR Hello wena\nSTDOUT Hello wena\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/foo.R",
    "content": "rfoo <- function(name) {\n  cat(\"STDERR Hello\", name, \"\\n\", file = stderr())\n  cat(\"STDOUT Hello\", name, \"\\n\")\n}\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/foo.hpp",
    "content": "#include <iostream>\n#include <string>\n#include \"mlccpptypes/prelude.hpp\"\n\nmlc::Unit cfoo(const std::string& name) {\n    std::cerr << \"STDERR Hello \" << name << std::endl;\n    std::cout << \"STDOUT Hello \" << name << std::endl;\n    return mlc::_Unit;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/foo.py",
    "content": "import sys\n\ndef pfoo(name):\n    print(\"STDERR Hello \" + name, file=sys.stderr)\n    print(\"STDOUT Hello \" + name)\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/main.loc",
    "content": "module main (cfoo, pfoo, rfoo)\n\nimport root-cpp\n\nsource Py from \"foo.py\" (\"pfoo\")\nsource R from \"foo.R\" (\"rfoo\")\nsource Cpp from \"foo.hpp\" (\"cfoo\")\n\npfoo :: Str -> ()\ncfoo :: Str -> ()\nrfoo :: Str -> ()\n"
  },
  {
    "path": "test-suite/golden-tests/stderr-stdout/package.yaml",
    "content": "name: math\nversion: 0.0.0\nhomepage: https://github.com/morloc-project\nsynopsis: test\ndescription: test\ncategory: test\nlicense: MIT\nauthor: \"Zebulun Arendsee\"\nmaintainer: \"z@morloc.io\"\ngithub: https://github.com/morloc-lib/math\nbug-reports: \"/dev/null\"\ndependencies:\n  - \"mlc\"\n\n# -Wall -lm -lgsl -lcblas\n# # cblas is a faster library, but gslcblas comes with gsl, so it more likely to\n# # be installed. I need a system for specifying alternative sets of flags.\n"
  },
  {
    "path": "test-suite/golden-tests/stdout-flush-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- forceOnce ---\" > obs.txt\n\t./nexus forceOnce >> obs.txt 2>> obs.err\n\techo \"--- forceTwice ---\" >> obs.txt\n\t./nexus forceTwice >> obs.txt 2>> obs.err\n\techo \"--- forceShared ---\" >> obs.txt\n\t./nexus forceShared >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/stdout-flush-py/exp.txt",
    "content": "--- forceOnce ---\nEVAL 5\n10\n--- forceTwice ---\nEVAL 5\nEVAL 5\n20\n--- forceShared ---\nEVAL 5\n20\n"
  },
  {
    "path": "test-suite/golden-tests/stdout-flush-py/fun.py",
    "content": "def sideEffect(x):\n    print(\"EVAL \" + str(x))\n    return x * 2\n\ndef add(a, b):\n    return a + b\n"
  },
  {
    "path": "test-suite/golden-tests/stdout-flush-py/main.loc",
    "content": "-- Test that stdout from Python user functions is flushed before pool shutdown.\n-- This is a Python analog of thunk-effects (which uses C++).\n-- Without line-buffered stdout, print() output can be silently lost when\n-- the nexus kills the pool process group.\n\nmodule main (forceOnce, forceTwice, forceShared)\n\nimport root-py\n\nsource Py from \"fun.py\" (\"sideEffect\", \"add\")\n\ntype Py => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\nadd :: Int -> Int -> Int\n\n-- Single evaluation\n-- sideEffect 5 prints \"EVAL 5\", returns 10\nforceOnce :: <IO> Int\nforceOnce = sideEffect 5\n\n-- Two independent evaluations\n-- sideEffect 5 called twice: \"EVAL 5\" appears twice, 10 + 10 = 20\nforceTwice :: <IO> Int\nforceTwice = do\n    x <- sideEffect 5\n    y <- sideEffect 5\n    add x y\n\n-- Shared binding: evaluates once, result shared\n-- sideEffect 5 called once: \"EVAL 5\" appears once, 10 + 10 = 20\nforceShared :: <IO> Int\nforceShared = do\n    x <- sideEffect 5\n    add x x\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/.gitignore",
    "content": "a.txt\nb.txt\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus foop a.json 2>> obs.err  >> obs.txt\n\t./nexus foor a.json 2>> obs.err  >> obs.txt\n\t./nexus fooc a.json 2>> obs.err  >> obs.txt\n\t./nexus foop b.json 2>> obs.err  >> obs.txt\n\t./nexus foor b.json 2>> obs.err  >> obs.txt\n\t./nexus fooc b.json 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/a.json",
    "content": "\"<\\\\,\\\",\\f,\\n,\\r,\\t>\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/b.json",
    "content": "\"你知道得太多了\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/exp.txt",
    "content": "\"<\\\\,\\\",\\f,\\n,\\r,\\t>\\npy: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\\npy: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n\"<\\\\,\\\",\\f,\\n,\\r,\\t>\\nR: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\\nR: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n\"<\\\\,\\\",\\f,\\n,\\r,\\t>\\nc++: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\\nc++: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n\"你知道得太多了\\npy: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\\npy: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n\"你知道得太多了\\nR: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\\nR: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n\"你知道得太多了\\nc++: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\\nc++: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/foo.R",
    "content": "mlc_foo <- function(x){\n  paste(x, \"R: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\", sep=\"\\n\")\n}\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\ntemplate <class A>\nstd::string mlc_foo(A x){\n  A y = x + \"\\n\" + \"c++: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\";\n  return y;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/foo.py",
    "content": "def mlc_foo(x):\n    y = x + \"\\n\" + \"py: <\\\\,\\\",\\f,\\n,\\r,\\t,草泥马>\"\n    return y\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding/main.loc",
    "content": "module main (foor, fooc, foop)\n\nimport root ((.))\n\nsource Py from \"foo.py\" (\"mlc_foo\" as pfoo)\nsource Cpp from \"foo.hpp\" (\"mlc_foo\" as cfoo)\nsource R from \"foo.R\" (\"mlc_foo\" as rfoo)\n\npfoo :: a -> a\ncfoo :: a -> a\nrfoo :: a -> a\n\ntype Py => Str = \"str\"\ntype Cpp => Str = \"std::string\"\ntype R => Str = \"character\"\n\nfoor :: Str -> Str\nfoor = rfoo . rfoo\n\nfooc :: Str -> Str\nfooc = cfoo . cfoo\n\nfoop :: Str -> Str\nfoop = pfoo . pfoo\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo a.json > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/a.json",
    "content": "\"你知道得太多了\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/exp.txt",
    "content": "\"你知道得太多了!?.\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/foo.R",
    "content": "rfoo <- function(x){\n    paste0(x, \"?\")\n}\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/foo.hpp",
    "content": "std::string cfoo(std::string x){\n    return x + \".\";\n}\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/foo.py",
    "content": "def pfoo(x):\n    return x + \"!\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-encoding-utf8/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\n\ntype Py => Str = \"str\"\ntype R => Str = \"character\"\ntype Cpp => Str = \"std::string\"\n\nsource Py from \"foo.py\" (\"pfoo\")\nsource R from \"foo.R\" (\"rfoo\")\nsource Cpp from \"foo.hpp\" (\"cfoo\")\nrfoo :: Str -> Str\npfoo :: Str -> Str\ncfoo :: Str -> Str\n\nfoo = cfoo . rfoo . pfoo\n"
  },
  {
    "path": "test-suite/golden-tests/string-escape/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus pyQ    2>> obs.err >> obs.txt\n\t./nexus cppQ   2>> obs.err >> obs.txt\n\t./nexus rQ     2>> obs.err >> obs.txt\n\t./nexus pyAll  2>> obs.err >> obs.txt\n\t./nexus cppAll 2>> obs.err >> obs.txt\n\t./nexus rAll   2>> obs.err >> obs.txt\n\t./nexus pureQ  2>> obs.err >> obs.txt\n\t./nexus pureAll 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/string-escape/exp.txt",
    "content": "\"he said \\\"hi\\\"\"\n\"he said \\\"hi\\\"\"\n\"he said \\\"hi\\\"\"\n\"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n\"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n\"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n\"he said \\\"hi\\\"\"\n\"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-escape/helper.R",
    "content": "identity <- function(x) {\n  x\n}\n"
  },
  {
    "path": "test-suite/golden-tests/string-escape/helper.hpp",
    "content": "#ifndef __HELPER_HPP__\n#define __HELPER_HPP__\n\n#include <string>\n\nstd::string identity(std::string x) {\n    return x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/string-escape/helper.py",
    "content": "def identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/string-escape/main.loc",
    "content": "module main (pyQ, cppQ, rQ, pyAll, cppAll, rAll, pureQ, pureAll)\n\nsource Py from \"helper.py\" (\"identity\" as pyId)\nsource Cpp from \"helper.hpp\" (\"identity\" as cppId)\nsource R from \"helper.R\" (\"identity\" as rId)\n\npyId :: Str -> Str\ncppId :: Str -> Str\nrId :: Str -> Str\n\ntype Py => Str = \"str\"\ntype Cpp => Str = \"std::string\"\ntype R => Str = \"character\"\n\n-- embedded double quotes\npyQ :: Str\npyQ = pyId \"he said \\\"hi\\\"\"\n\ncppQ :: Str\ncppQ = cppId \"he said \\\"hi\\\"\"\n\nrQ :: Str\nrQ = rId \"he said \\\"hi\\\"\"\n\n-- all supported escape sequences combined\npyAll :: Str\npyAll = pyId \"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n\ncppAll :: Str\ncppAll = cppId \"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n\nrAll :: Str\nrAll = rId \"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n\n-- pure morloc string constants\npureQ :: Str\npureQ = \"he said \\\"hi\\\"\"\n\npureAll :: Str\npureAll = \"q:\\\" bs:\\\\ nl:\\n tab:\\t end\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-interpolation/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus sr   '\"Alice\"' '\"Bob\"'  > obs.txt 2> obs.err\n\t./nexus spy  '\"Alice\"' '\"Bob\"' 2>> obs.err  >> obs.txt\n\t./nexus scpp '\"Alice\"' '\"Bob\"' 2>> obs.err  >> obs.txt\n\t./nexus escR   '\"hi\"' 2>> obs.err >> obs.txt\n\t./nexus escPy  '\"hi\"' 2>> obs.err >> obs.txt\n\t./nexus escCpp '\"hi\"' 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/string-interpolation/exp.txt",
    "content": "\"x=Alice;y=Bob;\"\n\"x=Alice;y=Bob;\"\n\"x=Alice;y=Bob;\"\n\"a\\t\\\"hi\\nb\"\n\"a\\t\\\"\\\"\\\"hi\\nb\"\n\"a\\t\\\"hi\\nb\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-interpolation/main.loc",
    "content": "module main (sr, spy, scpp, escR, escPy, escCpp)\n\nimport root\nimport root-r\nimport root-py\nimport root-cpp\n\nsr :: Str -> Str -> Str\nsr x y = \"x=#{idr x};y=#{idr y};\"\n\nspy :: Str -> Str -> Str\nspy x y = \"x=#{idpy x};y=#{idpy y};\"\n\nscpp :: Str -> Str -> Str\nscpp x y = \"x=#{idcpp x};y=#{idcpp y};\"\n\n-- test escape sequences inside interpolated strings\nescR :: Str -> Str\nescR x = \"a\\t\\\"#{idr x}\\nb\"\n\nescPy :: Str -> Str\nescPy x = \"a\\t\\\"\\\"\\\"#{idpy x}\\nb\"\n\nescCpp :: Str -> Str\nescCpp x = \"a\\t\\\"#{idcpp x}\\nb\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus pfoo a.json > obs.txt 2> obs.err\n\t./nexus pfoo b.json 2>> obs.err  >> obs.txt\n\t./nexus cfoo a.json 2>> obs.err  >> obs.txt\n\t./nexus cfoo b.json 2>> obs.err  >> obs.txt\n\t./nexus rfoo a.json 2>> obs.err  >> obs.txt\n\t./nexus rfoo b.json 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/a.json",
    "content": "  [   [\"\\\" [hi\",[\"bi]  \\t]\", \"{si\", \"\"] , [-43.5] ], [\"\", [], []]]  \n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/b.json",
    "content": "[]\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/c.json",
    "content": "[[\"\", [], []]]\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/exp.txt",
    "content": "[[\"\\\" [hi\",[\"bi]  \\t]\",\"{si\",\"\"],[-43.5]],[\"\",[],[]]]\n[]\n[[\"\\\" [hi\",[\"bi]  \\t]\",\"{si\",\"\"],[-43.5]],[\"\",[],[]]]\n[]\n[[\"\\\" [hi\",[\"bi]  \\t]\",\"{si\",\"\"],[-43.5]],[\"\",[],[]]]\n[]\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/foo.R",
    "content": "ident <- function(x) { x }\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/foo.hpp",
    "content": "template <class A>\nA ident(A x){\n    return x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/foo.py",
    "content": "def ident(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/string-json-parsing/main.loc",
    "content": "module main (pfoo, cfoo, rfoo)\n\nsource Py from \"foo.py\" (\"ident\" as pident)\nsource Cpp from \"foo.hpp\" (\"ident\" as cident)\nsource R from \"foo.R\" (\"ident\" as rident)\n\ntype Py => Str = \"str\"\ntype Py => Real = \"float\"\ntype Py => List a = \"list\" a\ntype Py => Tuple3 a b c = \"tuple\" a b c\n\ntype R => Str = \"character\"\ntype R => Real = \"numeric\"\ntype R => List a = \"list\" a\ntype R => Tuple3 a b c = \"list\" a b c\n\ntype Cpp => Str = \"std::string\"\ntype Cpp => Real = \"double\"\ntype Cpp => List a = \"std::vector<$1>\" a\ntype Cpp => Tuple3 a b c = \"std::tuple<$1,$2,$3>\" a b c\n\npident :: a -> a\ncident :: a -> a\nrident :: a -> a\n\npfoo :: [(Str, [Str], [Real])] -> [(Str, [Str], [Real])]\npfoo = pident\n\ncfoo :: [(Str, [Str], [Real])] -> [(Str, [Str], [Real])]\ncfoo = cident\n\nrfoo :: [(Str, [Str], [Real])] -> [(Str, [Str], [Real])]\nrfoo = rident\n"
  },
  {
    "path": "test-suite/golden-tests/string-multiline/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus s1 >  obs.txt\n\t./nexus s2 2>> obs.err  >> obs.txt\n\t./nexus s3 2>> obs.err  >> obs.txt\n\t./nexus s4 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/string-multiline/exp.txt",
    "content": "\"  This is the start of a grand paragraph. That\\nhas many lines that have few initial spaces than\\nthe first paragraph.\\n\"\n\"you can also use \\\"multiline\\\" strings in one line with internal quotes\\n\"\n\"single quotes are also OK\\n\"\n\"\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-multiline/main.loc",
    "content": "module main (s1, s2, s3, s4)\n\ns1 =\n  \"\"\"\n    This is the start of a grand paragraph. That\n  has many lines that have few initial spaces than\n  the first paragraph.\n  \"\"\"\n\ns2 = \"\"\"you can also use \"multiline\" strings in one line with internal quotes\"\"\"\n\ns3 = '''single quotes are also OK'''\n\n-- empty string\ns4 =\n  \"\"\"\n  \"\"\"\n"
  },
  {
    "path": "test-suite/golden-tests/string-pretty/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- greeting ---\" > obs.txt\n\t./nexus --print greeting >> obs.txt 2>> obs.err\n\techo \"--- multiline ---\" >> obs.txt\n\t./nexus --print multiline >> obs.txt 2>> obs.err\n\techo \"--- number ---\" >> obs.txt\n\t./nexus --print number >> obs.txt 2>> obs.err\n\techo \"--- flag ---\" >> obs.txt\n\t./nexus --print flag >> obs.txt 2>> obs.err\n\techo \"--- items ---\" >> obs.txt\n\t./nexus --print items >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/string-pretty/exp.txt",
    "content": "--- greeting ---\nhello world\n--- multiline ---\nline one\nline two\nline three\n--- number ---\n42\n--- flag ---\ntrue\n--- items ---\n[\n  1,\n  2,\n  3\n]\n"
  },
  {
    "path": "test-suite/golden-tests/string-pretty/main.loc",
    "content": "module main (greeting, multiline, number, flag, items)\n\ngreeting :: Str\ngreeting = \"hello world\"\n\nmultiline :: Str\nmultiline = \"line one\\nline two\\nline three\"\n\nnumber :: Int\nnumber = 42\n\nflag :: Bool\nflag = True\n\nitems :: [Int]\nitems = [1, 2, 3]\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-c/exp.txt",
    "content": "{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-c/main.loc",
    "content": "module main (foo)\n\nimport root\nimport root-cpp\n\ntable (Person a) = Person {name :: Str, info :: a}\ntable Cpp => (Person a) = \"struct\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idcpp xs\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-py/exp.txt",
    "content": "{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-py/main.loc",
    "content": "module main (foo)\n\nimport root\nimport root-py\n\ntable (Person a) = Person {name :: Str, info :: a}\ntable Py => (Person a) = \"dict\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-r/exp.txt",
    "content": "{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}\n"
  },
  {
    "path": "test-suite/golden-tests/table-1-r/main.loc",
    "content": "module main (foo)\n\nimport root\nimport root-r\n\ntable (Person a) = Person {name :: Str, info :: a}\ntable R => (Person a) = \"data.frame\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-c/exp.txt",
    "content": "{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-c/main.loc",
    "content": "module main (foo)\n\ntype Cpp => Str = \"std::string\"\ntype Cpp => Int = \"int\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\nsource Cpp from \"person.h\" (\"PersonYay\", \"id\")\nid :: a -> a \n\ntable (Person a) = Person {name :: Str, info :: a}\ntable Cpp => Person a = \"PersonYay\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = id xs\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-c/person.h",
    "content": "#ifndef __PERSON_H__\n#define __PERSON_H__\n\ntemplate <class T>\nclass PersonYay{\n    public:\n        std::vector<std::string> name;\n        std::vector<T> info; \n\n        PersonYay(std::vector<std::string> names_i, std::vector<T> infos_i){\n            name = names_i;\n            info = infos_i;\n        }\n        PersonYay(){\n            name = {};\n            info = {};\n        }\n};\n\ntemplate <class T>\nT id(T x){\n    return x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-py/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-py/exp.txt",
    "content": "{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-py/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nsource py from \"person.py\" (\"PersonObj\")\n\ntable (Person a) = Person {name :: Str, info :: a}\ntable Py => (Person a) = \"PersonObj\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idpy xs\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-py/person.py",
    "content": "class PersonObj:\n  def __init__(self, name, info):\n    self.name = name \n    self.info = info\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-r/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-r/exp.txt",
    "content": "{\"name\":[\"alice\",\"bob\"],\"info\":[34,35]}\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-r/main.loc",
    "content": "module main (foo)\n\nimport root-r\n\nsource R from \"person.R\" (\"person\")\n\ntable (Person a) = Person {name :: Str, info :: a}\ntable R => (Person a) = \"person\"\n\n-- This function returns the input, but passes it though a language-specific id\n-- function, which forces deserialization and then serialization.\nfoo :: Person Int -> Person Int\nfoo xs = idr xs\n"
  },
  {
    "path": "test-suite/golden-tests/table-2-r/person.R",
    "content": "# OK, this isn't really a proper class constructor and eventually I will need\n# to deal with the wonkey variation in field accessors, but for now this will\n# allow testing of the passing to the right constructor.\nperson <- function(name, info){\n  list(name=name, info=info)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- vec1d ---\" > obs.txt\n\t./nexus vec1d >> obs.txt 2>> obs.err\n\techo \"--- mat2d ---\" >> obs.txt\n\t./nexus mat2d >> obs.txt 2>> obs.err\n\techo \"--- ten3d ---\" >> obs.txt\n\t./nexus ten3d >> obs.txt 2>> obs.err\n\techo \"--- emptyVec ---\" >> obs.txt\n\t./nexus emptyVec >> obs.txt 2>> obs.err\n\techo \"--- singleElem ---\" >> obs.txt\n\t./nexus singleElem >> obs.txt 2>> obs.err\n\techo \"--- oneByOne ---\" >> obs.txt\n\t./nexus oneByOne >> obs.txt 2>> obs.err\n\techo \"--- intVec ---\" >> obs.txt\n\t./nexus intVec >> obs.txt 2>> obs.err\n\techo \"--- f32Vec ---\" >> obs.txt\n\t./nexus f32Vec >> obs.txt 2>> obs.err\n\techo \"--- testSumMat ---\" >> obs.txt\n\t./nexus testSumMat >> obs.txt 2>> obs.err\n\techo \"--- testSumVec ---\" >> obs.txt\n\t./nexus testSumVec >> obs.txt 2>> obs.err\n\techo \"--- testSum3d ---\" >> obs.txt\n\t./nexus testSum3d >> obs.txt 2>> obs.err\n\techo \"--- largeVec ---\" >> obs.txt\n\t./nexus largeVec >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cpp/exp.txt",
    "content": "--- vec1d ---\n[1,2,3,4,5]\n--- mat2d ---\n[[1,2,3,4],[5,6,7,8],[9,10,11,12]]\n--- ten3d ---\n[[[0,1,2,3],[4,5,6,7],[8,9,10,11]],[[12,13,14,15],[16,17,18,19],[20,21,22,23]]]\n--- emptyVec ---\n[]\n--- singleElem ---\n[42]\n--- oneByOne ---\n[[99]]\n--- intVec ---\n[-10,0,42,2147483647]\n--- f32Vec ---\n[1.5,-2.5,0]\n--- testSumMat ---\n78\n--- testSumVec ---\n15\n--- testSum3d ---\n276\n--- largeVec ---\n4999950000\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cpp/main.loc",
    "content": "-- Comprehensive tensor tests: multiple dimensions, element types,\n-- edge cases, compositions, all within C++ only.\nmodule main\n  ( vec1d\n  , mat2d\n  , ten3d\n  , emptyVec\n  , singleElem\n  , oneByOne\n  , intVec\n  , f32Vec\n  , testSumMat\n  , testSumVec\n  , testSum3d\n  , largeVec\n  )\n\nimport root\nimport root-cpp\n\nsource Cpp from \"src.hpp\"\n  ( \"makeVec\"\n  , \"makeMat\"\n  , \"make3d\"\n  , \"makeEmpty\"\n  , \"makeSingle\"\n  , \"makeOneByOne\"\n  , \"makeIntVec\"\n  , \"makeF32Vec\"\n  , \"sumMat\"\n  , \"sumVec\"\n  , \"sum3d\"\n  , \"makeLargeVec\"\n  , \"checkLargeVec\"\n  )\n\n-- Basic constructors for each dimension\nmakeVec :: Tensor1 5 Real\nmakeMat :: Tensor2 3 4 Real\nmake3d :: Tensor3 2 3 4 Real\n\n-- Edge cases: empty and minimal\nmakeEmpty :: Tensor1 0 Real\nmakeSingle :: Tensor1 1 Real\nmakeOneByOne :: Tensor2 1 1 Real\n\n-- Element type variety\nmakeIntVec :: Tensor1 4 Int\nmakeF32Vec :: Tensor1 3 Float32\n\n-- Extraction: tensor -> scalar\nsumMat :: Tensor2 3 4 Real -> Real\nsumVec :: Tensor1 5 Real -> Real\nsum3d :: Tensor3 2 3 4 Real -> Real\n\n-- Large tensor to test SHM path (exceeds inline threshold)\nmakeLargeVec :: Tensor1 100000 Real\ncheckLargeVec :: Tensor1 100000 Real -> Real\n\n-- Direct output tests\nvec1d = makeVec\nmat2d = makeMat\nten3d = make3d\nemptyVec = makeEmpty\nsingleElem = makeSingle\noneByOne = makeOneByOne\nintVec = makeIntVec\nf32Vec = makeF32Vec\n\n-- Composed scalar tests (zero-arg)\ntestSumMat :: Real\ntestSumMat = sumMat makeMat\n\ntestSumVec :: Real\ntestSumVec = sumVec makeVec\n\ntestSum3d :: Real\ntestSum3d = sum3d make3d\n\n-- Large vector round-trip (sum of 0..99999 = 4999950000)\nlargeVec :: Real\nlargeVec = checkLargeVec makeLargeVec\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cpp/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n#include <cmath>\n\n// 1D vector: [1.0, 2.0, 3.0, 4.0, 5.0]\nmlc::Tensor1<double> makeVec() {\n    mlc::Tensor1<double> v({5});\n    for (int i = 0; i < 5; i++) v[i] = (double)(i + 1);\n    return v;\n}\n\n// 2D matrix: [[1..4],[5..8],[9..12]]\nmlc::Tensor2<double> makeMat() {\n    mlc::Tensor2<double> m({3, 4});\n    for (int i = 0; i < 3; i++)\n        for (int j = 0; j < 4; j++)\n            m(i, j) = (double)(i * 4 + j + 1);\n    return m;\n}\n\n// 3D: [[[0..3],[4..7],[8..11]],[[12..15],[16..19],[20..23]]]\nmlc::Tensor3<double> make3d() {\n    mlc::Tensor3<double> t({2, 3, 4});\n    for (size_t k = 0; k < 24; k++) t[k] = (double)k;\n    return t;\n}\n\n// Edge: empty\nmlc::Tensor1<double> makeEmpty() {\n    return mlc::Tensor1<double>({0});\n}\n\n// Edge: single element\nmlc::Tensor1<double> makeSingle() {\n    mlc::Tensor1<double> v({1});\n    v[0] = 42.0;\n    return v;\n}\n\n// Edge: 1x1 matrix\nmlc::Tensor2<double> makeOneByOne() {\n    mlc::Tensor2<double> m({1, 1});\n    m(0, 0) = 99.0;\n    return m;\n}\n\n// Int elements\nmlc::Tensor1<int> makeIntVec() {\n    mlc::Tensor1<int> v({4});\n    v[0] = -10; v[1] = 0; v[2] = 42; v[3] = 2147483647;\n    return v;\n}\n\n// Bool elements\nmlc::Tensor1<uint8_t> makeBoolVec() {\n    mlc::Tensor1<uint8_t> v({3});\n    v[0] = 1; v[1] = 0; v[2] = 1;\n    return v;\n}\n\n// Float32 elements\nmlc::Tensor1<float> makeF32Vec() {\n    mlc::Tensor1<float> v({3});\n    v[0] = 1.5f; v[1] = -2.5f; v[2] = 0.0f;\n    return v;\n}\n\n// Sum operations\ndouble sumMat(const mlc::Tensor2<double>& m) {\n    double s = 0;\n    for (size_t k = 0; k < m.size(); k++) s += m.data()[k];\n    return s;\n}\n\ndouble sumVec(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\ndouble sum3d(const mlc::Tensor3<double>& t) {\n    double s = 0;\n    for (size_t k = 0; k < t.size(); k++) s += t.data()[k];\n    return s;\n}\n\n// Tensor -> Tensor: scale each element\nmlc::Tensor1<double> scaleVec(const mlc::Tensor1<double>& v, double factor) {\n    mlc::Tensor1<double> result({v.shape(0)});\n    for (size_t i = 0; i < v.size(); i++)\n        result[i] = v.data()[i] * factor;\n    return result;\n}\n\n// Tensor + Tensor: element-wise add\nmlc::Tensor1<double> addVecs(const mlc::Tensor1<double>& a, const mlc::Tensor1<double>& b) {\n    mlc::Tensor1<double> result({a.shape(0)});\n    for (size_t i = 0; i < a.size(); i++)\n        result[i] = a.data()[i] + b.data()[i];\n    return result;\n}\n\n// Large vector (800KB at float64 -- exceeds inline threshold)\nmlc::Tensor1<double> makeLargeVec() {\n    mlc::Tensor1<double> v({100000});\n    for (int i = 0; i < 100000; i++) v[i] = (double)i;\n    return v;\n}\n\n// Sum of 0+1+...+99999 = 4999950000\ndouble checkLargeVec(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cross/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- cpSum2d ---\" > obs.txt\n\t./nexus cpSum2d >> obs.txt 2>> obs.err\n\techo \"--- pcSum2d ---\" >> obs.txt\n\t./nexus pcSum2d >> obs.txt 2>> obs.err\n\techo \"--- prSum2d ---\" >> obs.txt\n\t./nexus prSum2d >> obs.txt 2>> obs.err\n\techo \"--- rpSum2d ---\" >> obs.txt\n\t./nexus rpSum2d >> obs.txt 2>> obs.err\n\techo \"--- crSum2d ---\" >> obs.txt\n\t./nexus crSum2d >> obs.txt 2>> obs.err\n\techo \"--- rcSum2d ---\" >> obs.txt\n\t./nexus rcSum2d >> obs.txt 2>> obs.err\n\techo \"--- cpSum3d ---\" >> obs.txt\n\t./nexus cpSum3d >> obs.txt 2>> obs.err\n\techo \"--- pcSum3d ---\" >> obs.txt\n\t./nexus pcSum3d >> obs.txt 2>> obs.err\n\techo \"--- rTransposeVerify ---\" >> obs.txt\n\t./nexus rTransposeVerify >> obs.txt 2>> obs.err\n\techo \"--- cpSum1d ---\" >> obs.txt\n\t./nexus cpSum1d >> obs.txt 2>> obs.err\n\techo \"--- pcSum1d ---\" >> obs.txt\n\t./nexus pcSum1d >> obs.txt 2>> obs.err\n\techo \"--- prSum1d ---\" >> obs.txt\n\t./nexus prSum1d >> obs.txt 2>> obs.err\n\techo \"--- rpSum1d ---\" >> obs.txt\n\t./nexus rpSum1d >> obs.txt 2>> obs.err\n\techo \"--- crSum1d ---\" >> obs.txt\n\t./nexus crSum1d >> obs.txt 2>> obs.err\n\techo \"--- rcSum1d ---\" >> obs.txt\n\t./nexus rcSum1d >> obs.txt 2>> obs.err\n\techo \"--- cpSum4d ---\" >> obs.txt\n\t./nexus cpSum4d >> obs.txt 2>> obs.err\n\techo \"--- pcSum4d ---\" >> obs.txt\n\t./nexus pcSum4d >> obs.txt 2>> obs.err\n\techo \"--- prSum4d ---\" >> obs.txt\n\t./nexus prSum4d >> obs.txt 2>> obs.err\n\techo \"--- rpSum4d ---\" >> obs.txt\n\t./nexus rpSum4d >> obs.txt 2>> obs.err\n\techo \"--- cpSumInt ---\" >> obs.txt\n\t./nexus cpSumInt >> obs.txt 2>> obs.err\n\techo \"--- pcSumInt ---\" >> obs.txt\n\t./nexus pcSumInt >> obs.txt 2>> obs.err\n\techo \"--- prSumInt ---\" >> obs.txt\n\t./nexus prSumInt >> obs.txt 2>> obs.err\n\techo \"--- rpSumInt ---\" >> obs.txt\n\t./nexus rpSumInt >> obs.txt 2>> obs.err\n\techo \"--- crSumInt ---\" >> obs.txt\n\t./nexus crSumInt >> obs.txt 2>> obs.err\n\techo \"--- rcSumInt ---\" >> obs.txt\n\t./nexus rcSumInt >> obs.txt 2>> obs.err\n\techo \"--- cpCountTrue ---\" >> obs.txt\n\t./nexus cpCountTrue >> obs.txt 2>> obs.err\n\techo \"--- pcCountTrue ---\" >> obs.txt\n\t./nexus pcCountTrue >> obs.txt 2>> obs.err\n\techo \"--- prCountTrue ---\" >> obs.txt\n\t./nexus prCountTrue >> obs.txt 2>> obs.err\n\techo \"--- rpCountTrue ---\" >> obs.txt\n\t./nexus rpCountTrue >> obs.txt 2>> obs.err\n\techo \"--- crCountTrue ---\" >> obs.txt\n\t./nexus crCountTrue >> obs.txt 2>> obs.err\n\techo \"--- rcCountTrue ---\" >> obs.txt\n\t./nexus rcCountTrue >> obs.txt 2>> obs.err\n\techo \"--- cpSumEmpty ---\" >> obs.txt\n\t./nexus cpSumEmpty >> obs.txt 2>> obs.err\n\techo \"--- pcSumEmpty ---\" >> obs.txt\n\t./nexus pcSumEmpty >> obs.txt 2>> obs.err\n\techo \"--- prSumEmpty ---\" >> obs.txt\n\t./nexus prSumEmpty >> obs.txt 2>> obs.err\n\techo \"--- rpSumEmpty ---\" >> obs.txt\n\t./nexus rpSumEmpty >> obs.txt 2>> obs.err\n\techo \"--- cpSumSingle ---\" >> obs.txt\n\t./nexus cpSumSingle >> obs.txt 2>> obs.err\n\techo \"--- pcSumSingle ---\" >> obs.txt\n\t./nexus pcSumSingle >> obs.txt 2>> obs.err\n\techo \"--- prSumSingle ---\" >> obs.txt\n\t./nexus prSumSingle >> obs.txt 2>> obs.err\n\techo \"--- rpSumSingle ---\" >> obs.txt\n\t./nexus rpSumSingle >> obs.txt 2>> obs.err\n\techo \"--- cpSumLarge ---\" >> obs.txt\n\t./nexus cpSumLarge >> obs.txt 2>> obs.err\n\techo \"--- pcSumLarge ---\" >> obs.txt\n\t./nexus pcSumLarge >> obs.txt 2>> obs.err\n\techo \"--- prSumLarge ---\" >> obs.txt\n\t./nexus prSumLarge >> obs.txt 2>> obs.err\n\techo \"--- rpSumLarge ---\" >> obs.txt\n\t./nexus rpSumLarge >> obs.txt 2>> obs.err\n\techo \"--- crSumLarge ---\" >> obs.txt\n\t./nexus crSumLarge >> obs.txt 2>> obs.err\n\techo \"--- rcSumLarge ---\" >> obs.txt\n\t./nexus rcSumLarge >> obs.txt 2>> obs.err\n\techo \"--- cpSumHuge ---\" >> obs.txt\n\t./nexus cpSumHuge >> obs.txt 2>> obs.err\n\techo \"--- pcSumHuge ---\" >> obs.txt\n\t./nexus pcSumHuge >> obs.txt 2>> obs.err\n\techo \"--- prSumHuge ---\" >> obs.txt\n\t./nexus prSumHuge >> obs.txt 2>> obs.err\n\techo \"--- rpSumHuge ---\" >> obs.txt\n\t./nexus rpSumHuge >> obs.txt 2>> obs.err\n\techo \"--- crSumHuge ---\" >> obs.txt\n\t./nexus crSumHuge >> obs.txt 2>> obs.err\n\techo \"--- rcSumHuge ---\" >> obs.txt\n\t./nexus rcSumHuge >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cross/exp.txt",
    "content": "--- cpSum2d ---\n78\n--- pcSum2d ---\n78\n--- prSum2d ---\n78\n--- rpSum2d ---\n78\n--- crSum2d ---\n78\n--- rcSum2d ---\n78\n--- cpSum3d ---\n276\n--- pcSum3d ---\n276\n--- rTransposeVerify ---\n[1,2,3,24]\n--- cpSum1d ---\n55\n--- pcSum1d ---\n55\n--- prSum1d ---\n55\n--- rpSum1d ---\n55\n--- crSum1d ---\n55\n--- rcSum1d ---\n55\n--- cpSum4d ---\n276\n--- pcSum4d ---\n276\n--- prSum4d ---\n276\n--- rpSum4d ---\n276\n--- cpSumInt ---\n108\n--- pcSumInt ---\n108\n--- prSumInt ---\n108\n--- rpSumInt ---\n108\n--- crSumInt ---\n108\n--- rcSumInt ---\n108\n--- cpCountTrue ---\n4\n--- pcCountTrue ---\n4\n--- prCountTrue ---\n4\n--- rpCountTrue ---\n4\n--- crCountTrue ---\n4\n--- rcCountTrue ---\n4\n--- cpSumEmpty ---\n0\n--- pcSumEmpty ---\n0\n--- prSumEmpty ---\n0\n--- rpSumEmpty ---\n0\n--- cpSumSingle ---\n42\n--- pcSumSingle ---\n42\n--- prSumSingle ---\n42\n--- rpSumSingle ---\n42\n--- cpSumLarge ---\n12497500\n--- pcSumLarge ---\n12497500\n--- prSumLarge ---\n12497500\n--- rpSumLarge ---\n12497500\n--- crSumLarge ---\n12497500\n--- rcSumLarge ---\n12497500\n--- cpSumHuge ---\n1249975000\n--- pcSumHuge ---\n1249975000\n--- prSumHuge ---\n1249975000\n--- rpSumHuge ---\n1249975000\n--- crSumHuge ---\n1249975000\n--- rcSumHuge ---\n1249975000\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cross/main.loc",
    "content": "-- Cross-language tensor tests: all language pairs, multiple dimensions,\n-- element types (Real, Int, Bool), edge cases, and performance.\nmodule main\n  ( -- 2D Real: all six language pairs\n    cpSum2d\n  , pcSum2d\n  , prSum2d\n  , rpSum2d\n  , crSum2d\n  , rcSum2d\n  -- 3D Real: C++/Python pairs + R element verification\n  , cpSum3d\n  , pcSum3d\n  , rTransposeVerify\n  -- 1D Real: all six language pairs\n  , cpSum1d\n  , pcSum1d\n  , prSum1d\n  , rpSum1d\n  , crSum1d\n  , rcSum1d\n  -- 4D Real: Py<->Cpp, Py<->R\n  , cpSum4d\n  , pcSum4d\n  , prSum4d\n  , rpSum4d\n  -- Int element type: all six language pairs (1D)\n  , cpSumInt\n  , pcSumInt\n  , prSumInt\n  , rpSumInt\n  , crSumInt\n  , rcSumInt\n  -- Bool element type: all six pairs (1D)\n  , cpCountTrue\n  , pcCountTrue\n  , prCountTrue\n  , rpCountTrue\n  , crCountTrue\n  , rcCountTrue\n  -- Edge cases: empty and single-element tensors\n  , cpSumEmpty\n  , pcSumEmpty\n  , prSumEmpty\n  , rpSumEmpty\n  , cpSumSingle\n  , pcSumSingle\n  , prSumSingle\n  , rpSumSingle\n  -- Large tensor (5000 doubles = 40KB, all language pairs)\n  , cpSumLarge\n  , pcSumLarge\n  , prSumLarge\n  , rpSumLarge\n  , crSumLarge\n  , rcSumLarge\n  -- Very large tensor (50000 doubles = 400KB, crosses 64KB SHM threshold)\n  , cpSumHuge\n  , pcSumHuge\n  , prSumHuge\n  , rpSumHuge\n  , crSumHuge\n  , rcSumHuge\n  )\n\nimport root\nimport root-cpp\nimport root-py\nimport root-r (Real, Int, Bool)\n\nsource Cpp from \"src.hpp\"\n  ( \"cppMakeMat\"\n  , \"cppSumMat\"\n  , \"cppMake3d\"\n  , \"cppSum3d\"\n  , \"cppGetCorners3d\"\n  , \"cppMakeVec\"\n  , \"cppSumVec\"\n  , \"cppMake4d\"\n  , \"cppSum4d\"\n  , \"cppMakeIntVec\"\n  , \"cppSumIntVec\"\n  , \"cppMakeBoolVec\"\n  , \"cppCountTrue\"\n  , \"cppMakeEmpty\"\n  , \"cppSumEmpty\"\n  , \"cppMakeSingle\"\n  , \"cppSumSingle\"\n  , \"cppMakeLarge\"\n  , \"cppSumLarge\"\n  , \"cppMakeHuge\"\n  , \"cppSumHuge\"\n  )\n\nsource Py from \"src.py\"\n  ( \"pyMakeMat\"\n  , \"pySumMat\"\n  , \"pyMake3d\"\n  , \"pySum3d\"\n  , \"pyMakeVec\"\n  , \"pySumVec\"\n  , \"pyMake4d\"\n  , \"pySum4d\"\n  , \"pyMakeIntVec\"\n  , \"pySumIntVec\"\n  , \"pyMakeBoolVec\"\n  , \"pyCountTrue\"\n  , \"pyMakeEmpty\"\n  , \"pySumEmpty\"\n  , \"pyMakeSingle\"\n  , \"pySumSingle\"\n  , \"pyMakeLarge\"\n  , \"pySumLarge\"\n  , \"pyMakeHuge\"\n  , \"pySumHuge\"\n  )\n\nsource R from \"src.R\"\n  ( \"rMakeMat\"\n  , \"rSumMat\"\n  , \"rMake3d\"\n  , \"rMakeVec\"\n  , \"rSumVec\"\n  , \"rMake4d\"\n  , \"rSum4d\"\n  , \"rMakeIntVec\"\n  , \"rSumIntVec\"\n  , \"rMakeBoolVec\"\n  , \"rCountTrue\"\n  , \"rMakeEmpty\"\n  , \"rSumEmpty\"\n  , \"rMakeSingle\"\n  , \"rSumSingle\"\n  , \"rMakeLarge\"\n  , \"rSumLarge\"\n  , \"rMakeHuge\"\n  , \"rSumHuge\"\n  )\n\n-- === Type signatures ===\n\n-- 2D Real (3x4, values 1..12, sum=78)\ncppMakeMat :: Tensor2 3 4 Real\ncppSumMat :: Tensor2 3 4 Real -> Real\npyMakeMat :: Tensor2 3 4 Real\npySumMat :: Tensor2 3 4 Real -> Real\nrMakeMat :: Tensor2 3 4 Real\nrSumMat :: Tensor2 3 4 Real -> Real\n\n-- 3D Real (2x3x4, values 0..23, sum=276)\ncppMake3d :: Tensor3 2 3 4 Real\ncppSum3d :: Tensor3 2 3 4 Real -> Real\npyMake3d :: Tensor3 2 3 4 Real\npySum3d :: Tensor3 2 3 4 Real -> Real\nrMake3d :: Tensor3 2 3 4 Real\ncppGetCorners3d :: Tensor3 2 3 4 Real -> [Real]\n\n-- 1D Real (10 elements, values 1..10, sum=55)\ncppMakeVec :: Tensor1 10 Real\ncppSumVec :: Tensor1 10 Real -> Real\npyMakeVec :: Tensor1 10 Real\npySumVec :: Tensor1 10 Real -> Real\nrMakeVec :: Tensor1 10 Real\nrSumVec :: Tensor1 10 Real -> Real\n\n-- 4D Real (2x3x2x2, values 0..23, sum=276)\ncppMake4d :: Tensor4 2 3 2 2 Real\ncppSum4d :: Tensor4 2 3 2 2 Real -> Real\npyMake4d :: Tensor4 2 3 2 2 Real\npySum4d :: Tensor4 2 3 2 2 Real -> Real\nrMake4d :: Tensor4 2 3 2 2 Real\nrSum4d :: Tensor4 2 3 2 2 Real -> Real\n\n-- 1D Int (8 elements, values 10..17, sum=108)\ncppMakeIntVec :: Tensor1 8 Int\ncppSumIntVec :: Tensor1 8 Int -> Int\npyMakeIntVec :: Tensor1 8 Int\npySumIntVec :: Tensor1 8 Int -> Int\nrMakeIntVec :: Tensor1 8 Int\nrSumIntVec :: Tensor1 8 Int -> Int\n\n-- 1D Bool (6 elements: T,F,T,T,F,T -> count=4)\ncppMakeBoolVec :: Tensor1 6 Bool\ncppCountTrue :: Tensor1 6 Bool -> Int\npyMakeBoolVec :: Tensor1 6 Bool\npyCountTrue :: Tensor1 6 Bool -> Int\nrMakeBoolVec :: Tensor1 6 Bool\nrCountTrue :: Tensor1 6 Bool -> Int\n\n-- Empty tensor (0 elements, sum=0)\ncppMakeEmpty :: Tensor1 0 Real\ncppSumEmpty :: Tensor1 0 Real -> Real\npyMakeEmpty :: Tensor1 0 Real\npySumEmpty :: Tensor1 0 Real -> Real\nrMakeEmpty :: Tensor1 0 Real\nrSumEmpty :: Tensor1 0 Real -> Real\n\n-- Single element (1 element, value 42, sum=42)\ncppMakeSingle :: Tensor1 1 Real\ncppSumSingle :: Tensor1 1 Real -> Real\npyMakeSingle :: Tensor1 1 Real\npySumSingle :: Tensor1 1 Real -> Real\nrMakeSingle :: Tensor1 1 Real\nrSumSingle :: Tensor1 1 Real -> Real\n\n-- Large tensor (5000 doubles = 40KB, under SHM threshold)\n-- Values 0..4999, sum = 4999*5000/2 = 12497500\ncppMakeLarge :: Tensor1 5000 Real\ncppSumLarge :: Tensor1 5000 Real -> Real\npyMakeLarge :: Tensor1 5000 Real\npySumLarge :: Tensor1 5000 Real -> Real\nrMakeLarge :: Tensor1 5000 Real\nrSumLarge :: Tensor1 5000 Real -> Real\n\n-- Very large tensor (50000 doubles = 400KB, exceeds 64KB SHM threshold)\n-- Values 0..49999, sum = 49999*50000/2 = 1249975000\ncppMakeHuge :: Tensor1 50000 Real\ncppSumHuge :: Tensor1 50000 Real -> Real\npyMakeHuge :: Tensor1 50000 Real\npySumHuge :: Tensor1 50000 Real -> Real\nrMakeHuge :: Tensor1 50000 Real\nrSumHuge :: Tensor1 50000 Real -> Real\n\n-- === 2D Real: all six language pairs ===\ncpSum2d :: Real\ncpSum2d = pySumMat cppMakeMat\npcSum2d :: Real\npcSum2d = cppSumMat pyMakeMat\nprSum2d :: Real\nprSum2d = rSumMat pyMakeMat\nrpSum2d :: Real\nrpSum2d = pySumMat rMakeMat\ncrSum2d :: Real\ncrSum2d = rSumMat cppMakeMat\nrcSum2d :: Real\nrcSum2d = cppSumMat rMakeMat\n\n-- === 3D Real ===\ncpSum3d :: Real\ncpSum3d = pySum3d cppMake3d\npcSum3d :: Real\npcSum3d = cppSum3d pyMake3d\nrTransposeVerify :: [Real]\nrTransposeVerify = cppGetCorners3d rMake3d\n\n-- === 1D Real: all six language pairs ===\ncpSum1d :: Real\ncpSum1d = pySumVec cppMakeVec\npcSum1d :: Real\npcSum1d = cppSumVec pyMakeVec\nprSum1d :: Real\nprSum1d = rSumVec pyMakeVec\nrpSum1d :: Real\nrpSum1d = pySumVec rMakeVec\ncrSum1d :: Real\ncrSum1d = rSumVec cppMakeVec\nrcSum1d :: Real\nrcSum1d = cppSumVec rMakeVec\n\n-- === 4D Real ===\ncpSum4d :: Real\ncpSum4d = pySum4d cppMake4d\npcSum4d :: Real\npcSum4d = cppSum4d pyMake4d\nprSum4d :: Real\nprSum4d = rSum4d pyMake4d\nrpSum4d :: Real\nrpSum4d = pySum4d rMake4d\n\n-- === Int element type: all six pairs (1D) ===\ncpSumInt :: Int\ncpSumInt = pySumIntVec cppMakeIntVec\npcSumInt :: Int\npcSumInt = cppSumIntVec pyMakeIntVec\nprSumInt :: Int\nprSumInt = rSumIntVec pyMakeIntVec\nrpSumInt :: Int\nrpSumInt = pySumIntVec rMakeIntVec\ncrSumInt :: Int\ncrSumInt = rSumIntVec cppMakeIntVec\nrcSumInt :: Int\nrcSumInt = cppSumIntVec rMakeIntVec\n\n-- === Bool element type: all six pairs (1D) ===\ncpCountTrue :: Int\ncpCountTrue = pyCountTrue cppMakeBoolVec\npcCountTrue :: Int\npcCountTrue = cppCountTrue pyMakeBoolVec\nprCountTrue :: Int\nprCountTrue = rCountTrue pyMakeBoolVec\nrpCountTrue :: Int\nrpCountTrue = pyCountTrue rMakeBoolVec\ncrCountTrue :: Int\ncrCountTrue = rCountTrue cppMakeBoolVec\nrcCountTrue :: Int\nrcCountTrue = cppCountTrue rMakeBoolVec\n\n-- === Edge cases: empty tensors ===\ncpSumEmpty :: Real\ncpSumEmpty = pySumEmpty cppMakeEmpty\npcSumEmpty :: Real\npcSumEmpty = cppSumEmpty pyMakeEmpty\nprSumEmpty :: Real\nprSumEmpty = rSumEmpty pyMakeEmpty\nrpSumEmpty :: Real\nrpSumEmpty = pySumEmpty rMakeEmpty\n\n-- === Edge cases: single-element tensors ===\ncpSumSingle :: Real\ncpSumSingle = pySumSingle cppMakeSingle\npcSumSingle :: Real\npcSumSingle = cppSumSingle pyMakeSingle\nprSumSingle :: Real\nprSumSingle = rSumSingle pyMakeSingle\nrpSumSingle :: Real\nrpSumSingle = pySumSingle rMakeSingle\n\n-- === Large tensor (40KB, all language pairs) ===\ncpSumLarge :: Real\ncpSumLarge = pySumLarge cppMakeLarge\npcSumLarge :: Real\npcSumLarge = cppSumLarge pyMakeLarge\nprSumLarge :: Real\nprSumLarge = rSumLarge pyMakeLarge\nrpSumLarge :: Real\nrpSumLarge = pySumLarge rMakeLarge\ncrSumLarge :: Real\ncrSumLarge = rSumLarge cppMakeLarge\nrcSumLarge :: Real\nrcSumLarge = cppSumLarge rMakeLarge\n\n-- === Very large tensor (400KB, crosses SHM threshold, all six pairs) ===\ncpSumHuge :: Real\ncpSumHuge = pySumHuge cppMakeHuge\npcSumHuge :: Real\npcSumHuge = cppSumHuge pyMakeHuge\nprSumHuge :: Real\nprSumHuge = rSumHuge pyMakeHuge\nrpSumHuge :: Real\nrpSumHuge = pySumHuge rMakeHuge\ncrSumHuge :: Real\ncrSumHuge = rSumHuge cppMakeHuge\nrcSumHuge :: Real\nrcSumHuge = cppSumHuge rMakeHuge\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cross/src.R",
    "content": "# --- 2D Real (3x4, values 1..12) ---\nrMakeMat <- function() {\n  matrix(as.double(1:12), nrow=3, ncol=4, byrow=TRUE)\n}\n\nrSumMat <- function(m) {\n  sum(m)\n}\n\n# --- 3D Real (2x3x4, values 1..24 but R is 1-indexed)\nrMake3d <- function() {\n  array(as.double(1:24), dim=c(2, 3, 4))\n}\n\n# --- 1D Real (10 elements, values 1..10) ---\nrMakeVec <- function() {\n  array(as.double(1:10), dim=c(10))\n}\n\nrSumVec <- function(v) {\n  sum(v)\n}\n\n# --- 4D Real (2x3x2x2, values 0..23) ---\nrMake4d <- function() {\n  array(as.double(0:23), dim=c(2, 3, 2, 2))\n}\n\nrSum4d <- function(t) {\n  sum(t)\n}\n\n# --- 1D Int (8 elements, values 10..17) ---\nrMakeIntVec <- function() {\n  array(as.integer(10:17), dim=c(8))\n}\n\nrSumIntVec <- function(v) {\n  as.integer(sum(v))\n}\n\n# --- 1D Bool (6 elements: T,F,T,T,F,T) ---\nrMakeBoolVec <- function() {\n  array(c(TRUE, FALSE, TRUE, TRUE, FALSE, TRUE), dim=c(6))\n}\n\nrCountTrue <- function(v) {\n  as.integer(sum(v))\n}\n\n# --- Empty tensor (0 elements) ---\nrMakeEmpty <- function() {\n  array(double(0), dim=c(0))\n}\n\nrSumEmpty <- function(v) {\n  sum(v)\n}\n\n# --- Single element (value 42) ---\nrMakeSingle <- function() {\n  array(42.0, dim=c(1))\n}\n\nrSumSingle <- function(v) {\n  sum(v)\n}\n\n# --- Large tensor (5000 doubles, values 0..4999) ---\nrMakeLarge <- function() {\n  array(as.double(0:4999), dim=c(5000))\n}\n\nrSumLarge <- function(v) {\n  sum(v)\n}\n\n# --- Very large tensor (50000 doubles, crosses SHM threshold) ---\nrMakeHuge <- function() {\n  array(as.double(0:49999), dim=c(50000))\n}\n\nrSumHuge <- function(v) {\n  sum(v)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cross/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n#include <cstdint>\n\n// --- 2D Real (3x4, values 1..12) ---\n\nmlc::Tensor2<double> cppMakeMat() {\n    mlc::Tensor2<double> m({3, 4});\n    for (int i = 0; i < 3; i++)\n        for (int j = 0; j < 4; j++)\n            m(i, j) = (double)(i * 4 + j + 1);\n    return m;\n}\n\ndouble cppSumMat(const mlc::Tensor2<double>& m) {\n    double s = 0;\n    for (size_t k = 0; k < m.size(); k++) s += m.data()[k];\n    return s;\n}\n\n// --- 3D Real (2x3x4, values 0..23) ---\n\nmlc::Tensor3<double> cppMake3d() {\n    mlc::Tensor3<double> t({2, 3, 4});\n    for (size_t k = 0; k < 24; k++) t[k] = (double)k;\n    return t;\n}\n\ndouble cppSum3d(const mlc::Tensor3<double>& t) {\n    double s = 0;\n    for (size_t k = 0; k < t.size(); k++) s += t.data()[k];\n    return s;\n}\n\nstd::vector<double> cppGetCorners3d(const mlc::Tensor3<double>& t) {\n    std::vector<double> result;\n    result.push_back(t.data()[0*12 + 0*4 + 0]);  // t(0,0,0)\n    result.push_back(t.data()[1*12 + 0*4 + 0]);  // t(1,0,0)\n    result.push_back(t.data()[0*12 + 1*4 + 0]);  // t(0,1,0)\n    result.push_back(t.data()[1*12 + 2*4 + 3]);  // t(1,2,3)\n    return result;\n}\n\n// --- 1D Real (10 elements, values 1..10) ---\n\nmlc::Tensor1<double> cppMakeVec() {\n    mlc::Tensor1<double> v({10});\n    for (int i = 0; i < 10; i++) v[i] = (double)(i + 1);\n    return v;\n}\n\ndouble cppSumVec(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n// --- 4D Real (2x3x2x2, values 0..23) ---\n\nmlc::Tensor4<double> cppMake4d() {\n    mlc::Tensor4<double> t({2, 3, 2, 2});\n    for (size_t k = 0; k < 24; k++) t[k] = (double)k;\n    return t;\n}\n\ndouble cppSum4d(const mlc::Tensor4<double>& t) {\n    double s = 0;\n    for (size_t k = 0; k < t.size(); k++) s += t.data()[k];\n    return s;\n}\n\n// --- 1D Int (8 elements, values 10..17) ---\n\nmlc::Tensor1<int> cppMakeIntVec() {\n    mlc::Tensor1<int> v({8});\n    for (int i = 0; i < 8; i++) v[i] = i + 10;\n    return v;\n}\n\nint cppSumIntVec(const mlc::Tensor1<int>& v) {\n    int s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n// --- 1D Bool (6 elements: T,F,T,T,F,T) ---\n\nmlc::Tensor1<bool> cppMakeBoolVec() {\n    mlc::Tensor1<bool> v({6});\n    v[0] = 1; v[1] = 0; v[2] = 1; v[3] = 1; v[4] = 0; v[5] = 1;\n    return v;\n}\n\nint cppCountTrue(const mlc::Tensor1<bool>& v) {\n    int count = 0;\n    for (size_t k = 0; k < v.size(); k++) {\n        if (v.data()[k]) count++;\n    }\n    return count;\n}\n\n// --- Empty tensor (0 elements) ---\n\nmlc::Tensor1<double> cppMakeEmpty() {\n    return mlc::Tensor1<double>({0});\n}\n\ndouble cppSumEmpty(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n// --- Single element (value 42) ---\n\nmlc::Tensor1<double> cppMakeSingle() {\n    mlc::Tensor1<double> v({1});\n    v[0] = 42.0;\n    return v;\n}\n\ndouble cppSumSingle(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n// --- Large tensor (5000 doubles = 40KB) ---\n\nmlc::Tensor1<double> cppMakeLarge() {\n    mlc::Tensor1<double> v({5000});\n    for (int i = 0; i < 5000; i++) v[i] = (double)i;\n    return v;\n}\n\ndouble cppSumLarge(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n// --- Very large tensor (50000 doubles = 400KB, crosses SHM threshold) ---\n\nmlc::Tensor1<double> cppMakeHuge() {\n    mlc::Tensor1<double> v({50000});\n    for (int i = 0; i < 50000; i++) v[i] = (double)i;\n    return v;\n}\n\ndouble cppSumHuge(const mlc::Tensor1<double>& v) {\n    double s = 0;\n    for (size_t k = 0; k < v.size(); k++) s += v.data()[k];\n    return s;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-comprehensive-cross/src.py",
    "content": "import numpy as np\n\n# --- 2D Real (3x4, values 1..12) ---\ndef pyMakeMat():\n    return np.arange(1, 13, dtype=np.float64).reshape(3, 4)\n\ndef pySumMat(m):\n    return float(np.sum(m))\n\n# --- 3D Real (2x3x4, values 0..23) ---\ndef pyMake3d():\n    return np.arange(24, dtype=np.float64).reshape(2, 3, 4)\n\ndef pySum3d(t):\n    return float(np.sum(t))\n\n# --- 1D Real (10 elements, values 1..10) ---\ndef pyMakeVec():\n    return np.arange(1, 11, dtype=np.float64)\n\ndef pySumVec(v):\n    return float(np.sum(v))\n\n# --- 4D Real (2x3x2x2, values 0..23) ---\ndef pyMake4d():\n    return np.arange(24, dtype=np.float64).reshape(2, 3, 2, 2)\n\ndef pySum4d(t):\n    return float(np.sum(t))\n\n# --- 1D Int (8 elements, values 10..17) ---\ndef pyMakeIntVec():\n    return np.arange(10, 18, dtype=np.int32)\n\ndef pySumIntVec(v):\n    return int(np.sum(v))\n\n# --- 1D Bool (6 elements: T,F,T,T,F,T) ---\ndef pyMakeBoolVec():\n    return np.array([True, False, True, True, False, True])\n\ndef pyCountTrue(v):\n    return int(np.sum(v))\n\n# --- Empty tensor (0 elements) ---\ndef pyMakeEmpty():\n    return np.array([], dtype=np.float64)\n\ndef pySumEmpty(v):\n    return float(np.sum(v))\n\n# --- Single element (value 42) ---\ndef pyMakeSingle():\n    return np.array([42.0], dtype=np.float64)\n\ndef pySumSingle(v):\n    return float(np.sum(v))\n\n# --- Large tensor (5000 doubles, values 0..4999) ---\ndef pyMakeLarge():\n    return np.arange(5000, dtype=np.float64)\n\ndef pySumLarge(v):\n    return float(np.sum(v))\n\n# --- Very large tensor (50000 doubles, crosses SHM threshold) ---\ndef pyMakeHuge():\n    return np.arange(50000, dtype=np.float64)\n\ndef pySumHuge(v):\n    return float(np.sum(v))\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-cp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-cp/exp.txt",
    "content": "78\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-cp/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-cpp\nimport root-py\n\nsource Cpp from \"src.hpp\" (\"makeMatrix\")\nsource Py from \"src.py\" (\"sumAll\")\n\nmakeMatrix :: Tensor2 3 4 Real\nsumAll :: Tensor2 3 4 Real -> Real\n\nresult :: Real\nresult = sumAll makeMatrix\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-cp/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n\nmlc::Tensor2<double> makeMatrix() {\n    mlc::Tensor2<double> m({3, 4});\n    for (int i = 0; i < 3; i++)\n        for (int j = 0; j < 4; j++)\n            m(i, j) = (double)(i * 4 + j + 1);\n    return m;\n}\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-cp/src.py",
    "content": "import numpy as np\n\ndef sumAll(m):\n    return float(np.sum(m))\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-dimensions/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- t1d ---\" > obs.txt\n\t./nexus t1d >> obs.txt 2>> obs.err\n\techo \"--- t2d ---\" >> obs.txt\n\t./nexus t2d >> obs.txt 2>> obs.err\n\techo \"--- t3d ---\" >> obs.txt\n\t./nexus t3d >> obs.txt 2>> obs.err\n\techo \"--- t4d ---\" >> obs.txt\n\t./nexus t4d >> obs.txt 2>> obs.err\n\techo \"--- t5d ---\" >> obs.txt\n\t./nexus t5d >> obs.txt 2>> obs.err\n\techo \"--- testSum1d ---\" >> obs.txt\n\t./nexus testSum1d >> obs.txt 2>> obs.err\n\techo \"--- testSum2d ---\" >> obs.txt\n\t./nexus testSum2d >> obs.txt 2>> obs.err\n\techo \"--- testSum3d ---\" >> obs.txt\n\t./nexus testSum3d >> obs.txt 2>> obs.err\n\techo \"--- testSum4d ---\" >> obs.txt\n\t./nexus testSum4d >> obs.txt 2>> obs.err\n\techo \"--- testSum5d ---\" >> obs.txt\n\t./nexus testSum5d >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-dimensions/exp.txt",
    "content": "--- t1d ---\n[0,1,2,3,4,5]\n--- t2d ---\n[[0,1,2],[3,4,5]]\n--- t3d ---\n[[[0,1,2,3],[4,5,6,7],[8,9,10,11]],[[12,13,14,15],[16,17,18,19],[20,21,22,23]]]\n--- t4d ---\n[[[[0,1],[2,3]],[[4,5],[6,7]],[[8,9],[10,11]]],[[[12,13],[14,15]],[[16,17],[18,19]],[[20,21],[22,23]]]]\n--- t5d ---\n[[[[[0,1],[2,3],[4,5]],[[6,7],[8,9],[10,11]]],[[[12,13],[14,15],[16,17]],[[18,19],[20,21],[22,23]]]],[[[[24,25],[26,27],[28,29]],[[30,31],[32,33],[34,35]]],[[[36,37],[38,39],[40,41]],[[42,43],[44,45],[46,47]]]]]\n--- testSum1d ---\n15\n--- testSum2d ---\n15\n--- testSum3d ---\n276\n--- testSum4d ---\n276\n--- testSum5d ---\n1128\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-dimensions/main.loc",
    "content": "-- Test all tensor dimensions (1D through 5D) with Python->C++ cross-language.\nmodule main\n  ( t1d\n  , t2d\n  , t3d\n  , t4d\n  , t5d\n  , testSum1d\n  , testSum2d\n  , testSum3d\n  , testSum4d\n  , testSum5d\n  )\n\nimport root\nimport root-py\nimport root-cpp\n\nsource Py from \"src.py\"\n  ( \"make1d\" as pyMake1d\n  , \"make2d\" as pyMake2d\n  , \"make3d\" as pyMake3d\n  , \"make4d\" as pyMake4d\n  , \"make5d\" as pyMake5d\n  )\n\nsource Cpp from \"src.hpp\"\n  ( \"cppSum1d\"\n  , \"cppSum2d\"\n  , \"cppSum3d\"\n  , \"cppSum4d\"\n  , \"cppSum5d\"\n  )\n\npyMake1d :: Tensor1 6 Int\npyMake2d :: Tensor2 2 3 Int\npyMake3d :: Tensor3 2 3 4 Int\npyMake4d :: Tensor4 2 3 2 2 Int\npyMake5d :: Tensor5 2 2 2 3 2 Int\n\ncppSum1d :: Tensor1 6 Int -> Int\ncppSum2d :: Tensor2 2 3 Int -> Int\ncppSum3d :: Tensor3 2 3 4 Int -> Int\ncppSum4d :: Tensor4 2 3 2 2 Int -> Int\ncppSum5d :: Tensor5 2 2 2 3 2 Int -> Int\n\nt1d = pyMake1d\nt2d = pyMake2d\nt3d = pyMake3d\nt4d = pyMake4d\nt5d = pyMake5d\n\ntestSum1d :: Int\ntestSum1d = cppSum1d pyMake1d\n\ntestSum2d :: Int\ntestSum2d = cppSum2d pyMake2d\n\ntestSum3d :: Int\ntestSum3d = cppSum3d pyMake3d\n\ntestSum4d :: Int\ntestSum4d = cppSum4d pyMake4d\n\ntestSum5d :: Int\ntestSum5d = cppSum5d pyMake5d\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-dimensions/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n\nmlc::Tensor<int, 4> cppMake4d() {\n    int64_t dims[4] = {2, 3, 2, 2};\n    mlc::Tensor<int, 4> t(dims);\n    for (size_t i = 0; i < t.size(); i++) t[i] = (int)i;\n    return t;\n}\n\nmlc::Tensor<int, 5> cppMake5d() {\n    int64_t dims[5] = {2, 2, 2, 3, 2};\n    mlc::Tensor<int, 5> t(dims);\n    for (size_t i = 0; i < t.size(); i++) t[i] = (int)i;\n    return t;\n}\n\nint cppSum1d(const mlc::Tensor1<int>& t) {\n    int s = 0; for (size_t i = 0; i < t.size(); i++) s += t.data()[i]; return s;\n}\nint cppSum2d(const mlc::Tensor2<int>& t) {\n    int s = 0; for (size_t i = 0; i < t.size(); i++) s += t.data()[i]; return s;\n}\nint cppSum3d(const mlc::Tensor3<int>& t) {\n    int s = 0; for (size_t i = 0; i < t.size(); i++) s += t.data()[i]; return s;\n}\nint cppSum4d(const mlc::Tensor<int, 4>& t) {\n    int s = 0; for (size_t i = 0; i < t.size(); i++) s += t.data()[i]; return s;\n}\nint cppSum5d(const mlc::Tensor<int, 5>& t) {\n    int s = 0; for (size_t i = 0; i < t.size(); i++) s += t.data()[i]; return s;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-dimensions/src.py",
    "content": "import numpy as np\n\ndef make1d():\n    return np.arange(6, dtype=np.int32)\n\ndef make2d():\n    return np.arange(6, dtype=np.int32).reshape(2, 3)\n\ndef make3d():\n    return np.arange(24, dtype=np.int32).reshape(2, 3, 4)\n\ndef make4d():\n    return np.arange(24, dtype=np.int32).reshape(2, 3, 2, 2)\n\ndef make5d():\n    return np.arange(48, dtype=np.int32).reshape(2, 2, 2, 3, 2)\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-basic/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testMakeVec ---\" > obs.txt\n\t./nexus testMakeVec >> obs.txt 2>> obs.err\n\techo \"--- testMakeMat ---\" >> obs.txt\n\t./nexus testMakeMat >> obs.txt 2>> obs.err\n\techo \"--- testId ---\" >> obs.txt\n\t./nexus testId >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-basic/exp.txt",
    "content": "--- testMakeVec ---\n[1,2,3]\n--- testMakeMat ---\n[[0,1,2],[10,11,12]]\n--- testId ---\n[1,2,3,4]\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-basic/labeled.hpp",
    "content": "#ifndef __LABELED_HPP__\n#define __LABELED_HPP__\n\n#include \"mlc_tensor.hpp\"\n\n// Create a vector of given length, filled with 1.0, 2.0, ...\nmlc::Tensor1<double> makeVec(int n) {\n    mlc::Tensor1<double> v({(int64_t)n});\n    for (int i = 0; i < n; i++) v[i] = (double)(i + 1);\n    return v;\n}\n\n// Create an m x n matrix filled with row*10 + col\nmlc::Tensor2<double> makeMat(int m, int n) {\n    mlc::Tensor2<double> mat({(int64_t)m, (int64_t)n});\n    for (int i = 0; i < m; i++)\n        for (int j = 0; j < n; j++)\n            mat(i, j) = (double)(i * 10 + j);\n    return mat;\n}\n\n// Identity: return the vector unchanged (must clone since Tensor has no copy ctor)\nmlc::Tensor1<double> idVec(const mlc::Tensor1<double>& v) {\n    mlc::Tensor1<double> out({v.shape(0)});\n    for (size_t i = 0; i < v.size(); i++) out[i] = v.data()[i];\n    return out;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-basic/main.loc",
    "content": "-- Test labeled nat params (m:Int syntax)\n-- Labels bind Int args to nat-kinded type params\n\nmodule main (testMakeVec, testMakeMat, testId)\n\nimport root\nimport root-cpp\n\nsource Cpp from \"labeled.hpp\"\n  ( \"makeVec\"\n  , \"makeMat\"\n  , \"idVec\"\n  )\n\n-- Labeled: Int arg determines vector length\nmakeVec :: n:Int -> Tensor1 n Real\n\n-- Labeled: two Int args determine matrix dims\nmakeMat :: m:Int -> n:Int -> Tensor2 m n Real\n\n-- Generic: works for any length vector\nidVec :: Tensor1 n Real -> Tensor1 n Real\n\ntestMakeVec :: Tensor1 3 Real\ntestMakeVec = makeVec 3\n\ntestMakeMat :: Tensor2 2 3 Real\ntestMakeMat = makeMat 2 3\n\ntestId :: Tensor1 4 Real\ntestId = idVec (makeVec 4)\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-labeled/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- predictDigit ---\" > obs.txt\n\t./nexus predictDigit >> obs.txt 2>> obs.err\n\techo \"--- testConv ---\" >> obs.txt\n\t./nexus testConv >> obs.txt 2>> obs.err\n\techo \"--- testRelu ---\" >> obs.txt\n\t./nexus testRelu >> obs.txt 2>> obs.err\n\techo \"--- testFlatten ---\" >> obs.txt\n\t./nexus testFlatten >> obs.txt 2>> obs.err\n\techo \"--- testDense ---\" >> obs.txt\n\t./nexus testDense >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-labeled/cnn.hpp",
    "content": "#ifndef __CNN_HPP__\n#define __CNN_HPP__\n\n#include \"mlc_tensor.hpp\"\n#include <cmath>\n#include <algorithm>\n\n// --- Input data (now parameterized by dimension args) ---\n\n// Create an image with a cross pattern of the given dimensions\nmlc::Tensor2<double> makeImage(int h, int w) {\n    mlc::Tensor2<double> img({(int64_t)h, (int64_t)w});\n    int midR = h / 2;\n    int midC = w / 2;\n    // vertical bar\n    for (int i = 0; i < h; i++) img(i, midC) = 1.0;\n    // horizontal bar\n    for (int j = 0; j < w; j++) img(midR, j) = 1.0;\n    return img;\n}\n\n// Create k convolution kernels of size fh x fw\n// Kernel 0: horizontal edge detector\n// Kernel 1: vertical edge detector (if k >= 2)\nmlc::Tensor3<double> makeKernels(int k, int fh, int fw) {\n    mlc::Tensor3<double> kern({(int64_t)k, (int64_t)fh, (int64_t)fw});\n    int ksize = fh * fw;\n    if (k >= 1) {\n        // Kernel 0: horizontal edges (-1 top, 0 mid, 1 bottom)\n        for (int i = 0; i < fh; i++) {\n            double row_val = (i < fh/2) ? -1.0 : (i > fh/2) ? 1.0 : 0.0;\n            for (int j = 0; j < fw; j++) {\n                kern[i * fw + j] = row_val;\n            }\n        }\n    }\n    if (k >= 2) {\n        // Kernel 1: vertical edges (-1 left, 0 mid, 1 right)\n        for (int i = 0; i < fh; i++) {\n            for (int j = 0; j < fw; j++) {\n                double col_val = (j < fw/2) ? -1.0 : (j > fw/2) ? 1.0 : 0.0;\n                kern[ksize + i * fw + j] = col_val;\n            }\n        }\n    }\n    return kern;\n}\n\n// Bias per filter\nmlc::Tensor1<double> makeBias(int k) {\n    mlc::Tensor1<double> b({(int64_t)k});\n    for (int i = 0; i < k; i++) b[i] = 0.1;\n    return b;\n}\n\n// --- Layers ---\n\n// 2D convolution (valid, no padding, stride 1)\nmlc::Tensor3<double> conv2d(\n    const mlc::Tensor2<double>& image,\n    const mlc::Tensor3<double>& kernels,\n    const mlc::Tensor1<double>& bias)\n{\n    int64_t H = image.shape(0);\n    int64_t W = image.shape(1);\n    int64_t F = kernels.shape(0);\n    int64_t Kh = kernels.shape(1);\n    int64_t Kw = kernels.shape(2);\n    int64_t Oh = H - Kh + 1;\n    int64_t Ow = W - Kw + 1;\n\n    mlc::Tensor3<double> out({F, Oh, Ow});\n    for (int64_t f = 0; f < F; f++) {\n        for (int64_t i = 0; i < Oh; i++) {\n            for (int64_t j = 0; j < Ow; j++) {\n                double sum = bias[f];\n                for (int64_t ki = 0; ki < Kh; ki++) {\n                    for (int64_t kj = 0; kj < Kw; kj++) {\n                        sum += image(i + ki, j + kj)\n                             * kernels.data()[f * Kh * Kw + ki * Kw + kj];\n                    }\n                }\n                out.data()[f * Oh * Ow + i * Ow + j] = sum;\n            }\n        }\n    }\n    return out;\n}\n\n// ReLU: element-wise max(0, x)\nmlc::Tensor3<double> reluMap(const mlc::Tensor3<double>& t) {\n    mlc::Tensor3<double> out({t.shape(0), t.shape(1), t.shape(2)});\n    for (size_t i = 0; i < t.size(); i++) {\n        out[i] = std::max(0.0, t.data()[i]);\n    }\n    return out;\n}\n\n// Flatten 3D to 1D\nmlc::Tensor1<double> flatten3d(const mlc::Tensor3<double>& t) {\n    size_t n = t.size();\n    mlc::Tensor1<double> out({(int64_t)n});\n    for (size_t i = 0; i < n; i++) {\n        out[i] = t.data()[i];\n    }\n    return out;\n}\n\n// Dense layer: out = W * x + b\nmlc::Tensor1<double> dense(\n    const mlc::Tensor2<double>& W,\n    const mlc::Tensor1<double>& b,\n    const mlc::Tensor1<double>& x)\n{\n    int64_t out_dim = W.shape(0);\n    int64_t in_dim = W.shape(1);\n    mlc::Tensor1<double> out({out_dim});\n    for (int64_t i = 0; i < out_dim; i++) {\n        double sum = b[i];\n        for (int64_t j = 0; j < in_dim; j++) {\n            sum += W(i, j) * x[j];\n        }\n        out[i] = sum;\n    }\n    return out;\n}\n\n// Dense layer weights (parameterized by dimensions)\nmlc::Tensor2<double> makeWeights(int out_dim, int in_dim) {\n    mlc::Tensor2<double> W({(int64_t)out_dim, (int64_t)in_dim});\n    for (int i = 0; i < out_dim; i++) {\n        for (int j = 0; j < in_dim; j++) {\n            if (i == 0) W(i, j) = 0.01 * (j % 5 - 2);  // small, centered around 0\n            else if (i == 1) W(i, j) = 0.1;              // uniformly positive\n            else W(i, j) = -0.05;                         // uniformly negative\n        }\n    }\n    return W;\n}\n\n// Dense bias (parameterized by dimension)\nmlc::Tensor1<double> makeDenseBias(int n) {\n    mlc::Tensor1<double> b({(int64_t)n});\n    for (int i = 0; i < n; i++) b[i] = 0.0;\n    if (n >= 2) b[1] = 0.5;  // boost class 1\n    return b;\n}\n\n// Argmax: index of maximum element\nint argmax(const mlc::Tensor1<double>& v) {\n    int best = 0;\n    for (int64_t i = 1; i < v.shape(0); i++) {\n        if (v[i] > v[best]) best = (int)i;\n    }\n    return best;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-labeled/exp.txt",
    "content": "--- predictDigit ---\n1\n--- testConv ---\n[[[2.1,2.1,2.1],[0.1,0.1,0.1],[-1.9,-1.9,-1.9]],[[2.1,0.1,-1.9],[2.1,0.1,-1.9],[2.1,0.1,-1.9]]]\n--- testRelu ---\n[[[2.1,2.1,2.1],[0.1,0.1,0.1],[0,0,0]],[[2.1,0.1,0],[2.1,0.1,0],[2.1,0.1,0]]]\n--- testFlatten ---\n[2.1,2.1,2.1,0.1,0.1,0.1,0,0,0,2.1,0.1,0,2.1,0.1,0,2.1,0.1,0]\n--- testDense ---\n[-0.064,1.82,-0.66]\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nat-labeled/main.loc",
    "content": "-- Simple CNN inference for character recognition using morloc tensors.\n-- Architecture: conv2d(3x3, 2 filters) -> relu -> flatten -> dense(18->3) -> argmax\n-- Tests labeled nat params (m:Int syntax) and generic nat dimensions.\n\nmodule main\n  ( predictDigit\n  , testConv\n  , testRelu\n  , testFlatten\n  , testDense\n  )\n\nimport root\nimport root-cpp\n\nsource Cpp from \"cnn.hpp\"\n  ( \"makeImage\"\n  , \"makeKernels\"\n  , \"makeBias\"\n  , \"makeWeights\"\n  , \"makeDenseBias\"\n  , \"conv2d\"\n  , \"reluMap\"\n  , \"flatten3d\"\n  , \"dense\"\n  , \"argmax\"\n  )\n\n-- Labeled params: Int args determine tensor dimensions\nmakeImage :: h:Int -> w:Int -> Tensor2 h w Real\nmakeKernels :: k:Int -> fh:Int -> fw:Int -> Tensor3 k fh fw Real\nmakeBias :: k:Int -> Tensor1 k Real\n\n-- Generic dimensions: ops work for any tensor sizes\nconv2d :: Tensor2 h w Real -> Tensor3 k fh fw Real -> Tensor1 k Real -> Tensor3 k (h - fh + 1) (w - fw + 1) Real\nreluMap :: Tensor3 a b c Real -> Tensor3 a b c Real\nflatten3d :: Tensor3 a b c Real -> Tensor1 (a * b * c) Real\n\n-- Dense layer with generic dimensions\nmakeWeights :: nout:Int -> nin:Int -> Tensor2 nout nin Real\nmakeDenseBias :: n:Int -> Tensor1 n Real\ndense :: Tensor2 m n Real -> Tensor1 m Real -> Tensor1 n Real -> Tensor1 m Real\n\n-- Argmax: find index of maximum value\nargmax :: Tensor1 n Real -> Int\n\n-- Full pipeline: image -> predicted class\npredictDigit :: Int\npredictDigit =\n  let image = makeImage 5 5\n      kernels = makeKernels 2 3 3\n      bias = makeBias 2\n      convOut = conv2d image kernels bias\n      activated = reluMap convOut\n      flat = flatten3d activated\n      weights = makeWeights 3 18\n      denseBias = makeDenseBias 3\n      logits = dense weights denseBias flat\n  in argmax logits\n\n-- Individual layer tests\n-- testConv :: Tensor3 2 3 3 Real\ntestConv = conv2d (makeImage 5 5) (makeKernels 2 3 3) (makeBias 2)\n\n-- testRelu :: Tensor3 2 3 3 Real\ntestRelu = reluMap (conv2d (makeImage 5 5) (makeKernels 2 3 3) (makeBias 2))\n\n-- testFlatten :: Tensor1 18 Real\ntestFlatten = flatten3d (reluMap (conv2d (makeImage 5 5) (makeKernels 2 3 3) (makeBias 2)))\n\n-- testDense :: Tensor1 3 Real\ntestDense =\n  let flat = flatten3d (reluMap (conv2d (makeImage 5 5) (makeKernels 2 3 3) (makeBias 2)))\n  in dense (makeWeights 3 18) (makeDenseBias 3) flat\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nexus-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus makeMatrix > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nexus-cpp/exp.txt",
    "content": "[[1,2,3,4],[5,6,7,8],[9,10,11,12]]\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nexus-cpp/main.loc",
    "content": "module main (makeMatrix)\n\nimport root\nimport root-cpp\n\nsource Cpp from \"src.hpp\" (\"makeMatrix\")\n\nmakeMatrix :: Tensor2 3 4 Int\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-nexus-cpp/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n\nmlc::Tensor2<int> makeMatrix() {\n    mlc::Tensor2<int> m({3, 4});\n    for (int i = 0; i < 3; i++)\n        for (int j = 0; j < 4; j++)\n            m(i, j) = i * 4 + j + 1;\n    return m;\n}\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pc/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pc/exp.txt",
    "content": "78\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pc/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-cpp\nimport root-py\n\nsource Py from \"src.py\" (\"makeMatrix\")\nsource Cpp from \"src.hpp\" (\"sumAll\")\n\nmakeMatrix :: Tensor2 3 4 Real\nsumAll :: Tensor2 3 4 Real -> Real\n\nresult :: Real\nresult = sumAll makeMatrix\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pc/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n\ndouble sumAll(const mlc::Tensor2<double>& m) {\n    double s = 0;\n    for (size_t k = 0; k < m.size(); k++) s += m.data()[k];\n    return s;\n}\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pc/src.py",
    "content": "import numpy as np\n\ndef makeMatrix():\n    return np.arange(1, 13, dtype=np.float64).reshape(3, 4)\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pr/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pr/exp.txt",
    "content": "78\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pr/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-py\nimport root-r (Real)\n\nsource Py from \"src.py\" (\"makeMatrix\")\nsource R from \"src.R\" (\"sumAll\")\n\nmakeMatrix :: Tensor2 3 4 Real\nsumAll :: Tensor2 3 4 Real -> Real\n\nresult :: Real\nresult = sumAll makeMatrix\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pr/src.R",
    "content": "sumAll <- function(m) {\n  sum(m)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-pr/src.py",
    "content": "import numpy as np\n\ndef makeMatrix():\n    return np.arange(1, 13, dtype=np.float64).reshape(3, 4)\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-rp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-rp/exp.txt",
    "content": "78\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-rp/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-py\nimport root-r (Real)\n\nsource R from \"src.R\" (\"makeMatrix\")\nsource Py from \"src.py\" (\"sumAll\")\n\nmakeMatrix :: Tensor2 3 4 Real\nsumAll :: Tensor2 3 4 Real -> Real\n\nresult :: Real\nresult = sumAll makeMatrix\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-rp/src.R",
    "content": "makeMatrix <- function() {\n  matrix(as.double(1:12), nrow=3, ncol=4, byrow=TRUE)\n}\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-rp/src.py",
    "content": "import numpy as np\n\ndef sumAll(m):\n    return float(np.sum(m))\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-table-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus result > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools *err\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-table-cpp/exp.txt",
    "content": "78\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-table-cpp/main.loc",
    "content": "module main (result)\n\nimport root\nimport root-cpp\n\nsource Cpp from \"src.hpp\" (\"makeMatrix\", \"sumAll\")\n\nmakeMatrix :: Tensor2 3 4 Real\nsumAll :: Tensor2 3 4 Real -> Real\n\nresult :: Real\nresult = sumAll makeMatrix\n"
  },
  {
    "path": "test-suite/golden-tests/tensor-table-cpp/src.hpp",
    "content": "#ifndef __SRC_HPP__\n#define __SRC_HPP__\n#include \"mlc_tensor.hpp\"\n\nmlc::Tensor2<double> makeMatrix() {\n    mlc::Tensor2<double> m({3, 4});\n    for (int i = 0; i < 3; i++)\n        for (int j = 0; j < 4; j++)\n            m(i, j) = (double)(i * 4 + j + 1);\n    return m;\n}\n\ndouble sumAll(const mlc::Tensor2<double>& m) {\n    double s = 0;\n    for (size_t k = 0; k < m.size(); k++) s += m.data()[k];\n    return s;\n}\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-basic/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- forceConst ---\" > obs.txt\n\t./nexus forceConst >> obs.txt 2>> obs.err\n\techo \"--- forceFun ---\" >> obs.txt\n\t./nexus forceFun >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-basic/exp.txt",
    "content": "--- forceConst ---\n42\n--- forceFun ---\n42\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-basic/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\nint add(int a, int b){\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-basic/main.loc",
    "content": "module main (forceConst, forceFun)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"add\")\n\ntype Cpp => Int = \"int\"\n\nadd :: Int -> Int -> Int\n\n-- basic: pure constant\nforceConst :: Int\nforceConst = 42\n\n-- pure function call\nforceFun :: Int\nforceFun = add 40 2\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-choose/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-choose/exp.txt",
    "content": "\"a\"\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-choose/foo.py",
    "content": "# mock of choose\ndef choose(xs):\n    return xs[0]\n\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-choose/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nsource Py from \"foo.py\" (\"choose\")\n\nchoose :: [a] -> <IO> a\n\nfoo :: <IO> Str\nfoo = choose [\"a\", \"b\"]\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-cross-force/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- combined ---\" > obs.txt\n\t./nexus combined >> obs.txt 2>> obs.err\n\techo \"--- addForced ---\" >> obs.txt\n\t./nexus addForced >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-cross-force/exp.txt",
    "content": "--- combined ---\n\"hello_42\"\n--- addForced ---\n84\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-cross-force/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <string>\n\nstd::string combine(std::string name, int value) {\n    return name + \"_\" + std::to_string(value);\n}\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-cross-force/foo.py",
    "content": "def get_value():\n    return 42\n\ndef get_name():\n    return \"hello\"\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-cross-force/main.loc",
    "content": "-- Test cross-language nullary effect evaluation as function arguments.\n\nmodule main (combined, addForced)\n\nimport root-cpp\nimport root-py\n\nsource Py from \"foo.py\" (\"get_name\", \"get_value\")\nsource Cpp from \"foo.hpp\" (\"combine\", \"add\")\n\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\n\nget_name :: <IO> Str\nget_value :: <IO> Int\ncombine :: Str -> Int -> Str\nadd :: Int -> Int -> Int\n\ncombined :: <IO> Str\ncombined = do\n    n <- get_name\n    v <- get_value\n    combine n v\n\naddForced :: <IO> Int\naddForced = do\n    x <- get_value\n    y <- get_value\n    add x y\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-do/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- doPure ---\" > obs.txt\n\t./nexus doPure >> obs.txt 2>> obs.err\n\techo \"--- doBind ---\" >> obs.txt\n\t./nexus doBind >> obs.txt 2>> obs.err\n\techo \"--- doBare ---\" >> obs.txt\n\t./nexus doBare >> obs.txt 2>> obs.err\n\techo \"--- doChain ---\" >> obs.txt\n\t./nexus doChain >> obs.txt 2>> obs.err\n\techo \"--- doMixed ---\" >> obs.txt\n\t./nexus doMixed >> obs.txt 2>> obs.err\n\techo \"--- doLet ---\" >> obs.txt\n\t./nexus doLet >> obs.txt 2>> obs.err\n\techo \"--- doLetBind ---\" >> obs.txt\n\t./nexus doLetBind >> obs.txt 2>> obs.err\n\techo \"--- doLetThunk ---\" >> obs.txt\n\t./nexus doLetThunk >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-do/exp.txt",
    "content": "--- doPure ---\n42\n--- doBind ---\nEVAL 5\n11\n--- doBare ---\nEVAL 3\n42\n--- doChain ---\nEVAL 3\nEVAL 6\n18\n--- doMixed ---\nEVAL 1\nEVAL 5\nEVAL 10\n11\n--- doLet ---\n8\n--- doLetBind ---\nEVAL 5\n21\n--- doLetThunk ---\nEVAL 1\nEVAL 3\n8\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-do/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-do/main.loc",
    "content": "-- Test do-notation with effects:\n--   1. doPure: pure value (no do-block needed)\n--   2. doBind: bind an evaluated effect to a variable\n--   3. doBare: bare statement evaluated and discarded\n--   4. doChain: multiple binds feeding into each other\n--   5. doMixed: interleaving bare statements and binds\n--   6. doLet: pure let binding (no evaluation)\n--   7. doLetBind: let mixed with <- bind\n--   8. doLetThunk: let binds an effect, evaluated later with <-\n\nmodule main (doPure, doBind, doBare, doChain, doMixed, doLet, doLetBind, doLetThunk)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\", \"add\")\n\ntype Cpp => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\nadd :: Int -> Int -> Int\n\n-- pure value: no effects\ndoPure :: Int\ndoPure = 42\n\n-- bind: x <- sideEffect 5 evaluates effect, binds x = 10\n-- then add x 1 = 11\ndoBind :: <IO> Int\ndoBind = do\n    x <- sideEffect 5\n    add x 1\n\n-- bare statement: evaluates and discards result\n-- sideEffect 3 called (prints \"EVAL 3\"), result discarded, then 42\ndoBare :: <IO> Int\ndoBare = do\n    sideEffect 3\n    42\n\n-- chained binds: result of first feeds into second\n-- sideEffect 3 -> \"EVAL 3\", x = 6\n-- sideEffect 6 -> \"EVAL 6\", y = 12\n-- add 6 12 = 18\ndoChain :: <IO> Int\ndoChain = do\n    x <- sideEffect 3\n    y <- sideEffect x\n    add x y\n\n-- mix of bare statements and binds\n-- sideEffect 1 -> \"EVAL 1\", discarded\n-- sideEffect 5 -> \"EVAL 5\", x = 10\n-- sideEffect 10 -> \"EVAL 10\", discarded\n-- add 10 1 = 11\ndoMixed :: <IO> Int\ndoMixed = do\n    sideEffect 1\n    x <- sideEffect 5\n    sideEffect x\n    add x 1\n\n-- pure let binding: let y = add 3 4 binds y = 7, no evaluation\n-- then add y 1 = 8\ndoLet :: <IO> Int\ndoLet = do\n    let y = add 3 4\n    add y 1\n\n-- let mixed with <- bind:\n-- sideEffect 5 -> \"EVAL 5\", x = 10\n-- let y = add x 1 binds y = 11 (pure, no evaluation)\n-- add x y = add 10 11 = 21\ndoLetBind :: <IO> Int\ndoLetBind = do\n    x <- sideEffect 5\n    let y = add x 1\n    add x y\n\n-- let binds an effect (not evaluated), then <- evaluates it later:\n-- let t = sideEffect 3 binds t :: <IO> Int (not yet evaluated)\n-- sideEffect 1 -> \"EVAL 1\", x = 2\n-- y <- t evaluates t: sideEffect 3 -> \"EVAL 3\", y = 6\n-- add 2 6 = 8\ndoLetThunk :: <IO> Int\ndoLetThunk = do\n    let t = sideEffect 3\n    x <- sideEffect 1\n    y <- t\n    add x y\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-effects/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- forceOnce ---\" > obs.txt\n\t./nexus forceOnce >> obs.txt 2>> obs.err\n\techo \"--- forceTwice ---\" >> obs.txt\n\t./nexus forceTwice >> obs.txt 2>> obs.err\n\techo \"--- forceShared ---\" >> obs.txt\n\t./nexus forceShared >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-effects/exp.txt",
    "content": "--- forceOnce ---\nEVAL 5\n10\n--- forceTwice ---\nEVAL 5\nEVAL 5\n20\n--- forceShared ---\nEVAL 5\n20\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-effects/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-effects/main.loc",
    "content": "-- Test core effect use cases:\n--   1. Single effect evaluation\n--   2. Two independent evaluations (no sharing)\n--   3. Do-block with shared binding evaluates once\n\nmodule main (forceOnce, forceTwice, forceShared)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\", \"add\")\n\ntype Cpp => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\nadd :: Int -> Int -> Int\n\n-- Single evaluation\n-- sideEffect 5 prints \"EVAL 5\", returns 10\nforceOnce :: <IO> Int\nforceOnce = sideEffect 5\n\n-- Two independent evaluations\n-- sideEffect 5 called twice: \"EVAL 5\" appears twice, 10 + 10 = 20\nforceTwice :: <IO> Int\nforceTwice = do\n    x <- sideEffect 5\n    y <- sideEffect 5\n    add x y\n\n-- Shared binding: evaluates once, result shared\n-- sideEffect 5 called once: \"EVAL 5\" appears once, 10 + 10 = 20\nforceShared :: <IO> Int\nforceShared = do\n    x <- sideEffect 5\n    add x x\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-forall/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-forall/exp.txt",
    "content": "0.639426798457884\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-forall/main.loc",
    "content": "-- Test: polymorphic effectful typeclass method used with <- in a do-block\n-- Mirrors bug-reports/report-2/bug1.loc\n\nmodule main (test)\n\nimport root-py\n\ntype Py => Real = \"float\"\ntype Py => Int = \"int\"\ntype Py => Unit = \"None\"\n\nclass Random a where\n  random :: <Random> a\n\ninstance Random Real where\n  source Py from \"rng.py\" (\"randomReal\" as random)\n\nsetSeed :: Int -> <Random> Unit\nsource Py from \"rng.py\" (\"setSeed\")\n\ntest :: <Random> Real\ntest = do\n  setSeed 42\n  x <- random\n  x\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-forall/rng.py",
    "content": "import random as _random\n\ndef setSeed(seed):\n    _random.seed(seed)\n    return None\n\ndef randomReal():\n    return _random.random()\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-hk/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-hk/exp.txt",
    "content": "[4,3,1,2,5]\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-hk/main.loc",
    "content": "-- Test: HK typeclass effectful method used with <- in a do-block\n-- Mirrors bug-reports/report-2/bug2.loc\n\nmodule main (test)\n\nimport root-py\n\ntype Py => Int = \"int\"\ntype Py => Unit = \"None\"\n\nclass RandomGroup f where\n  permute :: f a -> <Random> f a\n\ninstance RandomGroup List where\n  source Py from \"rng.py\" (\"permute\")\n\nsetSeed :: Int -> <Random> Unit\nsource Py from \"rng.py\" (\"setSeed\")\n\ntest :: <Random> [Int]\ntest = do\n  setSeed 42\n  x <- permute ([5, 3, 1, 4, 2] :: List Int)\n  x\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-eval-hk/rng.py",
    "content": "import random as _random\n\ndef setSeed(seed):\n    _random.seed(seed)\n    return None\n\ndef permute(xs):\n    ys = list(xs)\n    _random.shuffle(ys)\n    return ys\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- rollOnce ---\" > obs.txt\n\t./nexus rollOnce >> obs.txt 2>> obs.err\n\techo \"--- rollArg ---\" >> obs.txt\n\t./nexus rollArg 3 >> obs.txt 2>> obs.err\n\techo \"--- rollPy ---\" >> obs.txt\n\t./nexus rollPy >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export/exp.txt",
    "content": "--- rollOnce ---\nEVAL 5\n10\n--- rollArg ---\nEVAL 3\n6\n--- rollPy ---\nEVAL 7\n14\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export/foo.py",
    "content": "import sys\n\ndef pySideEffect(x):\n    print(\"EVAL \" + str(x), flush=True)\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export/main.loc",
    "content": "module main (rollOnce, rollArg, rollPy)\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\")\nsource Py from \"foo.py\" (\"pySideEffect\")\n\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\npySideEffect :: Int -> <IO> Int\n\n-- Export a nullary effect: should auto-force\nrollOnce :: <IO> Int\nrollOnce = sideEffect 5\n\n-- Export a function returning an effect: should auto-force\nrollArg :: Int -> <IO> Int\nrollArg x = sideEffect x\n\n-- Export a nullary Python effect: should auto-force\nrollPy :: <IO> Int\nrollPy = pySideEffect 7\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export-guard/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- guardThunk 5 ---\" > obs.txt\n\t./nexus guardThunk 5 >> obs.txt 2>> obs.err\n\techo \"--- guardThunk 0 ---\" >> obs.txt\n\t./nexus guardThunk 0 >> obs.txt 2>> obs.err\n\techo \"--- guardThunk -3 ---\" >> obs.txt\n\t./nexus guardThunk -- -3 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export-guard/exp.txt",
    "content": "--- guardThunk 5 ---\nEVAL 5\n10\n--- guardThunk 0 ---\n0\n--- guardThunk -3 ---\n0\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export-guard/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-export-guard/main.loc",
    "content": "-- Test exported function returning <IO> Int through guards.\n\nmodule main (guardThunk)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\")\n\ntype Cpp => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\n\nguardThunk :: Int -> <IO> Int\nguardThunk x\n  ? x > 0 = sideEffect x\n  : do\n      0\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-force/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus forceOnce >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-force/exp.txt",
    "content": "EVAL 5\n10\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-force/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-force/main.loc",
    "content": "module main (forceOnce)\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\")\n\ntype Cpp => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\n\n-- Exported effect-typed function: auto-forced at export boundary\nforceOnce :: <IO> Int\nforceOnce = sideEffect 5\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-guard-cross/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- guardCross 5 ---\" > obs.txt\n\t./nexus guardCross 5 >> obs.txt 2>> obs.err\n\techo \"--- guardCross 0 ---\" >> obs.txt\n\t./nexus guardCross 0 >> obs.txt 2>> obs.err\n\techo \"--- guardCross -3 ---\" >> obs.txt\n\t./nexus guardCross -- -3 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-guard-cross/exp.txt",
    "content": "--- guardCross 5 ---\n11\n--- guardCross 0 ---\n0\n--- guardCross -3 ---\n0\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-guard-cross/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-guard-cross/foo.py",
    "content": "def py_double(x):\n    return x * 2\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-guard-cross/main.loc",
    "content": "-- Test cross-language effect evaluation inside guard branches.\n\nmodule main (guardCross)\n\nimport root-cpp\nimport root-py\n\nsource Py from \"foo.py\" (\"py_double\")\nsource Cpp from \"foo.hpp\" (\"add\")\n\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\n\npy_double :: Int -> <IO> Int\nadd :: Int -> Int -> Int\n\nguardCross :: Int -> <IO> Int\nguardCross x\n  ? x > 0 = do\n      y <- py_double x\n      add y 1\n  : do\n      0\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-interop/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- cppForce ---\" > obs.txt\n\t./nexus cppForce >> obs.txt 2>> obs.err\n\techo \"--- pyForce ---\" >> obs.txt\n\t./nexus pyForce >> obs.txt 2>> obs.err\n\techo \"--- rForce ---\" >> obs.txt\n\t./nexus rForce >> obs.txt 2>> obs.err\n\techo \"--- crossCppPy ---\" >> obs.txt\n\t./nexus crossCppPy >> obs.txt 2>> obs.err\n\techo \"--- crossPyR ---\" >> obs.txt\n\t./nexus crossPyR >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-interop/exp.txt",
    "content": "--- cppForce ---\nEVAL_CPP 5\n10\n--- pyForce ---\nEVAL_PY 5\n10\n--- rForce ---\nEVAL_R 5\n10\n--- crossCppPy ---\nEVAL_CPP 3\nEVAL_PY 6\n18\n--- crossPyR ---\nEVAL_PY 4\nEVAL_R 8\n24\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-interop/foo.R",
    "content": "sideEffectR <- function(x) {\n    cat(paste0(\"EVAL_R \", x, \"\\n\"))\n    flush(stdout())\n    x * 2L\n}\n\naddR <- function(a, b) {\n    a + b\n}\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-interop/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffectCpp(int x) {\n    std::cout << \"EVAL_CPP \" << x << std::endl;\n    return x * 2;\n}\n\nint addCpp(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-interop/foo.py",
    "content": "import sys\n\ndef sideEffectPy(x):\n    print(\"EVAL_PY \" + str(x))\n    sys.stdout.flush()\n    return x * 2\n\ndef addPy(a, b):\n    return a + b\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-interop/main.loc",
    "content": "-- Test effects across C++, Python, and R:\n--   1-3. Evaluate effect in each language independently\n--   4-5. Cross-language: do-blocks chaining effect evaluations across languages\n\nmodule main (cppForce, pyForce, rForce, crossCppPy, crossPyR)\n\nimport root-cpp\nimport root-py\nimport root-r\n\nsource Cpp from \"foo.hpp\" (\"sideEffectCpp\", \"addCpp\")\nsource Py from \"foo.py\" (\"sideEffectPy\", \"addPy\")\nsource R from \"foo.R\" (\"sideEffectR\", \"addR\")\n\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype R => Int = \"integer\"\n\nsideEffectCpp :: Int -> <IO> Int\nsideEffectPy :: Int -> <IO> Int\nsideEffectR :: Int -> <IO> Int\naddCpp :: Int -> Int -> Int\naddPy :: Int -> Int -> Int\naddR :: Int -> Int -> Int\n\n-- Evaluate a C++ effect\n-- sideEffectCpp 5 -> \"EVAL_CPP 5\", returns 10\ncppForce :: <IO> Int\ncppForce = sideEffectCpp 5\n\n-- Evaluate a Python effect\n-- sideEffectPy 5 -> \"EVAL_PY 5\", returns 10\npyForce :: <IO> Int\npyForce = sideEffectPy 5\n\n-- Evaluate an R effect\n-- sideEffectR 5 -> \"EVAL_R 5\", returns 10\nrForce :: <IO> Int\nrForce = sideEffectR 5\n\n-- Cross-language do-block: C++ effect feeds Python effect, result via C++ add\n-- sideEffectCpp 3 -> \"EVAL_CPP 3\", x = 6\n-- sideEffectPy 6 -> \"EVAL_PY 6\", y = 12\n-- addCpp 6 12 = 18\ncrossCppPy :: <IO> Int\ncrossCppPy = do\n    x <- sideEffectCpp 3\n    y <- sideEffectPy x\n    addCpp x y\n\n-- Cross-language do-block: Python effect feeds R effect, result via Python add\n-- sideEffectPy 4 -> \"EVAL_PY 4\", x = 8\n-- sideEffectR 8 -> \"EVAL_R 8\", y = 16\n-- addPy 8 16 = 24\ncrossPyR :: <IO> Int\ncrossPyR = do\n    x <- sideEffectPy 4\n    y <- sideEffectR x\n    addPy x y\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-let/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- letShared ---\" > obs.txt\n\t./nexus letShared >> obs.txt 2>> obs.err\n\techo \"--- letIndep ---\" >> obs.txt\n\t./nexus letIndep >> obs.txt 2>> obs.err\n\techo \"--- letChain ---\" >> obs.txt\n\t./nexus letChain >> obs.txt 2>> obs.err\n\techo \"--- letMultiForce ---\" >> obs.txt\n\t./nexus letMultiForce >> obs.txt 2>> obs.err\n\techo \"--- letNested ---\" >> obs.txt\n\t./nexus letNested >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-let/exp.txt",
    "content": "--- letShared ---\nEVAL 5\n20\n--- letIndep ---\nEVAL 5\nEVAL 5\n20\n--- letChain ---\nEVAL 3\nEVAL 6\n18\n--- letMultiForce ---\nEVAL 5\nEVAL 5\n20\n--- letNested ---\nEVAL 2\nEVAL 4\n24\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-let/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <iostream>\n\nint sideEffect(int x) {\n    std::cout << \"EVAL \" << x << std::endl;\n    return x * 2;\n}\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-let/main.loc",
    "content": "-- Test let-binding semantics with effects:\n--   - do { x <- e } evaluates e once, x is shared (no re-evaluation)\n--   - Separate <- evaluations are independent each time\n--   - Sequential do-bindings evaluate in order, each exactly once\n\nmodule main (letShared, letIndep, letChain, letMultiForce, letNested)\n\nimport root-cpp\n\nsource Cpp from \"foo.hpp\" (\"sideEffect\", \"add\")\n\ntype Cpp => Int = \"int\"\n\nsideEffect :: Int -> <IO> Int\nadd :: Int -> Int -> Int\n\n-- Shared binding: evaluates once, result shared in both uses\n-- sideEffect 5 -> \"EVAL 5\" once, x = 10, 10 + 10 = 20\nletShared :: <IO> Int\nletShared = do\n    x <- sideEffect 5\n    add x x\n\n-- Independent evaluations: each evaluates separately\n-- sideEffect 5 called twice -> \"EVAL 5\" twice, 10 + 10 = 20\nletIndep :: <IO> Int\nletIndep = do\n    x <- sideEffect 5\n    y <- sideEffect 5\n    add x y\n\n-- Sequential bindings: first result feeds into second\n-- sideEffect 3 -> \"EVAL 3\", x = 6\n-- sideEffect 6 -> \"EVAL 6\", y = 12\n-- 6 + 12 = 18\nletChain :: <IO> Int\nletChain = do\n    x <- sideEffect 3\n    y <- sideEffect x\n    add x y\n\n-- Two independent evaluations of the same expression\n-- sideEffect 5 called twice -> \"EVAL 5\" twice, 10 + 10 = 20\nletMultiForce :: <IO> Int\nletMultiForce = do\n    x <- sideEffect 5\n    y <- sideEffect 5\n    add x y\n\n-- Nested bindings with sharing at each level\n-- sideEffect 2 -> \"EVAL 2\", a = 4\n-- sideEffect 4 -> \"EVAL 4\", b = 8\n-- c = 4 + 8 = 12\n-- 12 + 12 = 24\nletNested :: <IO> Int\nletNested = do\n    a <- sideEffect 2\n    b <- sideEffect a\n    let c = add a b\n    add c c\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-nullary-interop/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- pyThunkInCpp ---\" > obs.txt\n\t./nexus pyThunkInCpp >> obs.txt 2>> obs.err\n\techo \"--- multiPyThunkInCpp ---\" >> obs.txt\n\t./nexus multiPyThunkInCpp >> obs.txt 2>> obs.err\n\techo \"--- mixedThunkInCpp ---\" >> obs.txt\n\t./nexus mixedThunkInCpp >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-nullary-interop/exp.txt",
    "content": "--- pyThunkInCpp ---\n\"hello\"\n--- multiPyThunkInCpp ---\n\"hello_42\"\n--- mixedThunkInCpp ---\n84\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-nullary-interop/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <string>\n\nstd::string combine(std::string name, int value) {\n    return name + \"_\" + std::to_string(value);\n}\n\nint add(int a, int b) {\n    return a + b;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-nullary-interop/foo.py",
    "content": "def get_value():\n    return 42\n\ndef get_name():\n    return \"hello\"\n"
  },
  {
    "path": "test-suite/golden-tests/thunk-nullary-interop/main.loc",
    "content": "-- Test nullary (zero-argument) foreign effects evaluated in a cross-language\n-- do-block.\n\nmodule main (pyThunkInCpp, multiPyThunkInCpp, mixedThunkInCpp)\n\nimport root-cpp\nimport root-py\n\nsource Py from \"foo.py\" (\"get_value\", \"get_name\")\nsource Cpp from \"foo.hpp\" (\"combine\", \"add\")\n\ntype Cpp => Int = \"int\"\ntype Py => Int = \"int\"\ntype Cpp => Str = \"std::string\"\ntype Py => Str = \"str\"\n\nget_value :: <IO> Int\nget_name :: <IO> Str\ncombine :: Str -> Int -> Str\nadd :: Int -> Int -> Int\n\n-- Evaluate a single nullary Python effect from C++ context\npyThunkInCpp :: <IO> Str\npyThunkInCpp = do\n    n <- get_name\n    n\n\n-- Evaluate multiple nullary Python effects in a C++ do-block\nmultiPyThunkInCpp :: <IO> Str\nmultiPyThunkInCpp = do\n    n <- get_name\n    v <- get_value\n    combine n v\n\n-- Mix nullary Python effects with C++ computation in do-block\nmixedThunkInCpp :: <IO> Int\nmixedThunkInCpp = do\n    v1 <- get_value\n    v2 <- get_value\n    add v1 v2\n"
  },
  {
    "path": "test-suite/golden-tests/two-module/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o tavern tavern.loc 2> build.err\n\tmorloc make -o combat combat.loc 2>> build.err\n\t./tavern randomClass > obs.txt 2> obs.err\n\t./combat rollDice >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf tavern combat pools __pycache__ *.manifest\n"
  },
  {
    "path": "test-suite/golden-tests/two-module/combat.loc",
    "content": "module combat (rollDice)\n\nimport .util (roll)\n\nrollDice :: <Rand> [Int]\nrollDice = roll 2 1\n"
  },
  {
    "path": "test-suite/golden-tests/two-module/exp.txt",
    "content": "\"Fighter\"\n[1,1]\n"
  },
  {
    "path": "test-suite/golden-tests/two-module/tavern.loc",
    "content": "module tavern (randomClass)\n\nimport .util (choose)\n\nrandomClass :: <Rand> Str\nrandomClass = choose [\"Fighter\"]\n"
  },
  {
    "path": "test-suite/golden-tests/two-module/util.loc",
    "content": "module (roll, choose)\n\nimport root-py\n\nsource Py from \"util.py\"\n  ( \"roll\" as roll\n  , \"choose\" as choose\n  )\n\nroll :: Int -> Int -> <Rand> [Int]\n\nchoose :: [a] -> <Rand> a\n"
  },
  {
    "path": "test-suite/golden-tests/two-module/util.py",
    "content": "import random\n\ndef choose(xs):\n    return random.choice(xs)\n\ndef roll(n, d):\n    return [random.randint(1, d) for _ in range(n)]\n"
  },
  {
    "path": "test-suite/golden-tests/type-alias-transitive/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus test > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/type-alias-transitive/exp.txt",
    "content": "\"hello world\"\n"
  },
  {
    "path": "test-suite/golden-tests/type-alias-transitive/foo.hpp",
    "content": "#include <string>\n\nstd::string my_concat(std::string a, std::string b) {\n    return a + b;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/type-alias-transitive/main.loc",
    "content": "module main (test)\n\nimport types-cpp\n\ntest :: MyStr\ntest = myConcat \"hello\" \" world\"\n"
  },
  {
    "path": "test-suite/golden-tests/type-alias-transitive/types/main.loc",
    "content": "module types (*)\n\ntype MyStr = Str\n\nmyConcat :: MyStr -> MyStr -> MyStr\n"
  },
  {
    "path": "test-suite/golden-tests/type-alias-transitive/types-cpp.loc",
    "content": "module types-cpp (*)\n\nimport types\nimport root-cpp\n\n-- No explicit `type Cpp => MyStr = \"std::string\"` mapping.\n-- The compiler should transitively resolve: MyStr -> Str -> \"std::string\"\n\nsource Cpp from \"foo.hpp\" (\"my_concat\" as myConcat)\n"
  },
  {
    "path": "test-suite/golden-tests/type-annotations-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/type-annotations-1/exp.txt",
    "content": "0\n"
  },
  {
    "path": "test-suite/golden-tests/type-annotations-1/main.loc",
    "content": "module main (foo)\n\ntype Py => List a = \"list\" a\ntype Py => Int = \"int\"\n\nlength :: [a] -> Int\nsource Py (\"len\" as length)\n\nfoo = length ([] :: [Int])\n"
  },
  {
    "path": "test-suite/golden-tests/type-identities-c/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[12345678,987654321]' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/type-identities-c/exp.txt",
    "content": "[12345678,987654321]\n"
  },
  {
    "path": "test-suite/golden-tests/type-identities-c/main.loc",
    "content": "module main (foo)\n\nimport root-cpp (idcpp, Packable)\n\ninstance Packable (Int) SizeT where\n  source Cpp from \"types.h\"\n    ( \"packSizeT\" as pack\n    , \"unpackSizeT\" as unpack\n    )\n\ninstance Packable (Int) Long where\n  source Cpp from \"types.h\"\n    ( \"packLong\" as pack\n    , \"unpackLong\" as unpack\n    )\n\ntype Cpp => SizeT = \"size_t\" \ntype Cpp => Long = \"int64_t\" \ntype Cpp => Int = \"int\"\n\nfoo :: (SizeT, Long) -> (SizeT, Long)\nfoo x = idcpp x\n"
  },
  {
    "path": "test-suite/golden-tests/type-identities-c/types.h",
    "content": "#ifndef __MORLOC_TYPE_IDENTITIES_TYPES_H__\n#define __MORLOC_TYPE_IDENTITIES_TYPES_H__\n\n#include <cstdint>\n\n// packSizeT   Cpp :: pack   => Int -> SizeT\nsize_t packSizeT(int x) {\n    return static_cast<size_t>(x);\n}\n\n// unpackSizeT Cpp :: unpack => SizeT -> Int\nint unpackSizeT(size_t x) {\n    return static_cast<int>(x);\n}\n\n// packLong   Cpp :: pack   => Int -> Long\nint64_t packLong(int x) {\n    return static_cast<int64_t>(x);\n}\n\n// unpackLong Cpp :: unpack => Long -> Int\nint unpackLong(int64_t x) {\n    return static_cast<int>(x);\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo 42 > obs.txt 2> obs.err\n\t./nexus foos 42 2>> obs.err  >> obs.txt\n\t./nexus sfoos '[42]' 2>> obs.err  >> obs.txt\n\t./nexus bar 42 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-1/exp.txt",
    "content": "43\n[42,42]\n[1,[42]]\n[[42,43],[42,43]]\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-1/foo.py",
    "content": "#  foo :: Int -> Int\ndef foo(x):\n    return x + 1\n\n#  foos :: Int -> [Int]\ndef foos(x):\n    return [x, x]\n\n#  sfoos :: [Int] -> (Int, [Int])\ndef sfoos(xs):\n    return (1, xs)\n\n#  toPair a :: a -> (a, a)\ndef toPair(x):\n    return (x,x)\n\n#  swapPair a b :: (a, b) -> (b, a)\ndef swapPair(xs):\n    return (xs[1], xs[0])\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-1/main.loc",
    "content": "module main (foo, foos, sfoos, bar)\n\nsource Py from \"foo.py\" (\"foo\", \"foos\", \"sfoos\", \"toPair\", \"swapPair\")\n\ntype Py => Int = \"int\"\ntype Py => (List a) = \"list\" a\ntype Py => (Tuple2 a b) = \"tuple\" a b\n\nfoo :: Int -> Int\nfoos :: Int -> [Int]\nsfoos :: [Int] -> (Int, [Int])\ntoPair :: a -> (a, a)\nswapPair :: (a, b) -> (b, a)\n\nbar x = toPair (swapPair (foo x, x))\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus f '\"hello\"' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools nexus.cpy lib/foo/__pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-2/exp.txt",
    "content": "\"hello\"\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-2/lib/foo/foo.py",
    "content": "def f(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-2/lib/foo/main.loc",
    "content": "module lib.foo (f, FooType)\n\nimport lib.footypes (FooType)\n\nsource Py from \"foo.py\" (\"f\")\n\nf :: FooType -> FooType\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-2/lib/footypes/main.loc",
    "content": "module lib.footypes (*)\n\ntype Py => Str = \"str\"\n\ntype FooType = Str\n"
  },
  {
    "path": "test-suite/golden-tests/type-synthesis-2/main.loc",
    "content": "module main (f)\n\nimport lib.foo (f)\n"
  },
  {
    "path": "test-suite/golden-tests/typeclass-stress/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\techo \"--- testPy ---\" > obs.txt\n\t./nexus testPy >> obs.txt 2>> obs.err\n\techo \"--- testCpp ---\" >> obs.txt\n\t./nexus testCpp >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools log __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclass-stress/exp.txt",
    "content": "--- testPy ---\ntrue\n--- testCpp ---\ntrue\n"
  },
  {
    "path": "test-suite/golden-tests/typeclass-stress/main.loc",
    "content": "-- Stress test for typeclass instance resolution with type aliases.\n-- List, Deque, Vector, and Array are all aliases for List, each with\n-- their own typeclass instances. The compiler must deduplicate these\n-- during instance resolution to avoid exponential blowup.\n\nmodule main (testPy, testCpp)\n\nimport root-py\nimport root-cpp\n\n-- Use Functor (map)\nmapList :: [Int] -> [Int]\nmapList xs = map (\\x -> x + 1) xs\n\n-- Use Foldable (fold)\nfoldList :: [Int] -> Int\nfoldList = fold (\\acc x -> acc + x) 0\n\n-- Use Indexed (at)\natList :: [Int] -> Int\natList xs = at 0 xs\n\n-- Use multiple typeclasses: map then fold\nmapThenFold :: [Int] -> Int\nmapThenFold xs = fold (\\a x -> a + x) 0 (map (\\x -> x * 2) xs)\n\n-- length (Foldable-derived) with map (Functor)\nmapThenLength :: [Int] -> Int\nmapThenLength xs = length (map (\\x -> x + 1) xs)\n\n-- Nested: map over list of lists\nnestedMap :: [[Int]] -> [[Int]]\nnestedMap = map (map (\\x -> x + 10))\n\n-- Chain: cons (Stack) then map (Functor) then fold (Foldable)\nconsMapFold :: [Int] -> Int\nconsMapFold xs = fold (\\a x -> a + x) 0 (map (\\x -> x * 3) (cons 0 xs))\n\n-- snoc (Queue) + reverse + map (Functor)\nsnocReverse :: [Int] -> [Int]\nsnocReverse xs = reverse (map (\\x -> x + 1) (snoc xs 99))\n\n-- Eq instance on lists\neqList :: Bool\neqList = [1, 2, 3] == [1, 2, 3]\n\n-- Compose several typeclass-derived ops\nstressCompose :: [Int] -> Int\nstressCompose xs =\n  let ys = map (\\x -> x + 1) xs\n      zs = filter (\\x -> x > 2) ys\n      w = fold (\\a x -> a + x) 0 zs\n  in w\n\ntestPy :: Bool\ntestPy =\n  let t1 = mapList [1,2,3] == [2,3,4]\n      t2 = foldList [1,2,3,4] == 10\n      t3 = atList [10,20,30] == 10\n      t4 = mapThenFold [1,2,3] == 12\n      t5 = mapThenLength [1,2,3] == 3\n      t6 = nestedMap [[1,2],[3]] == [[11,12],[13]]\n      t7 = consMapFold [1,2] == 9\n      t8 = snocReverse [1,2] == [100,3,2]\n      t9 = eqList\n      t10 = stressCompose [1,2,3] == 7\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10\n\ntestCpp :: Bool\ntestCpp =\n  let t1 = mapList [1,2,3] == [2,3,4]\n      t2 = foldList [1,2,3,4] == 10\n      t3 = atList [10,20,30] == 10\n      t4 = mapThenFold [1,2,3] == 12\n      t5 = mapThenLength [1,2,3] == 3\n      t6 = nestedMap [[1,2],[3]] == [[11,12],[13]]\n      t7 = consMapFold [1,2] == 9\n      t8 = snocReverse [1,2] == [100,3,2]\n      t9 = eqList\n      t10 = stressCompose [1,2,3] == 7\n  in t1 && t2 && t3 && t4 && t5 && t6 && t7 && t8 && t9 && t10\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"a\"' '\"b\"' > obs.txt 2> obs.err\n\t./nexus bar 6 5 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-1/exp.txt",
    "content": "\"abyolo\"\n17\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-1/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <string>\n\nint addInt(int x, int y){\n  return (x + y);\n}\n\ndouble addReal(double x, double y){\n  return (x + y);\n}\n\nstd::string addStr(std::string x, std::string y){\n  return (x + y);\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-1/foo.py",
    "content": "def addInt(x, y):\n    return x + y\n\ndef addReal(x, y):\n    return x + y\n\ndef addStr(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-1/main.loc",
    "content": "module main (foo, bar)\n\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Cpp => Str = \"std::string\"\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Py => Str = \"str\"\n\nclass Add a where\n  add :: a -> a -> a\n\ninstance Add Int where\n  source Cpp from \"foo.hpp\" (\"addInt\" as add)\n  source Py from \"foo.py\" (\"addInt\" as add)\n\ninstance Add Real where\n  source Cpp from \"foo.hpp\" (\"addReal\" as add)\n  source Py from \"foo.py\" (\"addReal\" as add)\n\ninstance Add Str where\n  source Cpp from \"foo.hpp\" (\"addStr\" as add)\n  source Py from \"foo.py\" (\"addStr\" as add)\n\nbar :: Real -> Real -> Real\nbar x y = add x (add y x)\n\nfoo x y = add x (add y \"yolo\")\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-2/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus paste '[\"a\",\"b\"]' > obs.txt 2> obs.err\n\t./nexus sum '[1,2,3]' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__ y z\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-2/exp.txt",
    "content": "\"ab\"\n6\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-2/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <string>\n#include <vector>\n#include <functional>\n\nint addInt(int x, int y){\n  return (x + y);\n}\n\ndouble addReal(double x, double y){\n  return (x + y);\n}\n\nstd::string addStr(std::string x, std::string y){\n  return (x + y);\n}\n\ntemplate <class A, class B, class F>\nB fold(F f, B y, const std::vector<A>& xs){\n    for(std::size_t i=0; i < xs.size(); i++){\n        y = f(y, xs[i]);\n    }\n    return y;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-2/foo.py",
    "content": "def addInt(x, y):\n    return x + y\n\ndef addReal(x, y):\n    return x + y\n\ndef addStr(x, y):\n    return x + y\n\ndef fold(f, b, xs):\n    for x in xs:\n        b = f(b, x)\n    return b\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-2/main.loc",
    "content": "module main (paste, sum)\n\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Cpp => Str = \"std::string\"\ntype Cpp => List a = \"std::vector<$1>\" a\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Py => Str = \"str\"\ntype Py => List a = \"list\" a\n\nclass Monoid a where\n  empty :: a\n  op :: a -> a -> a\n\ninstance Monoid Int where\n  source Cpp from \"foo.hpp\" (\"addInt\" as op)\n  source Py from \"foo.py\" (\"addInt\" as op)\n  empty = 0\n\ninstance Monoid Real where\n  source Cpp from \"foo.hpp\" (\"addReal\" as op)\n  source Py from \"foo.py\" (\"addReal\" as op)\n  empty = 0.0\n\ninstance Monoid Str where\n  source Cpp from \"foo.hpp\" (\"addStr\" as op)\n  source Py from \"foo.py\" (\"addStr\" as op)\n  empty = \"\"\n\nsource Cpp from \"foo.hpp\" (\"fold\")\nsource Py from \"foo.py\" (\"fold\")\nfold :: (b -> a -> b) -> b -> [a] -> b\n\nsum :: [Int] -> Int\nsum = fold op empty\n\npaste :: [Str] -> Str\npaste = fold op empty\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-3/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[6,5]' > obs.txt 2> obs.err\n\t./nexus bar '[6,5]' 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-3/exp.txt",
    "content": "\"[6, 5]\"\n2\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-3/main.loc",
    "content": "module main (foo, bar)\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Py => Str = \"str\"\ntype Py => (List a) = \"list\" a\n\n\nclass Summarizable a b where\n  summarize :: a -> b\n\ninstance Summarizable [a] Str where\n  source Py (\"str\" as summarize)\n\ninstance Summarizable [a] Int where\n  source Py (\"len\" as summarize)\n\n\nfoo :: [Int] -> Str\nfoo x = summarize x\n\nbar :: [Int] -> Int\nbar x = summarize x\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-4/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '[\"a\",\"bad\"]' > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-4/exp.txt",
    "content": "[[\"a\",\"bad\"],[1,3]]\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-4/foo.py",
    "content": "#  class Reversible ([a],[b]) [(a,b)] where\ndef forward(x):\n    return list(zip(x[0], x[1]))\n\ndef backward(xys):\n    xs = []\n    ys = []\n    for (x,y) in xys:\n        xs.append(x)\n        ys.append(y)\n    return (xs, ys)\n\n#  addLen :: Str -> (Str, Int)\ndef addLen(x):\n    return (x, len(x))\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-4/main.loc",
    "content": "module main (foo)\n\nimport root ((.))\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => (List a) = \"list\" a\ntype Py => Tuple2 a b = \"tuple\" a b\n\n\nclass Reversible a b where\n  forward :: a -> b\n  backward :: b -> a\n\ninstance Reversible ([a],[b]) [(a,b)] where\n  source Py from \"foo.py\" (\"forward\", \"backward\")\n\n\nsource Py (\"map\")\nsource Py from \"foo.py\" (\"addLen\")\naddLen :: Str -> (Str, Int)\nmap :: (a -> b) -> [a] -> [b]\n\nfoo :: [Str] -> ([Str],[Int])\nfoo = backward . map addLen\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-5/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus square 4.0 > obs.txt 2> obs.err\n\t./nexus sumOfSquares [1,2,3] 2>> obs.err  >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-5/exp.txt",
    "content": "16\n14\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-5/main.loc",
    "content": "module main (square, sumOfSquares)\n\nimport root-cpp\n\nsquare :: Real -> Real\nsquare x = x * x\n\nsumOfSquares :: [Real] -> Real\nsumOfSquares xs = fold (+) 0.0 (map square xs)\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-6/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo '\"alice\"' 42 > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-6/exp.txt",
    "content": "[[\"alice\"],[42]]\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-6/main.loc",
    "content": "module main (foo)\n\nimport map-cpp\n\nfoo :: Str -> Int -> Map Str Int\nfoo k v = pack ([k], [v])\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-7/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus appendAllInt 1 2 3 > obs.txt 2> obs.err\n\t./nexus appendAllStr '\"hello\"' '\" \"' '\"world\"' >> obs.txt 2>> obs.err\n\t./nexus showAppend 10 20 >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-7/exp.txt",
    "content": "6\n\"hello world\"\n\"30\"\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-7/foo.hpp",
    "content": "#ifndef __FOO_HPP__\n#define __FOO_HPP__\n\n#include <string>\n\nint appendInt(int x, int y){\n  return x + y;\n}\n\nstd::string appendStr(std::string x, std::string y){\n  return x + y;\n}\n\nstd::string showInt(int x){\n  return std::to_string(x);\n}\n\nstd::string showStr(std::string x){\n  return x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-7/foo.py",
    "content": "def appendInt(x, y):\n    return x + y\n\ndef appendStr(x, y):\n    return x + y\n\ndef showInt(x):\n    return str(x)\n\ndef showStr(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-7/main.loc",
    "content": "module main (appendAllInt, appendAllStr, showAppend)\n\ntype Cpp => Int = \"int\"\ntype Cpp => Str = \"std::string\"\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\n\nclass Semigroup a where\n  append :: a -> a -> a\n\nclass Semigroup a => Showable a where\n  toStr :: a -> Str\n\ninstance Semigroup Int where\n  source Cpp from \"foo.hpp\" (\"appendInt\" as append)\n  source Py from \"foo.py\" (\"appendInt\" as append)\n\ninstance Showable Int where\n  source Cpp from \"foo.hpp\" (\"showInt\" as toStr)\n  source Py from \"foo.py\" (\"showInt\" as toStr)\n\ninstance Semigroup Str where\n  source Cpp from \"foo.hpp\" (\"appendStr\" as append)\n  source Py from \"foo.py\" (\"appendStr\" as append)\n\ninstance Showable Str where\n  source Cpp from \"foo.hpp\" (\"showStr\" as toStr)\n  source Py from \"foo.py\" (\"showStr\" as toStr)\n\n-- single constraint, concrete Int\nappendAllInt :: Semigroup a => Int -> Int -> Int -> Int\nappendAllInt x y z = append x (append y z)\n\n-- single constraint, concrete Str\nappendAllStr :: Semigroup a => Str -> Str -> Str -> Str\nappendAllStr x y z = append x (append y z)\n\n-- multiple constraints, uses methods from both Showable and Semigroup\nshowAppend :: (Semigroup a, Showable a) => Int -> Int -> Str\nshowAppend x y = toStr (append x y)\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-8/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testStr > obs.txt 2> obs.err\n\t./nexus testList >> obs.txt 2>> obs.err\n\t./nexus testListInt >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-8/exp.txt",
    "content": "\"hello\"\n[\"world\"]\n[1,2,3]\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-8/foo.py",
    "content": "def appendStr(x, y):\n    return x + y\n\ndef appendList(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-8/main.loc",
    "content": "module main (testStr, testList, testListInt)\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => (List a) = \"list\" a\n\nclass Monoid a where\n  mempty :: a\n  mappend :: a -> a -> a\n\ninstance Monoid Str where\n  source Py from \"foo.py\" (\"appendStr\" as mappend)\n  mempty = \"\"\n\ninstance Monoid (List a) where\n  source Py from \"foo.py\" (\"appendList\" as mappend)\n  mempty = []\n\n-- The core regression: annotation selects Str, not List\ntestStr :: Str\ntestStr = mappend mempty \"hello\"\n\n-- Annotation selects List\ntestList :: [Str]\ntestList = mappend mempty [\"world\"]\n\n-- Nested: compose methods from same class at different types\ntestListInt :: [Int]\ntestListInt = mappend (mappend mempty [1]) [2, 3]\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-9/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus testSemiStr > obs.txt 2> obs.err\n\t./nexus testMonoidStr >> obs.txt 2>> obs.err\n\t./nexus testMonoidList >> obs.txt 2>> obs.err\n\t./nexus testGroupNegate >> obs.txt 2>> obs.err\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-9/exp.txt",
    "content": "\"hello world\"\n\"hello\"\n[1,2,3]\n5\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-9/foo.py",
    "content": "def appendStr(x, y):\n    return x + y\n\ndef addInt(x, y):\n    return x + y\n\ndef negateInt(x):\n    return -x\n\ndef appendList(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/golden-tests/typeclasses-9/main.loc",
    "content": "module main (testSemiStr, testMonoidStr, testMonoidList, testGroupNegate)\n\ntype Py => Int = \"int\"\ntype Py => Str = \"str\"\ntype Py => (List a) = \"list\" a\n\n-- Three-level class hierarchy: Semigroup -> Monoid -> Group\nclass Semigroup a where\n  sappend :: a -> a -> a\n\nclass Semigroup a => Monoid a where\n  mempty :: a\n\nclass Monoid a => Group a where\n  ginverse :: a -> a\n\ninstance Semigroup Str where\n  source Py from \"foo.py\" (\"appendStr\" as sappend)\n\ninstance Monoid Str where\n  mempty = \"\"\n\ninstance Semigroup Int where\n  source Py from \"foo.py\" (\"addInt\" as sappend)\n\ninstance Monoid Int where\n  mempty = 0\n\ninstance Group Int where\n  source Py from \"foo.py\" (\"negateInt\" as ginverse)\n\ninstance Semigroup (List a) where\n  source Py from \"foo.py\" (\"appendList\" as sappend)\n\ninstance Monoid (List a) where\n  mempty = []\n\n-- Test superclass method (Semigroup) through concrete type\ntestSemiStr :: Str\ntestSemiStr = sappend \"hello\" \" world\"\n\n-- Test Monoid method with Str\ntestMonoidStr :: Str\ntestMonoidStr = sappend mempty \"hello\"\n\n-- Test Monoid method with List -- exercises the annotation propagation fix\ntestMonoidList :: [Int]\ntestMonoidList = sappend mempty [1, 2, 3]\n\n-- Test three-level hierarchy: Group's ginverse uses Monoid's mempty indirectly\ntestGroupNegate :: Int\ntestGroupNegate = sappend (ginverse 5) 10\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-edge-cases/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus singleChar 2>> obs.err >> obs.txt\n\t./nexus emptyLike 2>> obs.err >> obs.txt\n\t./nexus rtl 2>> obs.err >> obs.txt\n\t./nexus combining 2>> obs.err >> obs.txt\n\t./nexus surrogateLike 2>> obs.err >> obs.txt\n\t./nexus nullLike 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-edge-cases/exp.txt",
    "content": "\"世\"\n\"​\"\n\"مرحبا بالعالم\"\n\"é\"\n\"😀💩\"\n\"a​b​c\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-edge-cases/foo.py",
    "content": "def py_identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-edge-cases/main.loc",
    "content": "module main (emptyLike, singleChar, surrogateLike, rtl, combining, nullLike)\n\nsource Py from \"foo.py\" (\"py_identity\" as pyId)\n\npyId :: Str -> Str\ntype Py => Str = \"str\"\n\n-- Single multi-byte character\nsingleChar :: Str\nsingleChar = pyId \"世\"\n\n-- Characters that look like JSON special chars but aren't\nemptyLike :: Str\nemptyLike = pyId \"​\"\n\n-- Right-to-left text (Arabic)\nrtl :: Str\nrtl = pyId \"مرحبا بالعالم\"\n\n-- Combining characters (e + combining acute = é)\ncombining :: Str\ncombining = pyId \"é\"\n\n-- String with surrogate-like high codepoints (valid UTF-8, 4-byte sequences)\nsurrogateLike :: Str\nsurrogateLike = pyId \"😀💩\"\n\n-- Null-like zero-width chars mixed with ASCII\nnullLike :: Str\nnullLike = pyId \"a​b​c\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interop/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus pyToCpp 2>> obs.err >> obs.txt\n\t./nexus cppToPy 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interop/exp.txt",
    "content": "\"[cpp:[py:café-你好]]\"\n\"[py:[cpp:café-你好]]\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interop/foo.hpp",
    "content": "#include <string>\n\nstd::string cpp_wrap(std::string x) {\n    return \"[cpp:\" + x + \"]\";\n}\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interop/foo.py",
    "content": "def py_wrap(x):\n    return \"[py:\" + x + \"]\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interop/main.loc",
    "content": "module main (pyToCpp, cppToPy)\n\nimport root ((.))\n\nsource Py from \"foo.py\" (\"py_wrap\" as pyWrap)\nsource Cpp from \"foo.hpp\" (\"cpp_wrap\" as cppWrap)\n\npyWrap :: Str -> Str\ncppWrap :: Str -> Str\n\ntype Py => Str = \"str\"\ntype Cpp => Str = \"std::string\"\n\n-- Unicode string crosses Python -> C++ boundary\npyToCpp :: Str\npyToCpp = (cppWrap . pyWrap) \"café-你好\"\n\n-- Unicode string crosses C++ -> Python boundary\ncppToPy :: Str\ncppToPy = (pyWrap . cppWrap) \"café-你好\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interpolation/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus greet 2>> obs.err >> obs.txt\n\t./nexus multiByteInterp 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interpolation/exp.txt",
    "content": "\"你好 World ❤\"\n\"café World üñ\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interpolation/foo.py",
    "content": "def py_identity(x):\n    return x\n\ndef py_name():\n    return \"World\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-interpolation/main.loc",
    "content": "module main (greet, multiByteInterp)\n\nsource Py from \"foo.py\" (\"py_identity\" as pyId, \"py_name\" as pyName)\n\npyId :: Str -> Str\npyName :: Str\n\ntype Py => Str = \"str\"\n\n-- Test: Unicode in string interpolation - surrounding text is Unicode\ngreet :: Str\ngreet = pyId \"你好 #{pyName} ❤\"\n\n-- Test: Multi-byte chars on both sides of interpolation\nmultiByteInterp :: Str\nmultiByteInterp = pyId \"café #{pyName} üñ\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus hello 2>> obs.err >> obs.txt\n\t./nexus chinese 2>> obs.err >> obs.txt\n\t./nexus emoji 2>> obs.err >> obs.txt\n\t./nexus mixed 2>> obs.err >> obs.txt\n\t./nexus roundtrip 2>> obs.err >> obs.txt\n\t./nexus tripleQuoted 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source/exp.txt",
    "content": "\"Élève café\"\n\"你知道得太多了\"\n\"❤ hello ☃\"\n\"abc-éèê-你好-❤-xyz\"\n\"Hello, café-你好!\"\n\"line1: café\\nline2: 你好\\nline3: ❤\\n\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source/foo.hpp",
    "content": "#include <string>\n\nstd::string cpp_hello(std::string name) {\n    return \"Hello, \" + name + \"!\";\n}\n\nstd::string cpp_identity(std::string x) {\n    return x;\n}\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source/foo.py",
    "content": "def py_hello(name):\n    return f\"Hello, {name}!\"\n\ndef py_identity(x):\n    return x\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source/main.loc",
    "content": "module main (hello, chinese, emoji, mixed, roundtrip, tripleQuoted)\n\n-- À propos: test Unicode in line comments — em-dash, accented letters, CJK\n-- 你好世界 – Chinese hello world in a comment\n\n{- Block comment with Unicode:\n   éèêë ñ ü ß ☃ ❤ ★\n   Japanese: こんにちは\n   Arabic: مرحبا\n-}\n\nsource Py from \"foo.py\" (\"py_hello\" as pyHello, \"py_identity\" as pyId)\n\npyHello :: Str -> Str\npyId :: Str -> Str\n\ntype Py => Str = \"str\"\n\n-- Test 1: Unicode string literal in .loc source (accented chars)\nhello :: Str\nhello = pyId \"Élève café\"\n\n-- Test 2: CJK characters in string literal\nchinese :: Str\nchinese = pyId \"你知道得太多了\"\n\n-- Test 3: Emoji-like symbols in string literal\nemoji :: Str\nemoji = pyId \"❤ hello ☃\"\n\n-- Test 4: Mixed ASCII and multi-byte in same string  \nmixed :: Str\nmixed = pyId \"abc-éèê-你好-❤-xyz\"\n\n-- Test 5: Unicode survives function application round-trip\nroundtrip :: Str\nroundtrip = pyHello \"café-你好\"\n\n-- Test 6: Unicode in triple-quoted string\ntripleQuoted :: Str\ntripleQuoted = pyId \"\"\"line1: café\nline2: 你好\nline3: ❤\"\"\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source-cpp/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t>obs.txt\n\t./nexus hello 2>> obs.err >> obs.txt\n\t./nexus chinese 2>> obs.err >> obs.txt\n\t./nexus mixed 2>> obs.err >> obs.txt\n\nclean:\n\trm -rf nexus pools __pycache__\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source-cpp/exp.txt",
    "content": "\"Élève café\"\n\"你知道得太多了\"\n\"Hello, café-你好-❤!\"\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source-cpp/foo.hpp",
    "content": "#include <string>\n\nstd::string cpp_identity(std::string x) {\n    return x;\n}\n\nstd::string cpp_hello(std::string name) {\n    return \"Hello, \" + name + \"!\";\n}\n"
  },
  {
    "path": "test-suite/golden-tests/unicode-source-cpp/main.loc",
    "content": "module main (hello, chinese, mixed)\n\n-- Test Unicode strings through C++ pool — exercises the codegen subprocess\n\nsource Cpp from \"foo.hpp\" (\"cpp_identity\" as cppId, \"cpp_hello\" as cppHello)\n\ncppId :: Str -> Str\ncppHello :: Str -> Str\n\ntype Cpp => Str = \"std::string\"\n\nhello :: Str\nhello = cppId \"Élève café\"\n\nchinese :: Str\nchinese = cppId \"你知道得太多了\"\n\nmixed :: Str\nmixed = cppHello \"café-你好-❤\"\n"
  },
  {
    "path": "test-suite/golden-tests/unit-1/Makefile",
    "content": "all:\n\trm -f *.err obs.txt\n\tmorloc make -o nexus main.loc 2> build.err\n\t./nexus foo > obs.txt 2> obs.err\n\nclean:\n\trm -rf nexus pools\n"
  },
  {
    "path": "test-suite/golden-tests/unit-1/exp.txt",
    "content": "42\n"
  },
  {
    "path": "test-suite/golden-tests/unit-1/foo.py",
    "content": "# note, this is a fake urand number generator\ndef urand():\n    return 42\n"
  },
  {
    "path": "test-suite/golden-tests/unit-1/main.loc",
    "content": "module main (foo)\n\nimport root-py\n\nsource Py from \"foo.py\" (\"urand\")\n\nurand :: () -> Int\n\nfoo = urand ()\n"
  },
  {
    "path": "test-suite/install-tests/.gitignore",
    "content": "nexus\n"
  },
  {
    "path": "test-suite/install-tests/README.md",
    "content": "# Test installation\n\nThese suite tests the `morloc make --install` command.\n\nEach test verifies 6 things:\n\n1. Binary installed to ~/.local/share/morloc/bin/\n2. Exe directory created under ~/.local/share/morloc/exe/\n3. Pools directory / included files/folders copied correctly\n4. Program produces correct output when run\n5. morloc uninstall --program removes the binary\n6. Cleanup is complete\n\n  ┌──────────┬──────────┬───────────────────────────────────────────────────────────────────┐\n  │  Module  │ Language │                           What it tests                           │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testpy1  │ Python   │ Direct source in cwd, file include via package.yaml               │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testpy2  │ Python   │ Source in src/, whole folder include via package.yaml             │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testpy3  │ Python   │ Direct source + indirect Python import, include via --include CLI │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testcpp1 │ C++      │ Direct .hpp source in cwd, file include via package.yaml          │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testcpp2 │ C++      │ Source in src/, whole folder include via package.yaml             │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testcpp3 │ C++      │ Direct source + indirect #include, include via --include CLI      │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testr1   │ R        │ Direct source in cwd, file include via package.yaml               │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testr2   │ R        │ Source in src/, whole folder include via package.yaml             │\n  ├──────────┼──────────┼───────────────────────────────────────────────────────────────────┤\n  │ testr3   │ R        │ Direct source + indirect source(), include via --include CLI      │\n  └──────────┴──────────┴───────────────────────────────────────────────────────────────────┘\n\n"
  },
  {
    "path": "test-suite/install-tests/run-tests.sh",
    "content": "#!/usr/bin/env bash\n# run-tests.sh - Installation test suite for morloc\n#\n# Tests `morloc make --install` with package.yaml include fields and\n# `morloc make --install --include` CLI flags across Python, C++, and R.\n#\n# Covers:\n#   - Source files in cwd vs nested directories (src/)\n#   - Directly and indirectly sourced files\n#   - Whole-folder includes\n#   - CLI --include flag\n#\n# Usage: ./run-tests.sh [test...]\n#   With no arguments, runs all test groups. Pass partial names to filter:\n#   ./run-tests.sh testpy testcpp testr\n\nset -euo pipefail\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\n\nMORLOC_HOME=\"${MORLOC_HOME:-$HOME/.local/share/morloc}\"\nBIN_DIR=\"$MORLOC_HOME/bin\"\nEXE_DIR=\"$MORLOC_HOME/exe\"\nFDB_DIR=\"$MORLOC_HOME/fdb\"\n\nPASSED=0\nFAILED=0\nTOTAL=0\nFAILURES=()\n\nif [[ -t 1 ]]; then\n    GREEN=$'\\033[32m' RED=$'\\033[31m' YELLOW=$'\\033[33m' BOLD=$'\\033[1m' RESET=$'\\033[0m'\nelse\n    GREEN='' RED='' YELLOW='' BOLD='' RESET=''\nfi\n\n# ======================================================================\n# Test helpers\n# ======================================================================\n\nassert_test() {\n    local label=\"$1\"\n    local expected=\"$2\"\n    local actual=\"$3\"\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-55s \" \"$label\"\n\n    if [[ \"$actual\" == \"$expected\" ]]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      expected: $expected\"\n        echo \"      actual:   $actual\"\n    fi\n}\n\nassert_file_exists() {\n    local label=\"$1\"\n    local filepath=\"$2\"\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-55s \" \"$label\"\n\n    if [[ -e \"$filepath\" ]]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      file not found: $filepath\"\n    fi\n}\n\nassert_dir_exists() {\n    local label=\"$1\"\n    local dirpath=\"$2\"\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-55s \" \"$label\"\n\n    if [[ -d \"$dirpath\" ]]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      directory not found: $dirpath\"\n    fi\n}\n\nassert_not_exists() {\n    local label=\"$1\"\n    local filepath=\"$2\"\n\n    TOTAL=$((TOTAL + 1))\n    printf \"  %-55s \" \"$label\"\n\n    if [[ ! -e \"$filepath\" ]]; then\n        printf \"%sPASS%s\\n\" \"$GREEN\" \"$RESET\"\n        PASSED=$((PASSED + 1))\n    else\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$label\")\n        echo \"      should not exist: $filepath\"\n    fi\n}\n\nshould_run() {\n    local name=\"$1\"\n    if [[ $# -eq 0 ]] || [[ ${#FILTERS[@]} -eq 0 ]]; then\n        return 0\n    fi\n    for pat in \"${FILTERS[@]}\"; do\n        if [[ \"$name\" == *\"$pat\"* ]]; then\n            return 0\n        fi\n    done\n    return 1\n}\n\n# Build, install, test, and uninstall a module.\n#\n# Usage: run_install_test <test_name> <test_dir> <subcommand> <arg> <expected> [extra_make_args...]\n#\n# test_name:  group label\n# test_dir:   directory containing main.loc + sources\n# subcommand: the exported function to call\n# arg:        argument to pass to the subcommand\n# expected:   expected output\n# extra_make_args: additional args for `morloc make` (e.g. --include foo.py)\nrun_install_test() {\n    local test_name=\"$1\"\n    local test_dir=\"$2\"\n    local subcommand=\"$3\"\n    local arg=\"$4\"\n    local expected=\"$5\"\n    shift 5\n    local extra_args=(\"$@\")\n\n    local work_dir\n    work_dir=$(mktemp -d)\n\n    # Copy test module to a temp working directory\n    cp -r \"$test_dir\"/. \"$work_dir\"/\n\n    # Build and install\n    local build_err\n    build_err=$(cd \"$work_dir\" && morloc make --install --force -o \"$test_name\" \"${extra_args[@]}\" main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"$test_name: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"$test_name: build\")\n        echo \"$build_err\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$work_dir\"\n        return\n    }\n\n    local bin_path=\"$BIN_DIR/$test_name\"\n    local exe_path=\"$EXE_DIR/$test_name\"\n\n    # Check binary exists\n    assert_file_exists \"$test_name: binary installed\" \"$bin_path\"\n\n    # Check exe directory exists\n    assert_dir_exists \"$test_name: exe directory created\" \"$exe_path\"\n\n    # Check pools directory copied\n    assert_dir_exists \"$test_name: pools directory copied\" \"$exe_path/pools\"\n\n    # Return included-file checks to the caller via the callback pattern\n    # (caller adds assert_file_exists calls after this function)\n\n    # Run the installed program and check output\n    local actual\n    actual=$(\"$bin_path\" \"$subcommand\" \"$arg\" 2>&1) || actual=\"ERROR: rc=$?\"\n    assert_test \"$test_name: output correct\" \"$expected\" \"$actual\"\n\n    # Uninstall\n    morloc uninstall --program \"$test_name\" >/dev/null 2>&1 || true\n\n    # Verify uninstall cleaned up\n    assert_not_exists \"$test_name: binary removed after uninstall\" \"$bin_path\"\n\n    rm -rf \"$work_dir\"\n}\n\n# Collect filter arguments\nFILTERS=(\"$@\")\n\necho \"${BOLD}morloc install tests${RESET}\"\necho \"\"\n\n# ======================================================================\n# Python tests\n# ======================================================================\n\n# --- testpy1: direct source in cwd, include file via package.yaml ---\nif should_run \"testpy1\"; then\n    echo \"${BOLD}[testpy1] Python: direct source in cwd, include via package.yaml${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testpy1\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testpy1 main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testpy1: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testpy1: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testpy1: binary installed\" \"$BIN_DIR/testpy1\"\n        assert_dir_exists  \"testpy1: exe directory created\" \"$EXE_DIR/testpy1\"\n        assert_dir_exists  \"testpy1: pools directory copied\" \"$EXE_DIR/testpy1/pools\"\n        assert_file_exists \"testpy1: helpers.py included\" \"$EXE_DIR/testpy1/helpers.py\"\n\n        ACTUAL=$(\"$BIN_DIR/testpy1\" pygreet '\"world\"' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testpy1: output correct\" '\"hello world\"' \"$ACTUAL\"\n\n        morloc uninstall --program testpy1 >/dev/null 2>&1 || true\n        assert_not_exists \"testpy1: cleaned up after uninstall\" \"$BIN_DIR/testpy1\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# --- testpy2: source in src/, include whole folder via package.yaml ---\nif should_run \"testpy2\"; then\n    echo \"${BOLD}[testpy2] Python: source in src/, include folder via package.yaml${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testpy2\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testpy2 main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testpy2: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testpy2: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testpy2: binary installed\" \"$BIN_DIR/testpy2\"\n        assert_dir_exists  \"testpy2: exe directory created\" \"$EXE_DIR/testpy2\"\n        assert_dir_exists  \"testpy2: src/ folder included\" \"$EXE_DIR/testpy2/src\"\n        assert_file_exists \"testpy2: src/mathutil.py included\" \"$EXE_DIR/testpy2/src/mathutil.py\"\n\n        ACTUAL=$(\"$BIN_DIR/testpy2\" pyadd '3' '4' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testpy2: output correct\" \"7\" \"$ACTUAL\"\n\n        morloc uninstall --program testpy2 >/dev/null 2>&1 || true\n        assert_not_exists \"testpy2: cleaned up after uninstall\" \"$BIN_DIR/testpy2\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# --- testpy3: direct source + indirect import, include via --include CLI ---\nif should_run \"testpy3\"; then\n    echo \"${BOLD}[testpy3] Python: indirect dependency, include via --include CLI${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testpy3\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testpy3 --include \"formatter.py\" --include \"fmtlib.py\" main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testpy3: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testpy3: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testpy3: binary installed\" \"$BIN_DIR/testpy3\"\n        assert_dir_exists  \"testpy3: exe directory created\" \"$EXE_DIR/testpy3\"\n        assert_file_exists \"testpy3: formatter.py included\" \"$EXE_DIR/testpy3/formatter.py\"\n        assert_file_exists \"testpy3: fmtlib.py included (indirect)\" \"$EXE_DIR/testpy3/fmtlib.py\"\n\n        ACTUAL=$(\"$BIN_DIR/testpy3\" pyformat '\"x\"' '5' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testpy3: output correct\" '\"x=5\"' \"$ACTUAL\"\n\n        morloc uninstall --program testpy3 >/dev/null 2>&1 || true\n        assert_not_exists \"testpy3: cleaned up after uninstall\" \"$BIN_DIR/testpy3\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# ======================================================================\n# C++ tests\n# ======================================================================\n\n# --- testcpp1: direct source in cwd, include file via package.yaml ---\nif should_run \"testcpp1\"; then\n    echo \"${BOLD}[testcpp1] C++: direct source in cwd, include via package.yaml${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testcpp1\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testcpp1 main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testcpp1: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testcpp1: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testcpp1: binary installed\" \"$BIN_DIR/testcpp1\"\n        assert_dir_exists  \"testcpp1: exe directory created\" \"$EXE_DIR/testcpp1\"\n        assert_dir_exists  \"testcpp1: pools directory copied\" \"$EXE_DIR/testcpp1/pools\"\n        assert_file_exists \"testcpp1: square.hpp included\" \"$EXE_DIR/testcpp1/square.hpp\"\n\n        ACTUAL=$(\"$BIN_DIR/testcpp1\" cppsquare '7' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testcpp1: output correct\" \"49\" \"$ACTUAL\"\n\n        morloc uninstall --program testcpp1 >/dev/null 2>&1 || true\n        assert_not_exists \"testcpp1: cleaned up after uninstall\" \"$BIN_DIR/testcpp1\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# --- testcpp2: source in src/, include whole folder via package.yaml ---\nif should_run \"testcpp2\"; then\n    echo \"${BOLD}[testcpp2] C++: source in src/, include folder via package.yaml${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testcpp2\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testcpp2 main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testcpp2: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testcpp2: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testcpp2: binary installed\" \"$BIN_DIR/testcpp2\"\n        assert_dir_exists  \"testcpp2: exe directory created\" \"$EXE_DIR/testcpp2\"\n        assert_dir_exists  \"testcpp2: src/ folder included\" \"$EXE_DIR/testcpp2/src\"\n        assert_file_exists \"testcpp2: src/dbl.hpp included\" \"$EXE_DIR/testcpp2/src/dbl.hpp\"\n\n        ACTUAL=$(\"$BIN_DIR/testcpp2\" cppdouble '6' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testcpp2: output correct\" \"12\" \"$ACTUAL\"\n\n        morloc uninstall --program testcpp2 >/dev/null 2>&1 || true\n        assert_not_exists \"testcpp2: cleaned up after uninstall\" \"$BIN_DIR/testcpp2\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# --- testcpp3: direct source + indirect #include, include via --include CLI ---\nif should_run \"testcpp3\"; then\n    echo \"${BOLD}[testcpp3] C++: indirect #include, include via --include CLI${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testcpp3\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testcpp3 --include \"inc.hpp\" --include \"offset.hpp\" main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testcpp3: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testcpp3: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testcpp3: binary installed\" \"$BIN_DIR/testcpp3\"\n        assert_dir_exists  \"testcpp3: exe directory created\" \"$EXE_DIR/testcpp3\"\n        assert_file_exists \"testcpp3: inc.hpp included\" \"$EXE_DIR/testcpp3/inc.hpp\"\n        assert_file_exists \"testcpp3: offset.hpp included (indirect)\" \"$EXE_DIR/testcpp3/offset.hpp\"\n\n        ACTUAL=$(\"$BIN_DIR/testcpp3\" cppinc '10' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testcpp3: output correct\" \"11\" \"$ACTUAL\"\n\n        morloc uninstall --program testcpp3 >/dev/null 2>&1 || true\n        assert_not_exists \"testcpp3: cleaned up after uninstall\" \"$BIN_DIR/testcpp3\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# ======================================================================\n# R tests\n# ======================================================================\n\n# --- testr1: direct source in cwd, include file via package.yaml ---\nif should_run \"testr1\"; then\n    echo \"${BOLD}[testr1] R: direct source in cwd, include via package.yaml${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testr1\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testr1 main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testr1: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testr1: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testr1: binary installed\" \"$BIN_DIR/testr1\"\n        assert_dir_exists  \"testr1: exe directory created\" \"$EXE_DIR/testr1\"\n        assert_dir_exists  \"testr1: pools directory copied\" \"$EXE_DIR/testr1/pools\"\n        assert_file_exists \"testr1: negate.R included\" \"$EXE_DIR/testr1/negate.R\"\n\n        ACTUAL=$(\"$BIN_DIR/testr1\" rnegate '5.0' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testr1: output correct\" \"-5\" \"$ACTUAL\"\n\n        morloc uninstall --program testr1 >/dev/null 2>&1 || true\n        assert_not_exists \"testr1: cleaned up after uninstall\" \"$BIN_DIR/testr1\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# --- testr2: source in src/, include whole folder via package.yaml ---\nif should_run \"testr2\"; then\n    echo \"${BOLD}[testr2] R: source in src/, include folder via package.yaml${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testr2\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testr2 main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testr2: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testr2: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testr2: binary installed\" \"$BIN_DIR/testr2\"\n        assert_dir_exists  \"testr2: exe directory created\" \"$EXE_DIR/testr2\"\n        assert_dir_exists  \"testr2: src/ folder included\" \"$EXE_DIR/testr2/src\"\n        assert_file_exists \"testr2: src/triple.R included\" \"$EXE_DIR/testr2/src/triple.R\"\n\n        ACTUAL=$(\"$BIN_DIR/testr2\" rtriple '4' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testr2: output correct\" \"12\" \"$ACTUAL\"\n\n        morloc uninstall --program testr2 >/dev/null 2>&1 || true\n        assert_not_exists \"testr2: cleaned up after uninstall\" \"$BIN_DIR/testr2\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# --- testr3: direct source + indirect source(), include via --include CLI ---\nif should_run \"testr3\"; then\n    echo \"${BOLD}[testr3] R: indirect source(), include via --include CLI${RESET}\"\n\n    TEST_DIR=\"$SCRIPT_DIR/testr3\"\n    WORK_DIR=$(mktemp -d)\n    cp -r \"$TEST_DIR\"/. \"$WORK_DIR\"/\n\n    BUILD_ERR=$(cd \"$WORK_DIR\" && morloc make --install --force -o testr3 --include \"glue.R\" --include \"rutil.R\" main.loc 2>&1) || {\n        TOTAL=$((TOTAL + 1))\n        printf \"  %-55s \" \"testr3: build\"\n        printf \"%sFAIL%s\\n\" \"$RED\" \"$RESET\"\n        FAILED=$((FAILED + 1))\n        FAILURES+=(\"testr3: build\")\n        echo \"$BUILD_ERR\" | tail -5 | sed 's/^/      /'\n        rm -rf \"$WORK_DIR\"\n    }\n\n    if [[ -d \"$WORK_DIR\" ]]; then\n        assert_file_exists \"testr3: binary installed\" \"$BIN_DIR/testr3\"\n        assert_dir_exists  \"testr3: exe directory created\" \"$EXE_DIR/testr3\"\n        assert_file_exists \"testr3: glue.R included\" \"$EXE_DIR/testr3/glue.R\"\n        assert_file_exists \"testr3: rutil.R included (indirect)\" \"$EXE_DIR/testr3/rutil.R\"\n\n        ACTUAL=$(\"$BIN_DIR/testr3\" rpaste '\"foo\"' '\"bar\"' 2>&1) || ACTUAL=\"ERROR: rc=$?\"\n        assert_test \"testr3: output correct\" '\"foobar\"' \"$ACTUAL\"\n\n        morloc uninstall --program testr3 >/dev/null 2>&1 || true\n        assert_not_exists \"testr3: cleaned up after uninstall\" \"$BIN_DIR/testr3\"\n\n        rm -rf \"$WORK_DIR\"\n    fi\n    echo \"\"\nfi\n\n# ======================================================================\n# Results\n# ======================================================================\n\necho \"=== Results ===\"\necho \"${GREEN}Passed: $PASSED${RESET}, ${RED}Failed: $FAILED${RESET}, Total: $TOTAL\"\n\nif (( FAILED > 0 )); then\n    echo \"\"\n    echo \"${RED}Failures:${RESET}\"\n    for f in \"${FAILURES[@]}\"; do\n        echo \"  ${RED}-${RESET} $f\"\n    done\n    exit 1\nfi\necho \"${GREEN}${BOLD}ALL PASSED${RESET}\"\n"
  },
  {
    "path": "test-suite/install-tests/testcpp1/main.loc",
    "content": "module testcpp1 (cppsquare)\n\nimport root-cpp\n\nsource Cpp from \"square.hpp\" (\"cppsquare\")\n\ncppsquare :: Int -> Int\n"
  },
  {
    "path": "test-suite/install-tests/testcpp1/package.yaml",
    "content": "name: testcpp1\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"square.hpp\"\n"
  },
  {
    "path": "test-suite/install-tests/testcpp1/square.hpp",
    "content": "#ifndef SQUARE_HPP\n#define SQUARE_HPP\n\nint cppsquare(int x) {\n    return x * x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/install-tests/testcpp2/main.loc",
    "content": "module testcpp2 (cppdouble)\n\nimport root-cpp\n\nsource Cpp from \"src/dbl.hpp\" (\"cppdouble\")\n\ncppdouble :: Int -> Int\n"
  },
  {
    "path": "test-suite/install-tests/testcpp2/package.yaml",
    "content": "name: testcpp2\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"src/\"\n"
  },
  {
    "path": "test-suite/install-tests/testcpp2/src/dbl.hpp",
    "content": "#ifndef DBL_HPP\n#define DBL_HPP\n\nint cppdouble(int x) {\n    return x + x;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/install-tests/testcpp3/inc.hpp",
    "content": "#ifndef INC_HPP\n#define INC_HPP\n\n#include \"offset.hpp\"\n\nint cppinc(int x) {\n    return x + OFFSET;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/install-tests/testcpp3/main.loc",
    "content": "module testcpp3 (cppinc)\n\nimport root-cpp\n\nsource Cpp from \"inc.hpp\" (\"cppinc\")\n\ncppinc :: Int -> Int\n"
  },
  {
    "path": "test-suite/install-tests/testcpp3/offset.hpp",
    "content": "#ifndef OFFSET_HPP\n#define OFFSET_HPP\n\n#define OFFSET 1\n\n#endif\n"
  },
  {
    "path": "test-suite/install-tests/testdatafile1/data.txt",
    "content": "hello from datafile"
  },
  {
    "path": "test-suite/install-tests/testdatafile1/main.loc",
    "content": "module testdatafile1 (readData)\n\nimport root-py\n\nsource Py from \"reader.py\" (\"readfile\")\n\nreadfile :: Str -> Str\n\nreadData :: Str\nreadData = readfile (@datafile \"data.txt\")\n"
  },
  {
    "path": "test-suite/install-tests/testdatafile1/package.yaml",
    "content": "name: testdatafile1\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"reader.py\"\n  - \"data.txt\"\n"
  },
  {
    "path": "test-suite/install-tests/testdatafile1/reader.py",
    "content": "def readfile(path):\n    with open(path) as f:\n        return f.read().strip()\n"
  },
  {
    "path": "test-suite/install-tests/testpy1/helpers.py",
    "content": "def pygreet(name):\n    return \"hello \" + name\n"
  },
  {
    "path": "test-suite/install-tests/testpy1/main.loc",
    "content": "module testpy1 (pygreet)\n\nimport root-py\n\nsource Py from \"helpers.py\" (\"pygreet\")\n\npygreet :: Str -> Str\n"
  },
  {
    "path": "test-suite/install-tests/testpy1/package.yaml",
    "content": "name: testpy1\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"helpers.py\"\n"
  },
  {
    "path": "test-suite/install-tests/testpy2/main.loc",
    "content": "module testpy2 (pyadd)\n\nimport root-py\n\nsource Py from \"src/mathutil.py\" (\"pyadd\")\n\npyadd :: Int -> Int -> Int\n"
  },
  {
    "path": "test-suite/install-tests/testpy2/package.yaml",
    "content": "name: testpy2\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"src/\"\n"
  },
  {
    "path": "test-suite/install-tests/testpy2/src/mathutil.py",
    "content": "def pyadd(x, y):\n    return x + y\n"
  },
  {
    "path": "test-suite/install-tests/testpy3/fmtlib.py",
    "content": "def fmt_pair(name, n):\n    return name + \"=\" + str(n)\n"
  },
  {
    "path": "test-suite/install-tests/testpy3/formatter.py",
    "content": "from fmtlib import fmt_pair\n\ndef pyformat(name, n):\n    return fmt_pair(name, n)\n"
  },
  {
    "path": "test-suite/install-tests/testpy3/main.loc",
    "content": "module testpy3 (pyformat)\n\nimport root-py\n\nsource Py from \"formatter.py\" (\"pyformat\")\n\npyformat :: Str -> Int -> Str\n"
  },
  {
    "path": "test-suite/install-tests/testr1/main.loc",
    "content": "module testr1 (rnegate)\n\nimport root-r\n\nsource R from \"negate.R\" (\"rnegate\")\n\nrnegate :: Real -> Real\n"
  },
  {
    "path": "test-suite/install-tests/testr1/negate.R",
    "content": "rnegate <- function(x) {\n  -x\n}\n"
  },
  {
    "path": "test-suite/install-tests/testr1/package.yaml",
    "content": "name: testr1\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"negate.R\"\n"
  },
  {
    "path": "test-suite/install-tests/testr2/main.loc",
    "content": "module testr2 (rtriple)\n\nimport root-r\n\nsource R from \"src/triple.R\" (\"rtriple\")\n\nrtriple :: Int -> Int\n"
  },
  {
    "path": "test-suite/install-tests/testr2/package.yaml",
    "content": "name: testr2\nversion: 0.1.0\nhomepage: null\nsynopsis: null\ndescription: null\ncategory: null\nlicense: MIT\nauthor: null\nmaintainer: null\ngithub: null\nbug-reports: null\ndependencies: []\ninclude:\n  - \"src/\"\n"
  },
  {
    "path": "test-suite/install-tests/testr2/src/triple.R",
    "content": "rtriple <- function(x) {\n  as.integer(x * 3L)\n}\n"
  },
  {
    "path": "test-suite/install-tests/testr3/glue.R",
    "content": "source(\"rutil.R\")\n\nrpaste <- function(a, b) {\n  rjoin(a, b)\n}\n"
  },
  {
    "path": "test-suite/install-tests/testr3/main.loc",
    "content": "module testr3 (rpaste)\n\nimport root-r\n\nsource R from \"glue.R\" (\"rpaste\")\n\nrpaste :: Str -> Str -> Str\n"
  },
  {
    "path": "test-suite/install-tests/testr3/rutil.R",
    "content": "rjoin <- function(a, b) {\n  paste0(a, b)\n}\n"
  },
  {
    "path": "test-suite/integration/Main.hs",
    "content": "module Main (main) where\n\nimport System.Directory (getHomeDirectory, makeAbsolute)\nimport System.Environment (getArgs, lookupEnv, withArgs)\nimport System.FilePath ((</>))\nimport Test.Tasty (defaultMain, testGroup)\n\nimport Morloc.Test.Common (TestEnv (..))\nimport Morloc.Test.ConcurrencyTests (concurrencyTests)\nimport Morloc.Test.DaemonTests (daemonTests)\nimport Morloc.Test.InstallTests (installTests)\nimport Morloc.Test.ShmTests (shmTests)\nimport Morloc.Test.StressTests (stressTests)\n\nmain :: IO ()\nmain = do\n  suiteDir <- makeAbsolute \"test-suite\"\n  home <- getHomeDirectory\n  morlocHome <- maybe (home </> \".local/share/morloc\") id <$> lookupEnv \"MORLOC_HOME\"\n  let env =\n        TestEnv\n          { teSuiteDir = suiteDir\n          , teMorlocHome = morlocHome\n          }\n  -- Default to sequential execution: stress tests measure global resources\n  -- (SHM segments in /dev/shm) and cannot run concurrently with other tests.\n  -- Override with: --test-arguments=\"--num-threads N\"\n  args <- getArgs\n  let hasNumThreads =\n        any\n          ( \\a ->\n              \"--num-threads\" == a\n                || \"-j\" == a\n                || take 14 a == \"--num-threads=\"\n                || take 3 a == \"-j=\"\n          )\n          args\n      args' = if hasNumThreads then args else \"--num-threads\" : \"1\" : args\n  withArgs args' $\n    defaultMain $\n      testGroup\n        \"Integration Tests\"\n        [ installTests env\n        , concurrencyTests env\n        , daemonTests env\n        , stressTests env\n        , shmTests env\n        ]\n"
  },
  {
    "path": "test-suite/integration/Morloc/Test/Common.hs",
    "content": "module Morloc.Test.Common\n  ( TestEnv (..)\n  , withTestCopy\n  , withTestDir\n  , morlocMake\n  , morlocInstall\n  , morlocUninstall\n  , runProgram\n  , runNexus\n  , runNexusQuiet\n  , assertFileExists\n  , assertDirExists\n  , assertNotExists\n  , assertContains\n  , assertJsonEq\n  -- Daemon helpers\n  , DaemonHandle (..)\n  , withDaemon\n  , pickFreePort\n  , waitForHttp\n  , httpGet\n  , httpPost\n  , lpRequest\n  , jsonField\n  -- Resource tracking\n  , countZombies\n  , countShm\n  , countTmp\n  , listShm\n  , listShmWithAge\n  -- Utilities\n  , strip\n  , readDef\n  , cleanupMorlocResources\n  ) where\n\nimport Control.Concurrent (threadDelay)\nimport Control.Exception (SomeException, bracket, try)\nimport Data.List (isInfixOf)\nimport Data.Maybe (catMaybes)\nimport Data.Time.Clock (UTCTime, diffUTCTime, getCurrentTime)\nimport GHC.Stack (HasCallStack)\nimport System.Directory\n  ( copyFile\n  , createDirectoryIfMissing\n  , doesDirectoryExist\n  , doesFileExist\n  , doesPathExist\n  , getModificationTime\n  , listDirectory\n  , removeDirectoryRecursive\n  )\nimport System.Exit (ExitCode (..))\nimport System.FilePath (takeDirectory, (</>))\nimport System.IO (IOMode (..), hClose, openFile)\nimport System.IO.Temp (createTempDirectory, getCanonicalTemporaryDirectory)\nimport System.Process\n  ( CreateProcess (cwd, std_err, std_out)\n  , ProcessHandle\n  , StdStream (..)\n  , createProcess\n  , proc\n  , readCreateProcessWithExitCode\n  , readProcessWithExitCode\n  , terminateProcess\n  , waitForProcess\n  )\nimport Test.Tasty.HUnit (Assertion, assertBool, assertFailure)\n\ndata TestEnv = TestEnv\n  { teSuiteDir :: FilePath\n  , teMorlocHome :: FilePath\n  }\n\n-- | Create a temp directory, run action, clean up\nwithTestDir :: (FilePath -> IO a) -> IO a\nwithTestDir action = bracket setup cleanup action\n  where\n    setup = do\n      tmpBase <- getCanonicalTemporaryDirectory\n      createTempDirectory tmpBase \"morloc-test\"\n    cleanup = removeDirectoryRecursive\n\n-- | Copy source dir into a temp dir, run action, clean up\nwithTestCopy :: FilePath -> (FilePath -> IO a) -> IO a\nwithTestCopy srcDir action = bracket setup cleanup action\n  where\n    setup = do\n      tmpBase <- getCanonicalTemporaryDirectory\n      tmpDir <- createTempDirectory tmpBase \"morloc-test\"\n      copyDirRecursive srcDir tmpDir\n      return tmpDir\n    cleanup = removeDirectoryRecursive\n\ncopyDirRecursive :: FilePath -> FilePath -> IO ()\ncopyDirRecursive src dst = do\n  entries <- listDirectory src\n  mapM_ (copyEntry src dst) entries\n  where\n    copyEntry s d name = do\n      let sp = s </> name\n          dp = d </> name\n      isDir <- doesDirectoryExist sp\n      if isDir\n        then do\n          createDirectoryIfMissing True dp\n          copyDirRecursive sp dp\n        else do\n          createDirectoryIfMissing True (takeDirectory dp)\n          copyFile sp dp\n\n-- | Compile a .loc file with morloc make\nmorlocMake ::\n  FilePath ->\n  String ->\n  String ->\n  IO (ExitCode, String, String)\nmorlocMake workDir outName locFile = do\n  let args = [\"make\", \"-o\", outName, locFile]\n      cp = (proc \"morloc\" args) {cwd = Just workDir}\n  readCreateProcessWithExitCode cp \"\"\n\n-- | Compile and install a .loc file\nmorlocInstall ::\n  FilePath ->\n  String ->\n  [String] ->\n  String ->\n  IO (ExitCode, String, String)\nmorlocInstall workDir outName extraArgs locFile = do\n  let args = [\"make\", \"--install\", \"--force\", \"-o\", outName] ++ extraArgs ++ [locFile]\n      cp = (proc \"morloc\" args) {cwd = Just workDir}\n  readCreateProcessWithExitCode cp \"\"\n\nmorlocUninstall :: String -> IO ()\nmorlocUninstall progName = do\n  _ <- readProcessWithExitCode \"morloc\" [\"uninstall\", \"--program\", progName] \"\"\n  return ()\n\n-- | Run an installed program\nrunProgram :: FilePath -> String -> [String] -> IO (ExitCode, String, String)\nrunProgram binPath subcmd args =\n  readProcessWithExitCode binPath (subcmd : args) \"\"\n\n-- | Run a nexus binary in a working directory\nrunNexus :: FilePath -> String -> [String] -> IO (ExitCode, String, String)\nrunNexus workDir subcmd args = do\n  let cp = (proc (workDir </> \"nexus\") (subcmd : args)) {cwd = Just workDir}\n  readCreateProcessWithExitCode cp \"\"\n\n{- | Run a nexus binary with output redirected to /dev/null.\nUnlike runNexus, this does not capture stdout/stderr via pipes, so child\npool processes inherit /dev/null and won't be affected by pipe closure.\nUse this in stress tests to match the shell test behavior.\n-}\nrunNexusQuiet :: FilePath -> String -> [String] -> IO ExitCode\nrunNexusQuiet workDir subcmd args = do\n  devNull <- openFile \"/dev/null\" WriteMode\n  let cp =\n        (proc (workDir </> \"nexus\") (subcmd : args))\n          { cwd = Just workDir\n          , std_out = UseHandle devNull\n          , std_err = UseHandle devNull\n          }\n  (_, _, _, ph) <- createProcess cp\n  ec <- waitForProcess ph\n  hClose devNull\n  return ec\n\n-- ======================================================================\n-- Daemon lifecycle\n-- ======================================================================\n\ndata DaemonHandle = DaemonHandle\n  { dhProcess :: ProcessHandle\n  , dhWorkDir :: FilePath\n  }\n\n-- | Start a daemon, run action, stop daemon\nwithDaemon ::\n  -- | working directory with compiled nexus\n  FilePath ->\n  -- | extra daemon args (e.g. [\"--http-port\", \"12345\"])\n  [String] ->\n  (DaemonHandle -> IO a) ->\n  IO a\nwithDaemon workDir extraArgs action = bracket startD stopD action\n  where\n    startD = do\n      devNull <- openFile \"/dev/null\" WriteMode\n      let cp =\n            (proc (workDir </> \"nexus\") (\"--daemon\" : extraArgs))\n              { cwd = Just workDir\n              , std_out = UseHandle devNull\n              , std_err = UseHandle devNull\n              }\n      (_, _, _, ph) <- createProcess cp\n      return (DaemonHandle ph workDir)\n    stopD dh = do\n      terminateProcess (dhProcess dh)\n      _ <- waitForProcess (dhProcess dh)\n      return ()\n\n-- | Pick a random free TCP port\npickFreePort :: IO Int\npickFreePort = do\n  (_, out, _) <-\n    readProcessWithExitCode\n      \"python3\"\n      [ \"-c\"\n      , \"import socket; s=socket.socket(); s.bind(('127.0.0.1',0)); print(s.getsockname()[1]); s.close()\"\n      ]\n      \"\"\n  return (read (strip out))\n\n-- | Wait for an HTTP port to respond, with retries\nwaitForHttp :: Int -> Int -> IO Bool\nwaitForHttp port maxWaitMs = go 0\n  where\n    stepMs = 200\n    go elapsed\n      | elapsed >= maxWaitMs = return False\n      | otherwise = do\n          result <-\n            try $ httpGet (\"http://127.0.0.1:\" ++ show port ++ \"/health\") ::\n              IO (Either SomeException (ExitCode, String, String))\n          case result of\n            Right (ExitSuccess, _, _) -> return True\n            _ -> do\n              threadDelay (stepMs * 1000)\n              go (elapsed + stepMs)\n\n-- | HTTP GET via curl\nhttpGet :: String -> IO (ExitCode, String, String)\nhttpGet url = readProcessWithExitCode \"curl\" [\"-s\", url] \"\"\n\n-- | HTTP POST via curl\nhttpPost :: String -> String -> IO (ExitCode, String, String)\nhttpPost url body =\n  readProcessWithExitCode\n    \"curl\"\n    [\"-s\", \"-X\", \"POST\", url, \"-H\", \"Content-Type: application/json\", \"-d\", body]\n    \"\"\n\n-- | Send a length-prefixed JSON message to a Unix socket or TCP endpoint\nlpRequest :: String -> String -> IO String\nlpRequest target jsonMsg = do\n  let script =\n        unlines\n          [ \"import socket, struct, sys, json\"\n          , \"target = sys.argv[1]\"\n          , \"msg = sys.argv[2].encode('utf-8')\"\n          , \"if target.startswith('/'):\"\n          , \"    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\"\n          , \"    s.connect(target)\"\n          , \"else:\"\n          , \"    host, port = target.rsplit(':', 1)\"\n          , \"    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\"\n          , \"    s.connect((host, int(port)))\"\n          , \"s.settimeout(10)\"\n          , \"s.sendall(struct.pack('>I', len(msg)) + msg)\"\n          , \"resp_len_bytes = b''\"\n          , \"while len(resp_len_bytes) < 4:\"\n          , \"    chunk = s.recv(4 - len(resp_len_bytes))\"\n          , \"    if not chunk: break\"\n          , \"    resp_len_bytes += chunk\"\n          , \"resp_len = struct.unpack('>I', resp_len_bytes)[0]\"\n          , \"resp = b''\"\n          , \"while len(resp) < resp_len:\"\n          , \"    chunk = s.recv(resp_len - len(resp))\"\n          , \"    if not chunk: break\"\n          , \"    resp += chunk\"\n          , \"s.close()\"\n          , \"print(resp.decode('utf-8'))\"\n          ]\n  (_, out, _) <- readProcessWithExitCode \"python3\" [\"-c\", script, target, jsonMsg] \"\"\n  return (strip out)\n\n-- | Extract a JSON field value using python3\njsonField :: String -> String -> IO String\njsonField jsonStr field = do\n  let script =\n        unlines\n          [ \"import json, sys\"\n          , \"data = json.loads(sys.argv[1])\"\n          , \"val = data.get(sys.argv[2])\"\n          , \"if val is None:\"\n          , \"    print('')\"\n          , \"elif isinstance(val, (dict, list)):\"\n          , \"    print(json.dumps(val, separators=(',', ':')))\"\n          , \"elif isinstance(val, bool):\"\n          , \"    print('true' if val else 'false')\"\n          , \"else:\"\n          , \"    print(val)\"\n          ]\n  (_, out, _) <- readProcessWithExitCode \"python3\" [\"-c\", script, jsonStr, field] \"\"\n  return (strip out)\n\n-- ======================================================================\n-- Resource tracking\n-- ======================================================================\n\ncountZombies :: IO Int\ncountZombies = do\n  (_, out, _) <-\n    readProcessWithExitCode \"sh\" [\"-c\", \"ps -eo stat 2>/dev/null | grep -c '^Z' || echo 0\"] \"\"\n  return (readDef 0 (strip out))\n\ncountShm :: IO Int\ncountShm = do\n  (_, out, _) <-\n    readProcessWithExitCode\n      \"sh\"\n      [\"-c\", \"ls -1 /dev/shm/morloc-* 2>/dev/null | wc -l || echo 0\"]\n      \"\"\n  return (readDef 0 (strip out))\n\ncountTmp :: IO Int\ncountTmp = do\n  (_, out, _) <-\n    readProcessWithExitCode\n      \"sh\"\n      [\"-c\", \"ls -1d /tmp/morloc.* 2>/dev/null | wc -l || echo 0\"]\n      \"\"\n  return (readDef 0 (strip out))\n\n-- | List actual SHM segment names for diagnostics\nlistShm :: IO [String]\nlistShm = do\n  (_, out, _) <-\n    readProcessWithExitCode\n      \"sh\"\n      [\"-c\", \"ls -1 /dev/shm/morloc-* 2>/dev/null || true\"]\n      \"\"\n  return (filter (not . null) (lines out))\n\n-- | List SHM segments with age in seconds (from stat mtime)\nlistShmWithAge :: IO [(String, Double)]\nlistShmWithAge = do\n  segs <- listShm\n  now <- getCurrentTime\n  catMaybes <$> mapM (getAge now) segs\n  where\n    getAge now seg = do\n      result <- try (getModificationTime seg) :: IO (Either SomeException UTCTime)\n      return $ case result of\n        Right mtime -> Just (seg, realToFrac (diffUTCTime now mtime))\n        Left _ -> Nothing\n\n-- ======================================================================\n-- Assertions\n-- ======================================================================\n\nassertFileExists :: (HasCallStack) => String -> FilePath -> Assertion\nassertFileExists label path = do\n  exists <- doesFileExist path\n  if exists\n    then return ()\n    else assertFailure (label ++ \": file not found: \" ++ path)\n\nassertDirExists :: (HasCallStack) => String -> FilePath -> Assertion\nassertDirExists label path = do\n  exists <- doesDirectoryExist path\n  if exists\n    then return ()\n    else assertFailure (label ++ \": directory not found: \" ++ path)\n\nassertNotExists :: (HasCallStack) => String -> FilePath -> Assertion\nassertNotExists label path = do\n  exists <- doesPathExist path\n  if exists\n    then assertFailure (label ++ \": should not exist: \" ++ path)\n    else return ()\n\nassertContains :: (HasCallStack) => String -> String -> String -> Assertion\nassertContains label needle haystack =\n  assertBool\n    (label ++ \": expected to contain \" ++ show needle ++ \" in \" ++ show (take 200 haystack))\n    (needle `isInfixOf` haystack)\n\n-- | Assert a JSON field equals an expected value, showing the raw response on failure\nassertJsonEq :: (HasCallStack) => String -> String -> String -> String -> Assertion\nassertJsonEq label rawJson field expected = do\n  val <- jsonField rawJson field\n  if val == expected\n    then return ()\n    else\n      assertFailure $\n        label\n          ++ \": field \"\n          ++ show field\n          ++ \" expected \"\n          ++ show expected\n          ++ \" but got \"\n          ++ show val\n          ++ \"\\n  raw response: \"\n          ++ show (take 500 rawJson)\n\n-- ======================================================================\n-- Internal utilities\n-- ======================================================================\n\nstrip :: String -> String\nstrip = reverse . dropWhile (== '\\n') . reverse . dropWhile (== '\\n')\n\nreadDef :: Int -> String -> Int\nreadDef def s = case reads s of\n  [(n, _)] -> n\n  _ -> def\n\n-- | Remove stale morloc SHM segments and tmp dirs to get a clean baseline\ncleanupMorlocResources :: IO ()\ncleanupMorlocResources = do\n  _ <- readProcessWithExitCode \"sh\" [\"-c\", \"rm -f /dev/shm/morloc-* 2>/dev/null\"] \"\"\n  _ <- readProcessWithExitCode \"sh\" [\"-c\", \"rm -rf /tmp/morloc.* 2>/dev/null\"] \"\"\n  return ()\n"
  },
  {
    "path": "test-suite/integration/Morloc/Test/ConcurrencyTests.hs",
    "content": "module Morloc.Test.ConcurrencyTests (concurrencyTests) where\n\nimport System.Directory (copyFile, doesDirectoryExist, listDirectory)\nimport System.Exit (ExitCode (..))\nimport System.FilePath ((</>))\nimport Test.Tasty (TestTree, testGroup)\nimport Test.Tasty.HUnit (assertFailure, testCase)\n\nimport Morloc.Test.Common\n\n-- | A concurrency test spec: compile a .loc file, run each subcommand\ndata ConcSpec = ConcSpec\n  { csLocFile :: String -- .loc filename (relative to concurrency-tests/)\n  , csSubcommands :: [String] -- exported functions to run\n  }\n\nconcurrencyTest :: TestEnv -> String -> ConcSpec -> TestTree\nconcurrencyTest env name spec =\n  testGroup\n    name\n    [ testCase subcmd $ runSubcmd env spec subcmd\n    | subcmd <- csSubcommands spec\n    ]\n\nrunSubcmd :: TestEnv -> ConcSpec -> String -> IO ()\nrunSubcmd env spec subcmd = do\n  let srcDir = teSuiteDir env </> \"concurrency-tests\"\n  withTestDir $ \\workDir -> do\n    -- Copy .loc file and helpers\n    copyLocAndHelpers srcDir (csLocFile spec) workDir\n\n    -- Compile\n    (ec, _, err) <- morlocMake workDir \"nexus\" (csLocFile spec)\n    case ec of\n      ExitSuccess -> return ()\n      ExitFailure c ->\n        assertFailure $\n          csLocFile spec ++ \": compile failed (exit \" ++ show c ++ \"):\\n\" ++ err\n\n    -- Run with timeout (handled by tasty's timeout mechanism)\n    (rc, _, stderr) <- runNexus workDir subcmd []\n    case rc of\n      ExitSuccess -> return ()\n      ExitFailure c ->\n        assertFailure $\n          csLocFile spec ++ \":\" ++ subcmd ++ \" failed (exit \" ++ show c ++ \"):\\n\" ++ stderr\n\ncopyLocAndHelpers :: FilePath -> String -> FilePath -> IO ()\ncopyLocAndHelpers srcDir locFile workDir = do\n  copyFile (srcDir </> locFile) (workDir </> locFile)\n  let helpersDir = srcDir </> \"helpers\"\n  helpersExist <- doesDirectoryExist helpersDir\n  if helpersExist\n    then do\n      entries <- listDirectory helpersDir\n      mapM_ (\\f -> copyFile (helpersDir </> f) (workDir </> f)) entries\n    else return ()\n\nconcurrencyTests :: TestEnv -> TestTree\nconcurrencyTests env =\n  testGroup\n    \"Concurrency\"\n    [ concurrencyTest env \"bidi-py-r\" $\n        ConcSpec\n          { csLocFile = \"bidi-py-r.loc\"\n          , csSubcommands = [\"testUni\", \"testBidi1\", \"testBidi5\", \"testBidi10\", \"testBidi11\", \"testBidi15\"]\n          }\n    , concurrencyTest env \"bidi-r-py\" $\n        ConcSpec\n          { csLocFile = \"bidi-r-py.loc\"\n          , csSubcommands = [\"testBidi1\", \"testBidi5\", \"testBidi10\", \"testBidi11\", \"testBidi15\"]\n          }\n    , concurrencyTest env \"concurrent-uni\" $\n        ConcSpec\n          { csLocFile = \"concurrent-uni.loc\"\n          , csSubcommands = [\"testPyToR15\", \"testRToPy15\", \"testPyToR20\"]\n          }\n    , concurrencyTest env \"deep-callback\" $\n        ConcSpec\n          { csLocFile = \"deep-callback.loc\"\n          , csSubcommands =\n              [\"testDepth2\", \"testDepth4\", \"testDepth6\", \"testDepth12\", \"testDeep4x5\", \"testDeep6x5\"]\n          }\n    ]\n"
  },
  {
    "path": "test-suite/integration/Morloc/Test/DaemonTests.hs",
    "content": "module Morloc.Test.DaemonTests (daemonTests) where\n\nimport Control.Concurrent (threadDelay)\nimport Control.Exception (SomeException, try)\nimport System.Directory (copyFile, doesFileExist, listDirectory, removeFile)\nimport System.Exit (ExitCode (..))\nimport System.FilePath ((</>))\nimport System.IO.Temp (createTempDirectory, getCanonicalTemporaryDirectory)\nimport Test.Tasty (TestTree, testGroup)\nimport Test.Tasty.HUnit (assertBool, assertFailure, testCase)\n\nimport Morloc.Test.Common\n\n-- ======================================================================\n-- Test data compilation\n-- ======================================================================\n\ncompileDaemonProgram :: TestEnv -> String -> IO FilePath\ncompileDaemonProgram env locFile = do\n  let srcDir = teSuiteDir env </> \"daemon-tests\"\n  tmpDir <- do\n    tmpBase <- getCanonicalTemporaryDirectory\n    createTempDirectory tmpBase \"morloc-daemon\"\n  copyFile (srcDir </> locFile) (tmpDir </> locFile)\n  entries <- listDirectory srcDir\n  mapM_\n    ( \\f -> do\n        let src = srcDir </> f\n        isFile <- doesFileExist src\n        if isFile && (hasSuffix \".py\" f || hasSuffix \".R\" f || hasSuffix \".hpp\" f)\n          then copyFile src (tmpDir </> f)\n          else return ()\n    )\n    entries\n  (ec, _, err) <- morlocMake tmpDir \"nexus\" locFile\n  case ec of\n    ExitSuccess -> return tmpDir\n    ExitFailure c -> do\n      assertFailure $ locFile ++ \": compile failed (exit \" ++ show c ++ \"):\\n\" ++ err\n      return tmpDir\n\nhasSuffix :: String -> String -> Bool\nhasSuffix suf s = reverse suf == take (length suf) (reverse s)\n\n-- | Wait for the daemon to be fully ready (health + discover responding)\nwaitForDaemonReady :: Int -> Int -> IO Bool\nwaitForDaemonReady port maxWaitMs = go 0\n  where\n    stepMs = 300\n    go elapsed\n      | elapsed >= maxWaitMs = return False\n      | otherwise = do\n          result <-\n            try $ httpGet (\"http://127.0.0.1:\" ++ show port ++ \"/discover\") ::\n              IO (Either SomeException (ExitCode, String, String))\n          case result of\n            Right (ExitSuccess, body, _)\n              | length body > 10 -> return True\n            _ -> do\n              threadDelay (stepMs * 1000)\n              go (elapsed + stepMs)\n\n-- | Wait for a socket/TCP endpoint to respond\nwaitForSocket :: String -> Int -> IO Bool\nwaitForSocket target maxWaitMs = go 0\n  where\n    stepMs = 500\n    go elapsed\n      | elapsed >= maxWaitMs = return False\n      | otherwise = do\n          result <-\n            try $ lpRequest target \"{\\\"method\\\":\\\"health\\\"}\" ::\n              IO (Either SomeException String)\n          case result of\n            Right r | length r > 2 -> return True\n            _ -> do\n              threadDelay (stepMs * 1000)\n              go (elapsed + stepMs)\n\n-- ======================================================================\n-- HTTP API tests\n-- ======================================================================\n\nhttpTests :: TestEnv -> TestTree\nhttpTests env = testCase \"HTTP API (arithmetic)\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  port <- pickFreePort\n  withDaemon arithDir [\"--http-port\", show port] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not become ready (port \" ++ show port ++ \", dir \" ++ arithDir ++ \")\") ok\n\n    -- Health\n    (_, body, _) <- httpGet (\"http://127.0.0.1:\" ++ show port ++ \"/health\")\n    assertJsonEq \"health\" body \"status\" \"ok\"\n\n    -- Discovery\n    (_, disco, _) <- httpGet (\"http://127.0.0.1:\" ++ show port ++ \"/discover\")\n    assertContains \"discover lists add\" \"add\" disco\n    assertContains \"discover lists mul\" \"mul\" disco\n    assertContains \"discover lists neg\" \"neg\" disco\n    assertContains \"discover lists square\" \"square\" disco\n\n    -- add(3,4) -> 7\n    (_, r1, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/add\") \"[3, 4]\"\n    assertJsonEq \"add\" r1 \"status\" \"ok\"\n    assertJsonEq \"add\" r1 \"result\" \"7\"\n\n    -- mul(5,6) -> 30\n    (_, r2, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/mul\") \"[5, 6]\"\n    assertJsonEq \"mul\" r2 \"result\" \"30\"\n\n    -- neg(42) -> -42\n    (_, r3, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/neg\") \"[42]\"\n    assertJsonEq \"neg\" r3 \"result\" \"-42\"\n\n    -- square(7) -> 49\n    (_, r4, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/square\") \"[7]\"\n    assertJsonEq \"square\" r4 \"result\" \"49\"\n\n    -- Args as object form\n    (_, r5, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/add\") \"{\\\"args\\\": [10, 20]}\"\n    assertJsonEq \"add object form\" r5 \"result\" \"30\"\n\n    -- Float args\n    (_, r6, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/add\") \"[1.5, 2.5]\"\n    assertJsonEq \"add float\" r6 \"result\" \"4\"\n\n    -- Unknown command\n    (_, r7, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/nonexistent\") \"[1]\"\n    assertJsonEq \"unknown command\" r7 \"status\" \"error\"\n\nhttpPyTests :: TestEnv -> TestTree\nhttpPyTests env = testCase \"HTTP API (Python strings)\" $ do\n  strDir <- compileDaemonProgram env \"strings.loc\"\n  port <- pickFreePort\n  withDaemon strDir [\"--http-port\", show port] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not start (port \" ++ show port ++ \", dir \" ++ strDir ++ \")\") ok\n\n    (_, r1, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/greet\") \"[\\\"world\\\"]\"\n    assertJsonEq \"greet\" r1 \"status\" \"ok\"\n    assertJsonEq \"greet\" r1 \"result\" \"Hello, world!\"\n\n    (_, r2, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/strlen\") \"[\\\"morloc\\\"]\"\n    assertJsonEq \"strlen\" r2 \"result\" \"6\"\n\n    (_, r3, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/strlen\") \"[\\\"\\\"]\"\n    assertJsonEq \"strlen empty\" r3 \"result\" \"0\"\n\nhttpPureTests :: TestEnv -> TestTree\nhttpPureTests env = testCase \"HTTP API (pure commands)\" $ do\n  pureDir <- compileDaemonProgram env \"pure.loc\"\n  port <- pickFreePort\n  withDaemon pureDir [\"--http-port\", show port] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not start (port \" ++ show port ++ \", dir \" ++ pureDir ++ \")\") ok\n\n    (_, r1, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/checkInt\") \"[]\"\n    assertJsonEq \"checkInt\" r1 \"status\" \"ok\"\n    assertJsonEq \"checkInt\" r1 \"result\" \"42\"\n\n    (_, r2, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/checkReal\") \"[]\"\n    assertJsonEq \"checkReal\" r2 \"result\" \"3.14\"\n\n    (_, r3, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/checkBool\") \"[]\"\n    assertJsonEq \"checkBool\" r3 \"result\" \"true\"\n\n    (_, r4, _) <- httpPost (\"http://127.0.0.1:\" ++ show port ++ \"/call/checkStr\") \"[]\"\n    assertJsonEq \"checkStr\" r4 \"result\" \"hello\"\n\n-- ======================================================================\n-- Unix socket tests\n-- ======================================================================\n\nsocketTests :: TestEnv -> TestTree\nsocketTests env = testCase \"Unix socket API\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  let sockPath = \"/tmp/morloc-test-haskell-socket.sock\"\n  removeIfExists sockPath\n  withDaemon arithDir [\"--socket\", sockPath] $ \\_ -> do\n    ok <- waitForSocket sockPath 15000\n    assertBool (\"socket daemon did not start (socket \" ++ sockPath ++ \", dir \" ++ arithDir ++ \")\") ok\n\n    r1 <- lpRequest sockPath \"{\\\"method\\\":\\\"health\\\"}\"\n    assertJsonEq \"socket health\" r1 \"status\" \"ok\"\n\n    r2 <- lpRequest sockPath \"{\\\"method\\\":\\\"discover\\\"}\"\n    assertContains \"socket discover\" \"add\" r2\n\n    r3 <- lpRequest sockPath \"{\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"add\\\",\\\"args\\\":[10,20]}\"\n    assertJsonEq \"socket add\" r3 \"status\" \"ok\"\n    assertJsonEq \"socket add\" r3 \"result\" \"30\"\n\n    r4 <-\n      lpRequest sockPath \"{\\\"id\\\":\\\"req-42\\\",\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"mul\\\",\\\"args\\\":[3,7]}\"\n    assertJsonEq \"socket mul\" r4 \"id\" \"req-42\"\n    assertJsonEq \"socket mul\" r4 \"result\" \"21\"\n\n    r5 <- lpRequest sockPath \"{\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"bogus\\\",\\\"args\\\":[1]}\"\n    assertJsonEq \"socket unknown cmd\" r5 \"status\" \"error\"\n\n  removeIfExists sockPath\n\n-- ======================================================================\n-- TCP tests\n-- ======================================================================\n\ntcpTests :: TestEnv -> TestTree\ntcpTests env = testCase \"TCP API\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  port <- pickFreePort\n  withDaemon arithDir [\"--port\", show port] $ \\_ -> do\n    ok <- waitForSocket (\"127.0.0.1:\" ++ show port) 15000\n    assertBool (\"tcp daemon did not start (port \" ++ show port ++ \", dir \" ++ arithDir ++ \")\") ok\n\n    r1 <- lpRequest (\"127.0.0.1:\" ++ show port) \"{\\\"method\\\":\\\"health\\\"}\"\n    assertJsonEq \"tcp health\" r1 \"status\" \"ok\"\n\n    r2 <-\n      lpRequest\n        (\"127.0.0.1:\" ++ show port)\n        \"{\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"add\\\",\\\"args\\\":[100,200]}\"\n    assertJsonEq \"tcp add\" r2 \"status\" \"ok\"\n    assertJsonEq \"tcp add\" r2 \"result\" \"300\"\n\n    r3 <-\n      lpRequest\n        (\"127.0.0.1:\" ++ show port)\n        \"{\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"square\\\",\\\"args\\\":[9]}\"\n    assertJsonEq \"tcp square\" r3 \"result\" \"81\"\n\n-- ======================================================================\n-- Multi-listener tests\n-- ======================================================================\n\nmultiListenerTests :: TestEnv -> TestTree\nmultiListenerTests env = testCase \"Multi-listener (HTTP+TCP+socket)\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  let sockPath = \"/tmp/morloc-test-haskell-multi.sock\"\n  removeIfExists sockPath\n  httpPort <- pickFreePort\n  tcpPort <- pickFreePort\n  withDaemon arithDir [\"--socket\", sockPath, \"--port\", show tcpPort, \"--http-port\", show httpPort] $ \\_ -> do\n    ok <- waitForDaemonReady httpPort 15000\n    assertBool\n      ( \"multi daemon did not start (http=\"\n          ++ show httpPort\n          ++ \", tcp=\"\n          ++ show tcpPort\n          ++ \", dir \"\n          ++ arithDir\n          ++ \")\"\n      )\n      ok\n\n    (_, r1, _) <- httpPost (\"http://127.0.0.1:\" ++ show httpPort ++ \"/call/add\") \"[1, 2]\"\n    assertJsonEq \"multi HTTP\" r1 \"result\" \"3\"\n\n    r2 <-\n      lpRequest (\"127.0.0.1:\" ++ show tcpPort) \"{\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"add\\\",\\\"args\\\":[1,2]}\"\n    assertJsonEq \"multi TCP\" r2 \"result\" \"3\"\n\n    r3 <- lpRequest sockPath \"{\\\"method\\\":\\\"call\\\",\\\"command\\\":\\\"add\\\",\\\"args\\\":[1,2]}\"\n    assertJsonEq \"multi socket\" r3 \"result\" \"3\"\n\n  removeIfExists sockPath\n\n-- ======================================================================\n-- Sequential requests tests\n-- ======================================================================\n\nsequentialTests :: TestEnv -> TestTree\nsequentialTests env = testCase \"Sequential requests\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  port <- pickFreePort\n  withDaemon arithDir [\"--http-port\", show port] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not start (port \" ++ show port ++ \", dir \" ++ arithDir ++ \")\") ok\n\n    mapM_\n      ( \\i -> do\n          (_, r, _) <-\n            httpPost\n              (\"http://127.0.0.1:\" ++ show port ++ \"/call/add\")\n              (\"[\" ++ show i ++ \", \" ++ show i ++ \"]\")\n          assertJsonEq (\"sequential add \" ++ show i) r \"result\" (show (i + i))\n      )\n      [1 .. 10 :: Int]\n\n-- ======================================================================\n-- Concurrent requests tests\n-- ======================================================================\n\nconcurrentHttpTests :: TestEnv -> TestTree\nconcurrentHttpTests env = testCase \"Concurrent HTTP requests\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  port <- pickFreePort\n  withDaemon arithDir [\"--http-port\", show port] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not start (port \" ++ show port ++ \", dir \" ++ arithDir ++ \")\") ok\n\n    mapM_\n      ( \\i -> do\n          (_, r, _) <-\n            httpPost\n              (\"http://127.0.0.1:\" ++ show port ++ \"/call/square\")\n              (\"[\" ++ show i ++ \"]\")\n          assertJsonEq (\"concurrent square \" ++ show i) r \"result\" (show (i * i))\n      )\n      [1 .. 5 :: Int]\n\n-- ======================================================================\n-- Graceful shutdown tests\n-- ======================================================================\n\nshutdownTests :: TestEnv -> TestTree\nshutdownTests env = testCase \"Graceful shutdown\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  port <- pickFreePort\n  let sockPath = \"/tmp/morloc-test-haskell-shutdown.sock\"\n  removeIfExists sockPath\n\n  withDaemon arithDir [\"--http-port\", show port, \"--socket\", sockPath] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not start (port \" ++ show port ++ \")\") ok\n\n    (_, body, _) <- httpGet (\"http://127.0.0.1:\" ++ show port ++ \"/health\")\n    assertJsonEq \"alive before shutdown\" body \"status\" \"ok\"\n\n  threadDelay 500000\n  sockExists <- doesFileExist sockPath\n  assertBool \"socket file should be removed after shutdown\" (not sockExists)\n  removeIfExists sockPath\n\n-- ======================================================================\n-- Pool health tests\n-- ======================================================================\n\npoolHealthTests :: TestEnv -> TestTree\npoolHealthTests env = testCase \"Health reports pool status\" $ do\n  arithDir <- compileDaemonProgram env \"arithmetic.loc\"\n  port <- pickFreePort\n  withDaemon arithDir [\"--http-port\", show port] $ \\_ -> do\n    ok <- waitForDaemonReady port 15000\n    assertBool (\"daemon did not start (port \" ++ show port ++ \")\") ok\n\n    (_, body, _) <- httpGet (\"http://127.0.0.1:\" ++ show port ++ \"/health\")\n    assertContains \"health includes ok\" \"ok\" body\n\n-- ======================================================================\n-- Helpers\n-- ======================================================================\n\nremoveIfExists :: FilePath -> IO ()\nremoveIfExists path = do\n  exists <- doesFileExist path\n  if exists then removeFile path else return ()\n\n-- ======================================================================\n-- Top-level test tree\n-- ======================================================================\n\ndaemonTests :: TestEnv -> TestTree\ndaemonTests env =\n  testGroup\n    \"Daemon\"\n    [ httpTests env\n    , httpPyTests env\n    , httpPureTests env\n    , socketTests env\n    , tcpTests env\n    , multiListenerTests env\n    , sequentialTests env\n    , concurrentHttpTests env\n    , shutdownTests env\n    , poolHealthTests env\n    ]\n"
  },
  {
    "path": "test-suite/integration/Morloc/Test/InstallTests.hs",
    "content": "module Morloc.Test.InstallTests (installTests) where\n\nimport System.Exit (ExitCode (..))\nimport System.FilePath ((</>))\nimport Test.Tasty (TestTree, testGroup)\nimport Test.Tasty.HUnit (assertEqual, assertFailure, testCase)\n\nimport Morloc.Test.Common\n\ndata InstallSpec = InstallSpec\n  { isSourceDir :: String -- relative to test-suite/, e.g. \"install-tests/testpy1\"\n  , isExtraArgs :: [String] -- extra morloc make args\n  , isSubcommand :: String -- exported function to call\n  , isArgs :: [String] -- arguments to pass\n  , isExpected :: String -- expected stdout output\n  , isFiles :: [String] -- files that must exist in exe dir\n  , isDirs :: [String] -- dirs that must exist in exe dir\n  }\n\ninstallTest :: TestEnv -> String -> InstallSpec -> TestTree\ninstallTest env name spec = testCase name $ do\n  let srcDir = teSuiteDir env </> isSourceDir spec\n      binDir = teMorlocHome env </> \"bin\"\n      exeDir = teMorlocHome env </> \"exe\"\n      binPath = binDir </> name\n      exePath = exeDir </> name\n\n  withTestCopy srcDir $ \\workDir -> do\n    -- Build and install\n    (ec, _out, err) <- morlocInstall workDir name (isExtraArgs spec) \"main.loc\"\n    case ec of\n      ExitSuccess -> return ()\n      ExitFailure c ->\n        assertFailure $\n          name ++ \": morloc make failed (exit \" ++ show c ++ \"):\\n\" ++ err\n\n    -- Check binary and exe directory exist\n    assertFileExists (name ++ \": binary installed\") binPath\n    assertDirExists (name ++ \": exe directory created\") exePath\n    assertDirExists (name ++ \": pools directory copied\") (exePath </> \"pools\")\n\n    -- Check expected files\n    mapM_\n      (\\f -> assertFileExists (name ++ \": \" ++ f ++ \" included\") (exePath </> f))\n      (isFiles spec)\n\n    -- Check expected directories\n    mapM_\n      (\\d -> assertDirExists (name ++ \": \" ++ d ++ \" included\") (exePath </> d))\n      (isDirs spec)\n\n    -- Run the installed program\n    (rc, stdout, stderr) <- runProgram binPath (isSubcommand spec) (isArgs spec)\n    let actual = case rc of\n          ExitSuccess -> strip stdout\n          ExitFailure c -> \"ERROR: rc=\" ++ show c ++ \"\\n\" ++ stderr\n    assertEqual (name ++ \": output\") (isExpected spec) actual\n\n    -- Uninstall and verify cleanup\n    morlocUninstall name\n    assertNotExists (name ++ \": binary removed after uninstall\") binPath\n\ninstallTests :: TestEnv -> TestTree\ninstallTests env =\n  testGroup\n    \"Install\"\n    [ testGroup\n        \"Python\"\n        [ installTest env \"testpy1\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testpy1\"\n              , isExtraArgs = []\n              , isSubcommand = \"pygreet\"\n              , isArgs = [\"\\\"world\\\"\"]\n              , isExpected = \"\\\"hello world\\\"\"\n              , isFiles = [\"helpers.py\"]\n              , isDirs = []\n              }\n        , installTest env \"testpy2\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testpy2\"\n              , isExtraArgs = []\n              , isSubcommand = \"pyadd\"\n              , isArgs = [\"3\", \"4\"]\n              , isExpected = \"7\"\n              , isFiles = [\"src/mathutil.py\"]\n              , isDirs = [\"src\"]\n              }\n        , installTest env \"testpy3\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testpy3\"\n              , isExtraArgs = [\"--include\", \"formatter.py\", \"--include\", \"fmtlib.py\"]\n              , isSubcommand = \"pyformat\"\n              , isArgs = [\"\\\"x\\\"\", \"5\"]\n              , isExpected = \"\\\"x=5\\\"\"\n              , isFiles = [\"formatter.py\", \"fmtlib.py\"]\n              , isDirs = []\n              }\n        ]\n    , testGroup\n        \"Cpp\"\n        [ installTest env \"testcpp1\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testcpp1\"\n              , isExtraArgs = []\n              , isSubcommand = \"cppsquare\"\n              , isArgs = [\"7\"]\n              , isExpected = \"49\"\n              , isFiles = [\"square.hpp\"]\n              , isDirs = []\n              }\n        , installTest env \"testcpp2\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testcpp2\"\n              , isExtraArgs = []\n              , isSubcommand = \"cppdouble\"\n              , isArgs = [\"6\"]\n              , isExpected = \"12\"\n              , isFiles = [\"src/dbl.hpp\"]\n              , isDirs = [\"src\"]\n              }\n        , installTest env \"testcpp3\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testcpp3\"\n              , isExtraArgs = [\"--include\", \"inc.hpp\", \"--include\", \"offset.hpp\"]\n              , isSubcommand = \"cppinc\"\n              , isArgs = [\"10\"]\n              , isExpected = \"11\"\n              , isFiles = [\"inc.hpp\", \"offset.hpp\"]\n              , isDirs = []\n              }\n        ]\n    , testGroup\n        \"R\"\n        [ installTest env \"testr1\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testr1\"\n              , isExtraArgs = []\n              , isSubcommand = \"rnegate\"\n              , isArgs = [\"5.0\"]\n              , isExpected = \"-5\"\n              , isFiles = [\"negate.R\"]\n              , isDirs = []\n              }\n        , installTest env \"testr2\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testr2\"\n              , isExtraArgs = []\n              , isSubcommand = \"rtriple\"\n              , isArgs = [\"4\"]\n              , isExpected = \"12\"\n              , isFiles = [\"src/triple.R\"]\n              , isDirs = [\"src\"]\n              }\n        , installTest env \"testr3\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testr3\"\n              , isExtraArgs = [\"--include\", \"glue.R\", \"--include\", \"rutil.R\"]\n              , isSubcommand = \"rpaste\"\n              , isArgs = [\"\\\"foo\\\"\", \"\\\"bar\\\"\"]\n              , isExpected = \"\\\"foobar\\\"\"\n              , isFiles = [\"glue.R\", \"rutil.R\"]\n              , isDirs = []\n              }\n        ]\n    , testGroup\n        \"Datafile\"\n        [ installTest env \"testdatafile1\" $\n            InstallSpec\n              { isSourceDir = \"install-tests/testdatafile1\"\n              , isExtraArgs = []\n              , isSubcommand = \"readData\"\n              , isArgs = []\n              , isExpected = \"\\\"hello from datafile\\\"\"\n              , isFiles = [\"reader.py\", \"data.txt\"]\n              , isDirs = []\n              }\n        ]\n    ]\n"
  },
  {
    "path": "test-suite/integration/Morloc/Test/ShmTests.hs",
    "content": "module Morloc.Test.ShmTests (shmTests) where\n\nimport Control.Concurrent (threadDelay)\nimport Control.Concurrent.Async (mapConcurrently)\nimport Control.Exception (SomeException, try)\nimport Control.Monad (when)\nimport Data.List (intercalate)\nimport System.Directory (copyFile, listDirectory, removeDirectoryRecursive)\nimport System.Exit (ExitCode (..))\nimport System.FilePath (takeExtension, (</>))\nimport System.IO (IOMode (..), hClose, openFile)\nimport System.IO.Temp (createTempDirectory, getCanonicalTemporaryDirectory)\nimport System.Process\n  ( StdStream (..)\n  , createProcess\n  , proc\n  , terminateProcess\n  , waitForProcess\n  )\nimport qualified System.Process as P\nimport Test.Tasty (TestTree, testGroup, withResource)\nimport Test.Tasty.HUnit (assertFailure, testCase)\n\nimport Morloc.Test.Common\n\n-- | Compile the SHM stress test program into a temp directory\ncompileStressProgram :: TestEnv -> IO FilePath\ncompileStressProgram env = do\n  let srcDir = teSuiteDir env </> \"shm-tests\"\n  tmpBase <- getCanonicalTemporaryDirectory\n  workDir <- createTempDirectory tmpBase \"morloc-shm\"\n  entries <- listDirectory srcDir\n  mapM_\n    ( \\f -> do\n        let ext = takeExtension f\n        when (ext `elem` [\".loc\", \".py\", \".hpp\"]) $\n          copyFile (srcDir </> f) (workDir </> f)\n    )\n    entries\n  (ec, _, err) <- morlocMake workDir \"nexus\" \"main.loc\"\n  case ec of\n    ExitSuccess -> return workDir\n    ExitFailure c ->\n      error $\n        \"SHM stress program compile failed (exit \" ++ show c ++ \"):\\n\" ++ err\n\nshmTests :: TestEnv -> TestTree\nshmTests env = withResource (compileStressProgram env) removeDirectoryRecursive $\n  \\getWorkDir ->\n    testGroup\n      \"SHM\"\n      [ normalConcurrentCleanup getWorkDir\n      , rapidFireCleanup getWorkDir\n      , sigtermBehavior getWorkDir\n      ]\n\n-- ======================================================================\n-- Test 1: Concurrent SHM cleanup\n-- ======================================================================\n\nnormalConcurrentCleanup :: IO FilePath -> TestTree\nnormalConcurrentCleanup getWorkDir = testCase \"normalConcurrentCleanup\" $ do\n  workDir <- getWorkDir\n  let conc = 8 :: Int\n      rounds = 3 :: Int\n  cleanupMorlocResources\n  threadDelay 100000 -- 100ms settle\n  mapM_ (runRound workDir conc rounds) [1 .. rounds]\n  where\n    runRound workDir conc totalRounds roundNum = do\n      before <- countShm\n\n      results <-\n        mapConcurrently\n          ( \\_ ->\n              try (runNexus workDir \"stress\" [\"1000\", \"2.0\"]) ::\n                IO (Either SomeException (ExitCode, String, String))\n          )\n          [1 .. conc :: Int]\n\n      -- 1s settle\n      threadDelay 1000000\n\n      after <- countShm\n      afterSegs <- listShmWithAge\n\n      let ecs = [ec | Right (ec, _, _) <- results]\n          outs = [strip out | Right (_, out, _) <- results]\n          numFailed = length [() | Left _ <- results]\n          allExitOk = all (== ExitSuccess) ecs && numFailed == 0\n          allOutputOk = all (== \"499500\") outs\n          shmDelta = after - before\n\n      when (not allExitOk || not allOutputOk || shmDelta /= 0) $\n        assertFailure $\n          unlines $\n            [ \"normalConcurrentCleanup: SHM leak detected\"\n            , \"  workDir: \" ++ workDir\n            , \"  round \"\n                ++ show roundNum\n                ++ \"/\"\n                ++ show totalRounds\n                ++ \": \"\n                ++ show conc\n                ++ \" concurrent, \"\n                ++ show (length ecs)\n                ++ \" succeeded\"\n            , \"    before: \" ++ show before ++ \" segments\"\n            , \"    after:  \" ++ show after ++ \" segments (after 1s settle)\"\n            ]\n              ++ [ \"    leaked: \"\n                  ++ intercalate\n                    \"\\n            \"\n                    [seg ++ \" (\" ++ show age ++ \"s old)\" | (seg, age) <- afterSegs]\n                 | not (null afterSegs)\n                 ]\n              ++ [ \"    exit codes: \" ++ show (map exitToInt ecs)\n                 , \"    outputs: \" ++ show outs\n                 ]\n              ++ [\"    exceptions: \" ++ show numFailed | numFailed > 0]\n\n-- ======================================================================\n-- Test 2: Rapid-fire sequential cleanup\n-- ======================================================================\n\nrapidFireCleanup :: IO FilePath -> TestTree\nrapidFireCleanup getWorkDir = testCase \"rapidFireCleanup\" $ do\n  workDir <- getWorkDir\n  let iterations = 50 :: Int\n  cleanupMorlocResources\n  threadDelay 100000 -- 100ms settle\n  before <- countShm\n\n  failures <- runIterations workDir iterations (0 :: Int)\n\n  -- 1s settle\n  threadDelay 1000000\n\n  after <- countShm\n  afterSegs <- listShmWithAge\n  let shmDelta = after - before\n\n  when (failures > 0 || shmDelta > 0) $\n    assertFailure $\n      unlines $\n        [ \"rapidFireCleanup:\"\n        , \"  workDir: \" ++ workDir\n        , \"  iterations: \" ++ show iterations\n        , \"  failures: \" ++ show failures\n        , \"  before: \" ++ show before ++ \" segments\"\n        , \"  after:  \" ++ show after ++ \" segments (after 1s settle)\"\n        ]\n          ++ [ \"  leaked: \"\n              ++ intercalate\n                \"\\n          \"\n                [seg ++ \" (\" ++ show age ++ \"s old)\" | (seg, age) <- afterSegs]\n             | not (null afterSegs)\n             ]\n  where\n    runIterations _ 0 failures = return failures\n    runIterations wd remaining failures = do\n      result <-\n        try (runNexus wd \"stress\" [\"100\", \"0.0\"]) ::\n          IO (Either SomeException (ExitCode, String, String))\n      let failed = case result of\n            Right (ExitSuccess, out, _) -> strip out /= \"4950\"\n            _ -> True\n      threadDelay 10000 -- 10ms\n      runIterations wd (remaining - 1) (if failed then failures + 1 else failures)\n\n-- ======================================================================\n-- Test 3: SIGTERM triggers clean SHM cleanup\n-- ======================================================================\n\nsigtermBehavior :: IO FilePath -> TestTree\nsigtermBehavior getWorkDir = testCase \"sigtermBehavior\" $ do\n  workDir <- getWorkDir\n  cleanupMorlocResources\n  threadDelay 100000 -- 100ms settle\n  before <- countShm\n\n  -- Launch nexus with a long-running command (5s sleep)\n  devNull <- openFile \"/dev/null\" WriteMode\n  let cp =\n        (proc (workDir </> \"nexus\") [\"stress\", \"1000\", \"5.0\"])\n          { P.cwd = Just workDir\n          , P.std_out = UseHandle devNull\n          , P.std_err = UseHandle devNull\n          }\n  (_, _, _, ph) <- createProcess cp\n\n  -- Wait 1s for the process to start and create SHM segments\n  threadDelay 1000000\n\n  -- Send SIGTERM (terminateProcess sends SIGTERM on Unix)\n  terminateProcess ph\n\n  -- Wait for exit\n  _ec <- waitForProcess ph\n  hClose devNull\n\n  -- 1s settle\n  threadDelay 1000000\n\n  after <- countShm\n  afterSegs <- listShmWithAge\n  let shmDelta = after - before\n\n  when (shmDelta > 0) $\n    assertFailure $\n      unlines $\n        [ \"sigtermBehavior: SHM leak after SIGTERM\"\n        , \"  workDir: \" ++ workDir\n        , \"  before: \" ++ show before ++ \" segments\"\n        , \"  after:  \" ++ show after ++ \" segments (after 1s settle)\"\n        , \"  leaked: \"\n            ++ intercalate\n              \"\\n          \"\n              [seg ++ \" (\" ++ show age ++ \"s old)\" | (seg, age) <- afterSegs]\n        ]\n\n  -- Clean up leaked segments so they don't affect other tests\n  cleanupMorlocResources\n\n-- ======================================================================\n-- Helpers\n-- ======================================================================\n\nexitToInt :: ExitCode -> Int\nexitToInt ExitSuccess = 0\nexitToInt (ExitFailure n) = n\n"
  },
  {
    "path": "test-suite/integration/Morloc/Test/StressTests.hs",
    "content": "module Morloc.Test.StressTests (stressTests) where\n\nimport Control.Concurrent (threadDelay)\nimport Control.Concurrent.Async (mapConcurrently)\nimport Control.Exception (SomeException, try)\nimport Data.List (intercalate)\nimport System.Directory\n  ( copyFile\n  , doesFileExist\n  , listDirectory\n  , removeDirectoryRecursive\n  )\nimport System.Exit (ExitCode (..))\nimport System.FilePath (takeExtension, (</>))\nimport System.IO.Temp (createTempDirectory, getCanonicalTemporaryDirectory)\nimport System.Process (readProcessWithExitCode)\nimport Test.Tasty (TestTree, testGroup, withResource)\nimport Test.Tasty.HUnit (assertFailure, testCase)\n\nimport Morloc.Test.Common\n\n-- | A workload: a golden test directory + calls to make against its nexus\ndata Workload = Workload\n  { wlName :: String -- label (e.g. \"cpp\", \"py-r\")\n  , wlTestDir :: String -- relative to test-suite/golden-tests/\n  , wlCalls :: [(String, [String])] -- (subcommand, args) pairs\n  }\n\nworkloads :: [Workload]\nworkloads =\n  [ Workload \"cpp\" \"argument-form-1-c\" [(\"foo\", [\"2\"])]\n  , Workload \"py\" \"argument-form-1-py\" [(\"foo\", [\"2\"])]\n  , Workload \"r\" \"argument-form-1-r\" [(\"foo\", [\"2\"])]\n  , Workload \"cpp-py\" \"interop-3a-cp\" [(\"foo\", [\"[1,2,3]\"])]\n  , Workload \"cpp-r\" \"interop-3a-rc\" [(\"foo\", [\"[1,2,3]\"])]\n  , Workload \"py-r\" \"interop-3a-pr\" [(\"foo\", [\"[1,2,3]\"])]\n  ]\n\n-- | Compile a golden test into a temp directory, returning the work dir\ncompileWorkload :: TestEnv -> Workload -> IO FilePath\ncompileWorkload env wl = do\n  let goldenDir = teSuiteDir env </> \"golden-tests\" </> wlTestDir wl\n  tmpBase <- getCanonicalTemporaryDirectory\n  workDir <- createTempDirectory tmpBase \"morloc-stress\"\n\n  -- Copy source files\n  entries <- listDirectory goldenDir\n  mapM_\n    ( \\f -> do\n        let ext = takeExtension f\n        if ext `elem` [\".loc\", \".py\", \".hpp\", \".R\"]\n          then copyFile (goldenDir </> f) (workDir </> f)\n          else return ()\n    )\n    entries\n\n  -- Find the .loc file referenced in the Makefile\n  locFile <- findLocFile goldenDir\n  (ec, _, err) <- morlocMake workDir \"nexus\" locFile\n  case ec of\n    ExitSuccess -> return workDir\n    ExitFailure c -> do\n      assertFailure $ wlName wl ++ \": compile failed (exit \" ++ show c ++ \"):\\n\" ++ err\n      return workDir\n\nfindLocFile :: FilePath -> IO String\nfindLocFile dir = do\n  let mkfile = dir </> \"Makefile\"\n  exists <- doesFileExist mkfile\n  if exists\n    then do\n      (_, out, _) <-\n        readProcessWithExitCode\n          \"sh\"\n          [\"-c\", \"grep 'morloc make' \" ++ show mkfile ++ \" | head -1 | grep -oP '[^ ]+\\\\.loc'\"]\n          \"\"\n      return (strip out)\n    else do\n      entries <- listDirectory dir\n      case filter (\\f -> takeExtension f == \".loc\") entries of\n        (f : _) -> return f\n        [] -> assertFailure (\"No .loc file found in \" ++ dir) >> return \"\"\n\n-- | Snapshot of resource counts for comparison\ndata ResourceSnapshot = ResourceSnapshot\n  { rsZombies :: Int\n  , rsShm :: Int\n  , rsTmp :: Int\n  , rsShmList :: [String] -- actual segment names\n  }\n\ntakeSnapshot :: IO ResourceSnapshot\ntakeSnapshot =\n  ResourceSnapshot\n    <$> countZombies\n    <*> countShm\n    <*> countTmp\n    <*> listShm\n\n-- | Format a resource delta for failure messages\nformatDelta :: String -> ResourceSnapshot -> ResourceSnapshot -> String\nformatDelta label before after =\n  unlines $\n    [ label\n    , \"  shm: \"\n        ++ show (rsShm before)\n        ++ \" -> \"\n        ++ show (rsShm after)\n        ++ \" (delta: \"\n        ++ show (rsShm after - rsShm before)\n        ++ \")\"\n    , \"  tmp: \"\n        ++ show (rsTmp before)\n        ++ \" -> \"\n        ++ show (rsTmp after)\n        ++ \" (delta: \"\n        ++ show (rsTmp after - rsTmp before)\n        ++ \")\"\n    , \"  zombies: \"\n        ++ show (rsZombies before)\n        ++ \" -> \"\n        ++ show (rsZombies after)\n        ++ \" (delta: \"\n        ++ show (rsZombies after - rsZombies before)\n        ++ \")\"\n    ]\n      ++ if null (rsShmList after)\n        then []\n        else [\"  segments: \" ++ intercalate \", \" (rsShmList after)]\n\n-- ======================================================================\n-- Zombie stress test: 50 concurrent nexus invocations, check for leaks\n-- ======================================================================\n\nzombieTest :: Workload -> IO FilePath -> TestTree\nzombieTest wl getWorkDir = testCase \"zombie\" $ do\n  workDir <- getWorkDir\n  cleanupMorlocResources\n  let iterations = 50 :: Int\n      (subcmd, args) = head (wlCalls wl)\n  before <- takeSnapshot\n\n  _ <-\n    mapConcurrently\n      ( \\_ ->\n          try (runNexusQuiet workDir subcmd args) :: IO (Either SomeException ExitCode)\n      )\n      [1 .. iterations]\n\n  threadDelay 2000000 -- 2s settle\n  after <- takeSnapshot\n  let newShm = rsShm after - rsShm before\n      newTmp = rsTmp after - rsTmp before\n      problems =\n        concat\n          [ [\"shm leaked: \" ++ show newShm | newShm > 0]\n          , [\"tmp leaked: \" ++ show newTmp | newTmp > 0]\n          ]\n\n  if null problems\n    then return ()\n    else\n      assertFailure $\n        unlines\n          [ \"workDir: \" ++ workDir\n          , formatDelta (\"after \" ++ show iterations ++ \" concurrent + 2s settle\") before after\n          , intercalate \"; \" problems\n          ]\n\n-- ======================================================================\n-- Concurrent stress test: 10 rounds x 10 concurrent, check for leaks\n-- ======================================================================\n\nconcurrentStressTest :: Workload -> IO FilePath -> TestTree\nconcurrentStressTest wl getWorkDir = testCase \"concurrent\" $ do\n  workDir <- getWorkDir\n  cleanupMorlocResources\n  let concurrent = 10 :: Int\n      rounds = 10 :: Int\n  before <- takeSnapshot\n\n  leakRounds <- goRounds workDir (0 :: Int) rounds concurrent before\n\n  threadDelay 2000000 -- 2s settle\n  after <- takeSnapshot\n  let newShm = rsShm after - rsShm before\n      newTmp = rsTmp after - rsTmp before\n      problems =\n        concat\n          [ [\"shm leaked: \" ++ show newShm | newShm > 0]\n          , [\"tmp leaked: \" ++ show newTmp | newTmp > 0]\n          , [ \"leaks observed during \"\n              ++ show leakRounds\n              ++ \"/\"\n              ++ show rounds\n              ++ \" rounds\"\n            | leakRounds > 0\n            ]\n          ]\n\n  if null problems\n    then return ()\n    else\n      assertFailure $\n        unlines\n          [ \"workDir: \" ++ workDir\n          , formatDelta\n              ( \"after \"\n                  ++ show rounds\n                  ++ \" rounds x \"\n                  ++ show concurrent\n                  ++ \" concurrent + 2s settle\"\n              )\n              before\n              after\n          , intercalate \"; \" problems\n          ]\n  where\n    goRounds _ leakR 0 _ _ = return leakR\n    goRounds wd leakR remaining conc before0 = do\n      let (subcmd, args) = head (wlCalls wl)\n      _ <-\n        mapConcurrently\n          ( \\_ ->\n              try (runNexusQuiet wd subcmd args) :: IO (Either SomeException ExitCode)\n          )\n          [1 .. conc :: Int]\n      threadDelay 50000 -- 50ms between rounds\n      cur <- takeSnapshot\n      let leaked = rsShm cur - rsShm before0 > 0 || rsTmp cur - rsTmp before0 > 0\n      goRounds\n        wd\n        (if leaked then leakR + 1 else leakR)\n        (remaining - 1)\n        conc\n        before0\n\n-- ======================================================================\n-- Crash recovery: 10 concurrent crash-and-recover cycles\n-- ======================================================================\n\ncrashRecoveryTest :: Workload -> IO FilePath -> TestTree\ncrashRecoveryTest wl getWorkDir = testCase \"crash-recovery\" $ do\n  workDir <- getWorkDir\n  cleanupMorlocResources\n  let iterations = 10 :: Int\n      (subcmd, args) = head (wlCalls wl)\n  before <- takeSnapshot\n\n  results <-\n    mapConcurrently\n      ( \\_ -> do\n          (_, out, _) <-\n            readProcessWithExitCode\n              \"sh\"\n              [ \"-c\"\n              , \"cd \"\n                  ++ show workDir\n                  ++ \" && \"\n                  ++ \"./nexus \"\n                  ++ subcmd\n                  ++ \" \"\n                  ++ unwords args\n                  ++ \" > /dev/null 2>&1 &\"\n                  ++ \" NPID=$!; sleep 0.1;\"\n                  ++ \" CPID=$(ps -o pid= --ppid $NPID 2>/dev/null | head -1 | tr -d ' ');\"\n                  ++ \" if [ -n \\\"$CPID\\\" ]; then kill -9 $CPID 2>/dev/null; fi;\"\n                  ++ \" HUNG=0; for i in $(seq 1 50); do\"\n                  ++ \"   if ! kill -0 $NPID 2>/dev/null; then break; fi;\"\n                  ++ \"   sleep 0.1;\"\n                  ++ \"   if [ $i -eq 50 ]; then HUNG=1; kill -9 $NPID 2>/dev/null; fi;\"\n                  ++ \" done;\"\n                  ++ \" wait $NPID 2>/dev/null;\"\n                  ++ \" echo $HUNG\"\n              ]\n              \"\"\n          return (strip out == \"1\")\n      )\n      [1 .. iterations]\n\n  threadDelay 2000000 -- 2s settle\n  after <- takeSnapshot\n  let hangs = length (filter id results)\n      newShm = rsShm after - rsShm before\n      newTmp = rsTmp after - rsTmp before\n      problems =\n        concat\n          [ [ \"hung: \"\n              ++ show hangs\n              ++ \"/\"\n              ++ show iterations\n              ++ \" (nexus did not exit after child kill)\"\n            | hangs > 0\n            ]\n          , [\"shm leaked: \" ++ show newShm | newShm > 0]\n          , [\"tmp leaked: \" ++ show newTmp | newTmp > 0]\n          ]\n\n  if null problems\n    then return ()\n    else\n      assertFailure $\n        unlines\n          [ \"workDir: \" ++ workDir\n          , formatDelta (\"after \" ++ show iterations ++ \" crash iterations + 2s settle\") before after\n          , intercalate \"\\n\" problems\n          ]\n\n-- ======================================================================\n-- Top-level: compile each workload once, share across test types\n-- ======================================================================\n\nworkloadGroup :: TestEnv -> Workload -> TestTree\nworkloadGroup env wl =\n  withResource (compileWorkload env wl) removeDirectoryRecursive $ \\getWorkDir ->\n    testGroup\n      (wlName wl)\n      [ zombieTest wl getWorkDir\n      , concurrentStressTest wl getWorkDir\n      , crashRecoveryTest wl getWorkDir\n      ]\n\nstressTests :: TestEnv -> TestTree\nstressTests env =\n  testGroup\n    \"Stress\"\n    [workloadGroup env wl | wl <- workloads]\n"
  },
  {
    "path": "test-suite/shm-tests/cppfuncs.loc",
    "content": "module cppfuncs (*)\n\nimport types\n\nsum_list :: [Int] -> Int\n\nsource Cpp from \"stress.hpp\" (\"sum_list\")\n"
  },
  {
    "path": "test-suite/shm-tests/main.loc",
    "content": "module main (stress)\n\nimport types (Int, Real, List)\nimport pyfuncs (make_list)\nimport cppfuncs (sum_list)\n\nstress :: Int -> Real -> Int\nstress n delay = sum_list (make_list n delay)\n"
  },
  {
    "path": "test-suite/shm-tests/pyfuncs.loc",
    "content": "module pyfuncs (*)\n\nimport types\n\nmake_list :: Int -> Real -> [Int]\n\nsource Py from \"stress.py\" (\"make_list\")\n"
  },
  {
    "path": "test-suite/shm-tests/stress.hpp",
    "content": "#ifndef MORLOC_STRESS_HPP\n#define MORLOC_STRESS_HPP\n\n#include <vector>\n\nint sum_list(const std::vector<int>& xs) {\n    int s = 0;\n    for (auto x : xs) s += x;\n    return s;\n}\n\n#endif\n"
  },
  {
    "path": "test-suite/shm-tests/stress.py",
    "content": "import time\n\ndef make_list(n, delay):\n    time.sleep(delay)\n    return list(range(n))\n"
  },
  {
    "path": "test-suite/shm-tests/types.loc",
    "content": "module types (*)\n\ntype Py => Int = \"int\"\ntype Py => Real = \"float\"\ntype Py => List a = \"list\" a\ntype Cpp => Int = \"int\"\ntype Cpp => Real = \"double\"\ntype Cpp => List a = \"std::vector<$1>\" a\n"
  },
  {
    "path": "test-suite/stress/.gitignore",
    "content": "*log\n"
  },
  {
    "path": "test-suite/stress/README.md",
    "content": "# Stress Tests\n\nTests for process management, resource cleanup, and stability under load.\n\n## Interface\n\nAll tests take a golden test directory and one or more nexus calls:\n\n```bash\n./<test>.sh <golden-test-dir> <call> [<call> ...] [-- options...]\n```\n\nA call is a nexus subcommand with arguments, e.g. `\"foo '[1,2,3]'\"`.\n\n## Tests\n\n**zombie-stress.sh** `[-- iterations]` — Runs the nexus repeatedly (default 50)\nand checks that no shared memory segments or temp directories accumulate.\n\n**concurrent-stress.sh** `[-- concurrent rounds]` — Launches multiple nexus\ninvocations simultaneously (default 10x10) to test for resource leaks under\ncontention.\n\n**crash-recovery.sh** `[-- iterations]` — Kills a pool process mid-execution\nwith SIGKILL (default 10 iterations) and verifies the nexus exits promptly\nwithout leaking resources.\n\n**valgrind-check.sh** — Runs the nexus under valgrind and checks for large\nmemory leaks (>4KB) or file descriptor leaks (>3 extra). Requires valgrind.\n\n## Examples\n\n```bash\n# Single test, single workload\n./zombie-stress.sh ../golden-tests/interop-3a-cp \"foo '[1,2,3]'\" -- 100\n\n# Crash recovery on R-only workload\n./crash-recovery.sh ../golden-tests/argument-form-1-r \"foo 2\" -- 20\n\n# Run all tests across all language combinations\n./run-all.sh\n\n# Run only zombie and concurrent tests\n./run-all.sh zombie concurrent\n```\n\n## run-all.sh\n\nLoops all stress tests across six workloads covering every language combination:\nC++ only, Python only, R only, C++/Python, C++/R, Python/R.\n"
  },
  {
    "path": "test-suite/stress/common.sh",
    "content": "#!/usr/bin/env bash\n# common.sh - Shared setup for stress tests\n# Source this file: source \"$(dirname \"$0\")/common.sh\"\n#\n# After sourcing, the caller should call:\n#   parse_args \"$@\"\n#   compile_workload\n#\n# This sets:\n#   WORK_DIR   - temp directory containing compiled nexus\n#   CALLS[@]   - array of \"subcommand args...\" strings to invoke\n\nset -euo pipefail\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\nWORK_DIR=$(mktemp -d)\nNEXUS_TIMEOUT=15\nSTDERR_LOG=\"$SCRIPT_DIR/stress-test.log\"\nSTRESS_SCRIPT=\"$(basename \"${0}\")\"\n\ncleanup() {\n    jobs -p 2>/dev/null | xargs -r kill -9 2>/dev/null || true\n    wait 2>/dev/null || true\n    rm -rf \"$WORK_DIR\"\n}\ntrap cleanup EXIT\n\ncount_zombies() {\n    local n\n    n=$(ps -eo stat 2>/dev/null | grep -c '^Z') || true\n    echo \"${n:-0}\"\n}\n\ncount_shm() {\n    local n=0\n    if ls /dev/shm/morloc-* &>/dev/null; then\n        n=$(ls -1 /dev/shm/morloc-* 2>/dev/null | wc -l)\n    fi\n    echo \"$n\"\n}\n\ncount_tmp() {\n    local n=0\n    if ls -d /tmp/morloc.* &>/dev/null; then\n        n=$(ls -1d /tmp/morloc.* 2>/dev/null | wc -l)\n    fi\n    echo \"$n\"\n}\n\n# Parse: <test_dir> <call> [<call> ...]\n# Each <call> is a quoted string like \"foo '[1,2,3]'\"\nparse_args() {\n    if [ $# -lt 2 ]; then\n        echo \"Usage: $(basename \"$0\") <golden-test-dir> <call> [<call> ...]\" >&2\n        echo \"  <call> is a nexus invocation, e.g. \\\"foo '[1,2,3]'\\\"\" >&2\n        exit 1\n    fi\n    TEST_DIR=\"$(cd \"$1\" && pwd)\"\n    shift\n    CALLS=(\"$@\")\n}\n\ncompile_workload() {\n    local test_name\n    test_name=$(basename \"$TEST_DIR\")\n\n    # Extract the morloc source file from the Makefile\n    local loc_file\n    loc_file=$(grep 'morloc make' \"$TEST_DIR/Makefile\" | head -1 | grep -oP '[^ ]+\\.loc')\n\n    cp \"$TEST_DIR\"/*.loc \"$WORK_DIR\"/ 2>/dev/null || true\n    cp \"$TEST_DIR\"/*.py \"$WORK_DIR\"/ 2>/dev/null || true\n    cp \"$TEST_DIR\"/*.hpp \"$WORK_DIR\"/ 2>/dev/null || true\n    cp \"$TEST_DIR\"/*.R \"$WORK_DIR\"/ 2>/dev/null || true\n\n    cd \"$WORK_DIR\"\n    echo \"Compiling $test_name ($loc_file)...\"\n    morloc make -o nexus \"$loc_file\" > /dev/null 2>&1\n    echo \"Done. Calls: ${CALLS[*]}\"\n}\n\n# Run a random call from CALLS[@], logging stderr to STDERR_LOG\nrun_nexus() {\n    local call=\"${CALLS[RANDOM % ${#CALLS[@]}]}\"\n    local _tmp_err\n    _tmp_err=$(mktemp)\n    eval timeout \"$NEXUS_TIMEOUT\" ./nexus $call > /dev/null 2>\"$_tmp_err\"\n    local _rc=$?\n    if [ -s \"$_tmp_err\" ]; then\n        {\n            printf \"=== %s | %s | call: %s | %s ===\\n\" \\\n                \"$STRESS_SCRIPT\" \"$(basename \"$TEST_DIR\")\" \"$call\" \"$(date '+%H:%M:%S')\"\n            cat \"$_tmp_err\"\n            echo \"\"\n        } >> \"$STDERR_LOG\"\n    fi\n    rm -f \"$_tmp_err\"\n    return $_rc\n}\n"
  },
  {
    "path": "test-suite/stress/concurrent-stress.sh",
    "content": "#!/usr/bin/env bash\n# concurrent-stress.sh - Test concurrent nexus execution\n#\n# Usage: ./concurrent-stress.sh <golden-test-dir> <call> [<call> ...] [-- concurrent rounds]\n#   e.g. ./concurrent-stress.sh ../golden-tests/interop-3a-cp \"foo '[1,2,3]'\" -- 10 10\n\nsource \"$(dirname \"$0\")/common.sh\"\n\nPOSITIONAL=()\nCONCURRENT=10\nROUNDS=10\nwhile [ $# -gt 0 ]; do\n    if [ \"$1\" = \"--\" ]; then\n        shift; CONCURRENT=${1:-10}; shift; ROUNDS=${1:-10}; break\n    fi\n    POSITIONAL+=(\"$1\"); shift\ndone\nparse_args \"${POSITIONAL[@]}\"\n\necho \"=== Concurrent Stress Test ===\"\necho \"Concurrent: $CONCURRENT, Rounds: $ROUNDS\"\ncompile_workload\n\nINITIAL_SHM=$(count_shm)\nINITIAL_TMP=$(count_tmp)\nINITIAL_ZOMBIES=$(count_zombies)\nRESOURCE_LEAK_ROUNDS=0\n\nfor round in $(seq 1 \"$ROUNDS\"); do\n    PIDS=()\n    for j in $(seq 1 \"$CONCURRENT\"); do\n        run_nexus &\n        PIDS+=($!)\n    done\n\n    EXEC_FAILURES=0\n    for pid in \"${PIDS[@]}\"; do\n        if ! wait \"$pid\" 2>/dev/null; then\n            EXEC_FAILURES=$((EXEC_FAILURES + 1))\n        fi\n    done\n\n    sleep 0.05\n\n    SHM=$(( $(count_shm) - INITIAL_SHM ))\n    TMP=$(( $(count_tmp) - INITIAL_TMP ))\n    ZOMBIES=$(( $(count_zombies) - INITIAL_ZOMBIES ))\n\n    printf \"Round %3d: %d/%d succeeded, zombies=%d, shm=%d, tmp=%d\\n\" \\\n        \"$round\" \"$((CONCURRENT - EXEC_FAILURES))\" \"$CONCURRENT\" \\\n        \"$ZOMBIES\" \"$SHM\" \"$TMP\"\n\n    if (( SHM > 0 || TMP > 0 )); then\n        RESOURCE_LEAK_ROUNDS=$((RESOURCE_LEAK_ROUNDS + 1))\n    fi\ndone\n\necho \"\"\necho \"=== Summary ===\"\nFINAL_SHM=$(( $(count_shm) - INITIAL_SHM ))\nFINAL_TMP=$(( $(count_tmp) - INITIAL_TMP ))\nFINAL_ZOMBIES=$(( $(count_zombies) - INITIAL_ZOMBIES ))\n\necho \"New zombies: $FINAL_ZOMBIES\"\necho \"Leaked SHM: $FINAL_SHM\"\necho \"Leaked tmpdir: $FINAL_TMP\"\necho \"Rounds with resource leaks: $RESOURCE_LEAK_ROUNDS / $ROUNDS\"\n\nif (( FINAL_SHM > 0 || FINAL_TMP > 0 )); then\n    echo \"FAIL: Resources leaked\"\n    exit 1\nfi\necho \"PASS\"\n"
  },
  {
    "path": "test-suite/stress/crash-recovery.sh",
    "content": "#!/usr/bin/env bash\n# crash-recovery.sh - Test nexus behavior when a pool crashes\n#\n# Starts the nexus in background, kills one of its pool child processes with\n# SIGKILL, and verifies the nexus exits within a reasonable time without\n# hanging. Also checks for resource leaks.\n#\n# Usage: ./crash-recovery.sh <golden-test-dir> <call> [<call> ...] [-- iterations]\n#   e.g. ./crash-recovery.sh ../golden-tests/interop-3a-cp \"foo '[1,2,3]'\" -- 10\n\nsource \"$(dirname \"$0\")/common.sh\"\n\nPOSITIONAL=()\nITERATIONS=10\nwhile [ $# -gt 0 ]; do\n    if [ \"$1\" = \"--\" ]; then\n        shift; ITERATIONS=${1:-10}; break\n    fi\n    POSITIONAL+=(\"$1\"); shift\ndone\nparse_args \"${POSITIONAL[@]}\"\n\nMAX_WAIT_SECONDS=5\n\necho \"=== Crash Recovery Test ===\"\necho \"Iterations: $ITERATIONS\"\ncompile_workload\n\nINITIAL_SHM=$(count_shm)\nINITIAL_TMP=$(count_tmp)\nFAILURES=0\n\nfor i in $(seq 1 \"$ITERATIONS\"); do\n    # Start nexus in background with a random call\n    local_call=\"${CALLS[RANDOM % ${#CALLS[@]}]}\"\n    iter_err=\"$WORK_DIR/iter-${i}.err\"\n    eval exec ./nexus $local_call 2>\"$iter_err\" > /dev/null &\n    NEXUS_PID=$!\n\n    # Wait for pools to start\n    sleep 0.1\n\n    # Find and kill a pool child process\n    POOL_PID=$(ps -o pid= --ppid \"$NEXUS_PID\" 2>/dev/null | head -1 | tr -d ' ') || true\n\n    if [ -n \"$POOL_PID\" ]; then\n        kill -9 \"$POOL_PID\" 2>/dev/null || true\n    fi\n\n    # Wait for nexus to exit (with timeout)\n    HUNG=0\n    ELAPSED=0\n    while kill -0 \"$NEXUS_PID\" 2>/dev/null; do\n        if (( ELAPSED >= MAX_WAIT_SECONDS * 10 )); then\n            HUNG=1\n            kill -9 \"$NEXUS_PID\" 2>/dev/null || true\n            break\n        fi\n        sleep 0.1\n        ELAPSED=$((ELAPSED + 1))\n    done\n    wait \"$NEXUS_PID\" 2>/dev/null || true\n\n    # Log any nexus stderr\n    if [ -s \"$iter_err\" ]; then\n        {\n            printf \"=== %s | %s | iteration %d | call: %s | %s ===\\n\" \\\n                \"$STRESS_SCRIPT\" \"$(basename \"$TEST_DIR\")\" \"$i\" \"$local_call\" \"$(date '+%H:%M:%S')\"\n            cat \"$iter_err\"\n            echo \"\"\n        } >> \"$STDERR_LOG\"\n    fi\n    rm -f \"$iter_err\"\n\n    SHM=$(( $(count_shm) - INITIAL_SHM ))\n    TMP=$(( $(count_tmp) - INITIAL_TMP ))\n\n    if (( HUNG )); then\n        printf \"Iteration %3d: HUNG (nexus did not exit within %ds)\\n\" \"$i\" \"$MAX_WAIT_SECONDS\"\n        FAILURES=$((FAILURES + 1))\n    elif (( SHM > 0 || TMP > 0 )); then\n        printf \"Iteration %3d: LEAK (shm=%d, tmp=%d)\\n\" \"$i\" \"$SHM\" \"$TMP\"\n        FAILURES=$((FAILURES + 1))\n    else\n        printf \"Iteration %3d: OK\\n\" \"$i\"\n    fi\ndone\n\necho \"\"\necho \"=== Summary ===\"\necho \"Failures: $FAILURES / $ITERATIONS\"\n\nif (( FAILURES > 0 )); then\n    echo \"FAIL\"\n    exit 1\nfi\necho \"PASS\"\n"
  },
  {
    "path": "test-suite/stress/run-all.sh",
    "content": "#!/usr/bin/env bash\n# run-all.sh - Run all stress tests across language combinations\n#\n# Usage: ./run-all.sh [test...]\n#   With no arguments, runs all tests. Pass test names to run a subset:\n#   ./run-all.sh zombie concurrent\n\nset -euo pipefail\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\nGOLDEN=\"$SCRIPT_DIR/../golden-tests\"\n\n# Workloads: golden-test-dir + nexus calls\ndeclare -A WORKLOAD_DIR=(\n    [cpp]=\"$GOLDEN/argument-form-1-c\"\n    [py]=\"$GOLDEN/argument-form-1-py\"\n    [r]=\"$GOLDEN/argument-form-1-r\"\n    [cpp-py]=\"$GOLDEN/interop-3a-cp\"\n    [cpp-r]=\"$GOLDEN/interop-3a-rc\"\n    [py-r]=\"$GOLDEN/interop-3a-pr\"\n)\ndeclare -A WORKLOAD_CALL=(\n    [cpp]=\"foo 2\"\n    [py]=\"foo 2\"\n    [r]=\"foo 2\"\n    [cpp-py]=\"foo '[1,2,3]'\"\n    [cpp-r]=\"foo '[1,2,3]'\"\n    [py-r]=\"foo '[1,2,3]'\"\n)\n\nWORKLOAD_ORDER=(cpp py r cpp-py cpp-r py-r)\n\nPASSED=0\nFAILED=0\nSKIPPED=0\nFAILURES=()\n\n# Colors (disabled if stdout is not a terminal)\nif [[ -t 1 ]]; then\n    GREEN=$'\\033[32m' RED=$'\\033[31m' YELLOW=$'\\033[33m' BOLD=$'\\033[1m' RESET=$'\\033[0m'\nelse\n    GREEN='' RED='' YELLOW='' BOLD='' RESET=''\nfi\n\nrun_test() {\n    local test_script=\"$1\"\n    local test_name=\"$2\"\n    local workload=\"$3\"\n    local dir=\"${WORKLOAD_DIR[$workload]}\"\n    local call=\"${WORKLOAD_CALL[$workload]}\"\n\n    printf \"%-20s %-8s ... \" \"$test_name\" \"[$workload]\"\n\n    local output start_time elapsed\n    start_time=$(date +%s%N)\n    if output=$(\"$SCRIPT_DIR/$test_script\" \"$dir\" \"$call\" 2>&1); then\n        elapsed=$(( ($(date +%s%N) - start_time) / 1000000 ))\n        if (( elapsed >= 1000 )); then\n            printf \"%sPASS%s (%d.%01ds)\\n\" \"$GREEN\" \"$RESET\" \"$((elapsed/1000))\" \"$(( (elapsed%1000) / 100 ))\"\n        else\n            echo \"${GREEN}PASS${RESET} (${elapsed}ms)\"\n        fi\n        PASSED=$((PASSED + 1))\n    else\n        elapsed=$(( ($(date +%s%N) - start_time) / 1000000 ))\n        local last_line\n        last_line=$(echo \"$output\" | tail -1)\n        if [[ \"$last_line\" == SKIP* ]]; then\n            echo \"${YELLOW}SKIP${RESET}\"\n            SKIPPED=$((SKIPPED + 1))\n        else\n            echo \"${RED}FAIL${RESET}\"\n            FAILED=$((FAILED + 1))\n            FAILURES+=(\"$test_name [$workload]\")\n            # Print last 5 lines of output for context\n            echo \"$output\" | tail -5 | sed 's/^/    /'\n        fi\n    fi\n}\n\n# Determine which tests to run\nSELECTED=(\"$@\")\nshould_run() {\n    if [ ${#SELECTED[@]} -eq 0 ]; then return 0; fi\n    for s in \"${SELECTED[@]}\"; do\n        if [[ \"$1\" == *\"$s\"* ]]; then return 0; fi\n    done\n    return 1\n}\n\nSTDERR_LOG=\"$SCRIPT_DIR/stress-test.log\"\n> \"$STDERR_LOG\"\n\necho \"=== Morloc Stress Test Suite ===\"\necho \"Stderr log: $STDERR_LOG\"\necho \"\"\n\nfor workload in \"${WORKLOAD_ORDER[@]}\"; do\n    if should_run \"zombie\"; then\n        run_test \"zombie-stress.sh\" \"zombie\" \"$workload\"\n    fi\n    if should_run \"concurrent\"; then\n        run_test \"concurrent-stress.sh\" \"concurrent\" \"$workload\"\n    fi\n    if should_run \"crash\"; then\n        run_test \"crash-recovery.sh\" \"crash-recovery\" \"$workload\"\n    fi\n    if should_run \"valgrind\"; then\n        run_test \"valgrind-check.sh\" \"valgrind\" \"$workload\"\n    fi\ndone\n\necho \"\"\necho \"=== Results ===\"\necho \"${GREEN}Passed: $PASSED${RESET}, ${RED}Failed: $FAILED${RESET}, ${YELLOW}Skipped: $SKIPPED${RESET}\"\n\nif [ -s \"$STDERR_LOG\" ]; then\n    echo \"Nexus stderr logged to: $STDERR_LOG\"\nfi\n\nif (( FAILED > 0 )); then\n    echo \"\"\n    echo \"${RED}Failures:${RESET}\"\n    for f in \"${FAILURES[@]}\"; do\n        echo \"  ${RED}-${RESET} $f\"\n    done\n    exit 1\nfi\necho \"${GREEN}${BOLD}ALL PASSED${RESET}\"\n"
  },
  {
    "path": "test-suite/stress/valgrind-check.sh",
    "content": "#!/usr/bin/env bash\n# valgrind-check.sh - Check for memory leaks and leaked file descriptors\n#\n# Runs the nexus under valgrind with leak checking and fd tracking.\n# Requires valgrind to be installed; skips gracefully if not available.\n#\n# Usage: ./valgrind-check.sh <golden-test-dir> <call>\n#   e.g. ./valgrind-check.sh ../golden-tests/interop-3a-cp \"foo '[1,2,3]'\"\n\nsource \"$(dirname \"$0\")/common.sh\"\n\nparse_args \"$@\"\n\necho \"=== Valgrind Memory/FD Leak Check ===\"\n\nif ! command -v valgrind &>/dev/null; then\n    echo \"SKIP: valgrind not found\"\n    exit 0\nfi\n\ncompile_workload\n\nVALGRIND_LOG=\"/tmp/morloc-valgrind-$$.log\"\n\n# Use first call only for valgrind (deterministic)\nCALL=\"${CALLS[0]}\"\n\n# The nexus file may be a shell wrapper (#!/bin/sh + exec morloc-nexus \"$0\" \"$@\").\n# Valgrind can't instrument through exec, so unwrap to call morloc-nexus directly.\nif head -1 ./nexus | grep -q '^#!'; then\n    NEXUS_BIN=$(sed -n '2s/^exec \\([^ ]*\\) .*/\\1/p' ./nexus)\n    if [ -z \"$NEXUS_BIN\" ] || ! command -v \"$NEXUS_BIN\" &>/dev/null; then\n        echo \"FAIL: Cannot find morloc-nexus binary from nexus wrapper\"\n        exit 1\n    fi\n    VALGRIND_CMD=\"$NEXUS_BIN ./nexus $CALL\"\nelse\n    VALGRIND_CMD=\"./nexus $CALL\"\nfi\n\necho \"Running under valgrind: $VALGRIND_CMD\"\nNEXUS_ERR=\"$WORK_DIR/valgrind-nexus.err\"\neval timeout 60 valgrind \\\n    --leak-check=full \\\n    --show-leak-kinds=definite,indirect \\\n    --track-fds=yes \\\n    --log-file=\"$VALGRIND_LOG\" \\\n    $VALGRIND_CMD > /dev/null 2>\"$NEXUS_ERR\"\nEXIT_CODE=$?\n\n# Log any nexus/valgrind stderr\nif [ -s \"$NEXUS_ERR\" ]; then\n    {\n        printf \"=== %s | %s | call: %s | %s ===\\n\" \\\n            \"$STRESS_SCRIPT\" \"$(basename \"$TEST_DIR\")\" \"$CALL\" \"$(date '+%H:%M:%S')\"\n        cat \"$NEXUS_ERR\"\n        echo \"\"\n    } >> \"$STDERR_LOG\"\nfi\nrm -f \"$NEXUS_ERR\"\n\necho \"\"\nif [ ! -f \"$VALGRIND_LOG\" ]; then\n    echo \"FAIL: No valgrind log produced\"\n    exit 1\nfi\n\nif (( EXIT_CODE == 124 )); then\n    echo \"FAIL: Timed out under valgrind\"\n    rm -f \"$VALGRIND_LOG\"\n    exit 1\nfi\n\n# Extract definite leak count\nDEFINITELY_LOST=$(grep 'definitely lost:' \"$VALGRIND_LOG\" | grep -oP '\\d+(?= bytes)' | head -1)\nDEFINITELY_LOST=${DEFINITELY_LOST:-0}\nFD_LEAK=$(grep 'FILE DESCRIPTORS:' \"$VALGRIND_LOG\" | grep -oP '\\d+(?= open)' | head -1)\nFD_LEAK=${FD_LEAK:-3}\nEXTRA_FDS=$((FD_LEAK - 3))  # subtract stdin/stdout/stderr\n\necho \"Definitely lost: ${DEFINITELY_LOST} bytes\"\necho \"Extra file descriptors at exit: ${EXTRA_FDS}\"\n\n# Fail on large leaks (>4KB) or many leaked fds (>3)\nif (( DEFINITELY_LOST > 4096 )); then\n    echo \"\"\n    echo \"FAIL: Large memory leak detected (log: $VALGRIND_LOG)\"\n    cat \"$VALGRIND_LOG\"\n    exit 1\nfi\nif (( EXTRA_FDS > 3 )); then\n    echo \"\"\n    echo \"FAIL: File descriptor leak detected (log: $VALGRIND_LOG)\"\n    cat \"$VALGRIND_LOG\"\n    exit 1\nfi\n\nrm -f \"$VALGRIND_LOG\"\necho \"PASS\"\n"
  },
  {
    "path": "test-suite/stress/zombie-stress.sh",
    "content": "#!/usr/bin/env bash\n# zombie-stress.sh - Test for resource accumulation over repeated runs\n#\n# Usage: ./zombie-stress.sh <golden-test-dir> <call> [<call> ...] [-- iterations]\n#   e.g. ./zombie-stress.sh ../golden-tests/interop-3a-cp \"foo '[1,2,3]'\" -- 50\n\nsource \"$(dirname \"$0\")/common.sh\"\n\n# Split args at \"--\" into calls and options\nPOSITIONAL=()\nITERATIONS=50\nwhile [ $# -gt 0 ]; do\n    if [ \"$1\" = \"--\" ]; then\n        shift; ITERATIONS=${1:-50}; break\n    fi\n    POSITIONAL+=(\"$1\"); shift\ndone\nparse_args \"${POSITIONAL[@]}\"\n\necho \"=== Zombie Stress Test ===\"\necho \"Iterations: $ITERATIONS\"\ncompile_workload\n\nINITIAL_ZOMBIES=$(count_zombies)\nINITIAL_SHM=$(count_shm)\nINITIAL_TMP=$(count_tmp)\nFAILURES=0\n\nprintf \"\\n%-6s  %-8s  %-8s  %-8s\\n\" \"ITER\" \"ZOMBIES\" \"SHM\" \"TMPDIR\"\nprintf \"%-6s  %-8s  %-8s  %-8s\\n\" \"----\" \"-------\" \"---\" \"------\"\n\nfor i in $(seq 1 \"$ITERATIONS\"); do\n    run_nexus || true\n    sleep 0.02\n\n    ZOMBIES=$(( $(count_zombies) - INITIAL_ZOMBIES ))\n    SHM=$(( $(count_shm) - INITIAL_SHM ))\n    TMP=$(( $(count_tmp) - INITIAL_TMP ))\n\n    if (( i % 10 == 0 || SHM > 0 || TMP > 0 )); then\n        printf \"%-6d  %-8d  %-8d  %-8d\\n\" \"$i\" \"$ZOMBIES\" \"$SHM\" \"$TMP\"\n    fi\n\n    if (( SHM > 0 || TMP > 0 )); then\n        FAILURES=$((FAILURES + 1))\n    fi\ndone\n\necho \"\"\necho \"=== Summary ===\"\nFINAL_ZOMBIES=$(( $(count_zombies) - INITIAL_ZOMBIES ))\nFINAL_SHM=$(( $(count_shm) - INITIAL_SHM ))\nFINAL_TMP=$(( $(count_tmp) - INITIAL_TMP ))\n\necho \"New zombies: $FINAL_ZOMBIES\"\necho \"Leaked SHM: $FINAL_SHM\"\necho \"Leaked tmpdir: $FINAL_TMP\"\necho \"Iterations with resource leaks: $FAILURES / $ITERATIONS\"\n\nif (( FINAL_SHM > 0 || FINAL_TMP > 0 )); then\n    echo \"FAIL: Resources leaked\"\n    exit 1\nfi\necho \"PASS\"\n"
  },
  {
    "path": "test-suite/test.sh",
    "content": "#!/usr/bin/env bash\n\n./typecheck-benchmark/run-benchmarks.sh\n\n./concurrency-tests/run-tests.sh\n\n./daemon-tests/run-tests.sh\n\n./stress/run-all.sh\n\necho \"libmorloc tests not defined yet\"\n# cmorloc-tests:\n\necho \"error-message tests not defined yet\"\n# error-message-tests:\n\necho \"executable benchmarks not yet up\" \n# executable-benchmark/distributed:\n# executable-benchmark/parallel-interop:\n# executable-benchmark/serial-interop:\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-ann-20.loc",
    "content": "module aa20 (foo)\n\nf :: Int -> Int\nf x = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (x :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int) :: Int)\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-deep-10.loc",
    "content": "module ad10 (foo)\n\nf :: Int -> Int\nf x = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f x)))))))))\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-deep-100.loc",
    "content": "module ad100 (foo)\n\nf :: Int -> Int\nf x = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (x))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-deep-30.loc",
    "content": "module ad30 (foo)\n\nf :: Int -> Int\nf x = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f x)))))))))))))))))))))))))))))\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-deep-50.loc",
    "content": "module ad50 (foo)\n\nf :: Int -> Int\nf x = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (x))))))))))))))))))))))))))))))))))))))))))))))))))\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-deep.loc",
    "content": "module ad (foo)\n\nf :: Int -> Int\nf x = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f x)))))))))))))))))))\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/apply-multi-arg-20.loc",
    "content": "module ma20 (foo)\n\nf :: Int -> Int -> Int\nf x y = x\n\nfoo :: Int -> Int\nfoo x = f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f (f x 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/compose-10.loc",
    "content": "module c10 (foo)\n\nimport root ((.))\n\nid_ :: Int -> Int\nid_ x = x\n\nfoo = id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/compose-20.loc",
    "content": "module c20 (foo)\n\nimport root ((.))\n\nid_ :: Int -> Int\nid_ x = x\n\nfoo = id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/compose-40.loc",
    "content": "module c40 (foo)\n\nimport root ((.))\n\nid_ :: Int -> Int\nid_ x = x\n\nfoo = id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/partial-app.loc",
    "content": "module pa (foo)\n\nimport root ((.))\n\nadd :: Int -> Int -> Int\nadd x y = x\n\ninc :: Int -> Int\ninc = add 1\n\nfoo = inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc . inc\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/poly-compose.loc",
    "content": "module pc (foo)\n\nimport root ((.))\n\nid_ x = x\n\nfoo = id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_ . id_\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/run-benchmarks.sh",
    "content": "#!/bin/bash\n# Typecheck performance benchmarks\n# Usage: ./run-benchmarks.sh [timeout_seconds] [warn_threshold_ms]\n#\n# All benchmarks should complete well under 1 second. Any result over the\n# warning threshold (default 500ms) is flagged as SLOW.\n\nTIMEOUT=${1:-10}\nWARN_MS=${2:-500}\nSCRIPT_DIR=\"$(cd \"$(dirname \"$0\")\" && pwd)\"\n\nRED='\\033[0;31m'\nGREEN='\\033[0;32m'\nYELLOW='\\033[0;33m'\nBOLD='\\033[1m'\nRESET='\\033[0m'\n\npass=0\nfail=0\n\nprintf \"${BOLD}%-25s %10s %8s${RESET}\\n\" \"program\" \"time (ms)\" \"status\"\nprintf \"%-25s %10s %8s\\n\" \"-------------------------\" \"----------\" \"--------\"\n\nfor loc_file in \"$SCRIPT_DIR\"/*.loc; do\n    name=$(basename \"$loc_file\" .loc)\n    start_ns=$(date +%s%N)\n    timeout \"$TIMEOUT\" morloc typecheck \"$loc_file\" > /dev/null 2>&1\n    exit_code=$?\n    end_ns=$(date +%s%N)\n    diff_ms=$(( (end_ns - start_ns) / 1000000 ))\n\n    if [ $exit_code -eq 124 ]; then\n        color=\"$RED\"\n        status=\"TIMEOUT\"\n        fail=$((fail + 1))\n    elif [ $exit_code -ne 0 ]; then\n        color=\"$RED\"\n        status=\"ERROR\"\n        fail=$((fail + 1))\n    elif [ $diff_ms -gt $WARN_MS ]; then\n        color=\"$YELLOW\"\n        status=\"SLOW\"\n        fail=$((fail + 1))\n    else\n        color=\"$GREEN\"\n        status=\"ok\"\n        pass=$((pass + 1))\n    fi\n\n    printf \"%-25s %10d ${color}%8s${RESET}\\n\" \"$name\" \"$diff_ms\" \"$status\"\ndone\n\necho \"\"\nif [ $fail -eq 0 ]; then\n    printf \"${GREEN}${BOLD}All %d benchmarks passed${RESET} (warn threshold: ${WARN_MS}ms)\\n\" \"$pass\"\nelse\n    printf \"${RED}${BOLD}%d failed${RESET}, %d passed (warn threshold: ${WARN_MS}ms)\\n\" \"$fail\" \"$pass\"\nfi\n[ $fail -eq 0 ] && exit 0 || exit 1\n"
  },
  {
    "path": "test-suite/typecheck-benchmark/tuple-nested-20.loc",
    "content": "module tn20 (foo)\n\nf :: (Int, Int) -> Int\nf x = 1\n\ng :: Int -> (Int, Int)\ng x = (x, x)\n\nfoo :: Int -> Int\nfoo x = f (g (f (g (f (g (f (g (f (g (f (g (f (g (f (g (f (g (f (g x)))))))))))))))))))\n"
  }
]