[
  {
    "path": ".cargo/config.toml",
    "content": "# https://github.com/kylebarron/arrow-wasm/issues/8#issuecomment-2790469295\n[target.wasm32-unknown-unknown]\nrustflags = ['--cfg', 'getrandom_backend=\"wasm_js\"']\n"
  },
  {
    "path": ".github/dependabot.yml",
    "content": "version: 2\nupdates:\n  - package-ecosystem: cargo\n    directory: \"/\"\n    schedule:\n      interval: weekly\n    open-pull-requests-limit: 10\n    groups:\n      arrow-rs:\n        patterns:\n          - \"arrow\"\n          - \"parquet\"\n      other:\n        patterns:\n          - \"*\"\n        exclude-patterns:\n          - \"arrow\"\n          - \"parquet\"\n  - package-ecosystem: npm\n    directory: \"/\"\n    schedule:\n      interval: weekly\n    open-pull-requests-limit: 10\n  - package-ecosystem: github-actions\n    directory: \"/\"\n    schedule:\n      interval: weekly\n    open-pull-requests-limit: 10\n"
  },
  {
    "path": ".github/workflows/docs-website.yml",
    "content": "name: Publish docs website\n\non:\n  push:\n    tags:\n      - \"*\"\n\njobs:\n  docs:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        uses: dtolnay/rust-toolchain@stable\n\n      - name: Install\n        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh\n\n      - uses: Swatinem/rust-cache@v2\n\n      - uses: actions/setup-node@v5\n        with:\n          node-version: \"20\"\n\n      - name: Install JS dependencies\n        run: yarn\n\n      - name: Build bundles\n        run: yarn build\n\n      - name: Build docs\n        run: yarn docs:build\n\n      - name: Publish docs\n        run: |\n          yarn docs:publish\n"
  },
  {
    "path": ".github/workflows/pr-manipulation.yml",
    "content": "name: PR Comment Generation\n\non:\n  workflow_run:\n    workflows: [\"Build and Test\"]\n    types:\n      - completed\n\njobs:\n  comment_on_pr:\n    runs-on: ubuntu-latest\n    if: >\n      github.event.workflow_run.event == 'pull_request' &&\n      github.event.workflow_run.conclusion == 'success'\n    steps:\n      - name: 'Download artifact'\n        uses: actions/github-script@v7\n        with:\n          script: |\n            const fs = require('fs');\n            const artifacts = await github.rest.actions.listWorkflowRunArtifacts({\n              ...context.repo,\n              run_id: ${{github.event.workflow_run.id }},\n            });\n            const matchArtifact = artifacts.data.artifacts.filter((artifact) => {\n              return artifact.name == \"pr\"\n            })[0];\n            const download = await github.rest.actions.downloadArtifact({\n              ...context.repo,\n              artifact_id: matchArtifact.id,\n              archive_format: 'zip',\n            });\n            \n            fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data));\n      - run: unzip pr.zip\n      \n      - name: 'Comment on PR'\n        uses: actions/github-script@v7\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n          script: |\n            const fs = require('fs');\n            const issueNumber = Number(fs.readFileSync('./NR'));\n            const summaryContent = fs.readFileSync('./step_summary.md', 'utf-8');\n\n            const existingCommentsOpts = github.rest.issues.listComments.endpoint.merge({\n              ...context.repo, issue_number: issueNumber\n            });\n            const existingComments = await github.paginate(existingCommentsOpts);\n            const TAG = 'execution';\n            const tagPattern = `<!-- pr_asset_summary_comment \"${TAG}\" -->`;\n            const body = `${summaryContent}\\n${tagPattern}`;\n            const preExistingComment = 
existingComments.find((comment) => comment.body?.includes(tagPattern));\n            if(preExistingComment) {\n              await github.rest.issues.updateComment({ ...context.repo, comment_id: preExistingComment.id, body });\n            } else {\n              await github.rest.issues.createComment({ ...context.repo, issue_number: issueNumber, body });\n            }\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Build and Test\n\non:\n  push:\n    branches:\n      - main\n  pull_request:\n\njobs:\n  test:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        uses: dtolnay/rust-toolchain@stable\n\n      - name: Install\n        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh\n\n      - uses: Swatinem/rust-cache@v2\n\n      - run: wasm-pack build --dev --target nodejs\n      - run: wasm-pack test --node\n\n  check:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: wasm32-unknown-unknown\n\n      - uses: Swatinem/rust-cache@v2\n\n      - run: cargo install cargo-all-features\n\n      - name: Check all combinations of features can build\n        run: cargo check-all-features -- --target wasm32-unknown-unknown\n\n  node-test:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        uses: dtolnay/rust-toolchain@stable\n\n      - name: Install\n        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh\n\n      - uses: Swatinem/rust-cache@v2\n\n      - uses: actions/setup-node@v5\n        with:\n          node-version: \"20\"\n\n      - name: Build bundle\n        run: yarn build:test\n\n      - name: Install dev dependencies\n        run: yarn\n\n      - name: Run Node tests\n        run: yarn test\n\n  fmt:\n    name: fmt\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        uses: dtolnay/rust-toolchain@stable\n        with:\n          components: rustfmt\n\n      - uses: Swatinem/rust-cache@v2\n\n      - name: Run\n        run: cargo fmt --all -- --check\n\n  clippy:\n    name: Clippy\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        
uses: dtolnay/rust-toolchain@stable\n        with:\n          components: clippy\n\n      - uses: Swatinem/rust-cache@v2\n\n      - name: \"clippy --all\"\n        run: cargo clippy --all --features=full --tests -- -D warnings\n\n  node-build-report:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v5\n\n      - name: Install Rust\n        uses: dtolnay/rust-toolchain@stable\n        with:\n          targets: wasm32-unknown-unknown\n\n      - name: Install\n        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh\n\n      - uses: Swatinem/rust-cache@v2\n\n      - uses: actions/setup-node@v5\n        with:\n          node-version: \"20\"\n      - uses: awalsh128/cache-apt-pkgs-action@latest\n        with:\n          packages: brotli pv parallel jq\n          version: 1.0\n\n      - name: Build bundle\n        run: ./scripts/report_build.sh\n      - name: Size Reporting\n        run: |\n          ls report_pkg/*/*.wasm | parallel brotli -f -Z {}\n          mkdir -p ./pr\n          echo \"| Asset  | Size | Compressed Size |\" >> ./pr/step_summary.md\n          echo \"| ------ | ---- | --------------- |\" >> ./pr/step_summary.md\n          for asset in $(ls report_pkg/*/*.wasm); do\n            export SIZE=$(stat --format '%s' $asset)\n            export COMPRESSED_SIZE=$(stat --format '%s' \"${asset}.br\")\n            export asset\n            echo \"| ${asset} | $(echo $SIZE | numfmt --to=si --suffix=\"B\") | $(echo $COMPRESSED_SIZE | numfmt --to=si --suffix=\"B\") |\" >> ./pr/step_summary.md\n            echo $(jq -n '{\"asset\": $ENV.asset, \"size\": $ENV.SIZE | tonumber, \"compressed_size\": $ENV.COMPRESSED_SIZE | tonumber}')\n          done | jq -s 'map({ (.asset|tostring): .}) | add' > ./pr/asset_manifest.json\n          echo ${{ github.event.number }} > ./pr/NR\n          if [[ \"${{ github.event_name }}\" != \"pull_request\" ]]; then\n            cat ./pr/step_summary.md > $GITHUB_STEP_SUMMARY\n          fi;\n  
    - uses: actions/upload-artifact@v5\n        with:\n          name: pr\n          path: pr/\n  delta_generation:\n    runs-on: ubuntu-latest\n    if: >\n      github.event_name == 'pull_request'\n    needs: node-build-report\n    steps:\n      - uses: actions/download-artifact@v6\n        with:\n          name: pr\n          path: pr/\n      - name: \"Generate size deltas\"\n        uses: actions/github-script@v7\n        with:\n          script: |\n            const fs = require('fs');\n            const { execSync } = require('child_process');\n            const baseContext = {\n              repo: {\n                repo: '${{ github.event.pull_request.base.repo.name }}',\n                owner: '${{ github.event.pull_request.base.repo.owner.login }}'\n              }\n            };\n            const baseWorkflows = await github.rest.actions.listWorkflowRuns({\n              ...baseContext.repo,\n              branch: '${{ github.event.pull_request.base.ref }}',\n              status: 'success',\n              workflow_id: 'test.yml',\n            });\n            const matchWorkflow = baseWorkflows.data?.workflow_runs?.[0];\n            const artifacts = await github.rest.actions.listWorkflowRunArtifacts({\n              ...baseContext.repo,\n              run_id: matchWorkflow?.id,\n            });\n            const matchArtifact = artifacts.data.artifacts.filter((artifact) => {\n              return artifact.name == \"pr\"\n            })[0];\n            if(matchArtifact) {\n              const download = await github.rest.actions.downloadArtifact({\n                ...baseContext.repo,\n                artifact_id: matchArtifact.id,\n                archive_format: 'zip',\n              });\n\n              fs.writeFileSync('${{github.workspace}}/base.zip', Buffer.from(download.data));\n              execSync(`unzip -p base.zip asset_manifest.json >base_asset_manifest.json || true`);\n            }\n            // now, read in the asset manifests, for 
the head and base\n            let baseAssets = {};\n            try {\n              baseAssets = JSON.parse(fs.readFileSync('./base_asset_manifest.json')) ?? {};\n            } catch (error) {\n              console.log('No base asset manifest found');\n            }\n            const assets = JSON.parse(fs.readFileSync('./pr/asset_manifest.json'));\n            const unitOptions = {\n                style: 'unit', unit: 'byte', unitDisplay: 'narrow', notation: 'compact',\n                maximumSignificantDigits: 3\n            };\n            const formatter = new Intl.NumberFormat('en-US', unitOptions);\n            const signedFormatter = new Intl.NumberFormat('en-US', { ...unitOptions, signDisplay: 'always' });\n            const percentFormatter = Intl.NumberFormat('en-US', { style: 'percent', signDisplay: 'always' });\n            const colorMap = {\n                '-1': 'green',\n                1: 'red',\n                0: 'black',\n                NaN: 'black'\n            };\n            // compute deltas and output markdown fragments\n            const lineFragments = Object.entries(assets).map(([k, v]) => {\n                const baseAsset = baseAssets[k] ?? 
{};\n                const { asset, size, compressed_size, size_delta, compressed_size_delta } = {\n                    ...v,\n                    ...Object.fromEntries(['size', 'compressed_size'].map(subK => {\n                        // compute the percentage change, NaN if the asset wasn't available\n                        const proportionalDelta = v?.[subK] / baseAsset?.[subK] - 1;\n                        const absoluteDelta = v?.[subK] - baseAsset?.[subK]\n                        const sign = Math.sign(proportionalDelta);\n                        // conditionally color the output via an inline latex block\n                        let fragment = '';\n                        if(Number.isFinite(proportionalDelta)) {\n                            fragment = `${signedFormatter.format(absoluteDelta)} ${percentFormatter.format(proportionalDelta)}`;\n                        } else {\n                            fragment = 'N/A';\n                        }\n                        if(!Number.isFinite(proportionalDelta) || sign === 0) {\n                          return [`${subK}_delta`, fragment]\n                        } else {\n                          const formattedFragment = `$\\\\color{${colorMap[sign]}}\\\\textbf{${fragment.replace('%', '\\\\%')}}$`;\n                          return [`${subK}_delta`, formattedFragment]\n                        }\n                    }))\n                };\n                // output a markdown fragment\n                const sizeFragment = `${formatter.format(size)} ${size_delta}`\n                const compressedFragment = `${formatter.format(compressed_size)} ${compressed_size_delta}`\n                return [asset.replace('report_pkg/', ''), sizeFragment, compressedFragment]\n            });\n            await core.summary.addHeading('Asset Sizes').addTable([\n              [{data: 'Asset', header: true}, {data: 'Uncompressed Size', header: true}, {data: 'Compressed Size', header: true}],\n              ...lineFragments\n 
           ]).write();\n            fs.cpSync(process.env.GITHUB_STEP_SUMMARY, './pr/step_summary.md')\n      # - uses: actions/upload-artifact@v5\n      #   with:\n      #     name: pr\n      #     path: pr/\n"
  },
  {
    "path": ".gitignore",
    "content": "*.fgb\n.DS_Store\n*.parquet\nnode_modules\n/target\n**/*.rs.bk\npkg/\nwasm-pack.log\n.idea/\nwww/data\nwww/data/\ndata/\n!tests/data/\n.yarn\n\n# Typedoc output\ndocs_build/\n.venv/\n.env\n.envrc\n"
  },
  {
    "path": ".vscode/settings.json",
    "content": "{\n  // \"rust-analyzer.cargo.target\": \"wasm32-unknown-unknown\",\n  \"rust-analyzer.cargo.features\": \"all\"\n}\n"
  },
  {
    "path": ".yarnrc.yml",
    "content": "nodeLinker: node-modules\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\n\n## [0.7.1] - 2025-09-17\n\n### What's Changed\n\n- ci: Bump node version in docs publish CI by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/792\n- Readme edit for tl;dr in hyparquet comparison by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/793\n- Fix handling of `rowGroups` option in `ParquetFile.read` by @quidquid in https://github.com/kylebarron/parquet-wasm/pull/802\n\n### New Contributors\n\n- @quidquid made their first contribution in https://github.com/kylebarron/parquet-wasm/pull/802\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.7.0...v0.7.1\n\n## [0.7.0] - 2025-09-17\n\n### What's Changed\n\n- Streaming Writes implementation by @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/305\n- Expose schema on ParquetFile by @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/599\n- Small doc fixes by @severo in https://github.com/kylebarron/parquet-wasm/pull/762\n- Bump to arrow/parquet 56 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/783\n- feat: Support reading Parquet data with embedded Arrow schema containing string view/binary view types by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/786\n- docs: Add section to docs about hyparquet comparison by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/790\n\n### New Contributors\n\n- @severo made their first contribution in https://github.com/kylebarron/parquet-wasm/pull/762\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.6.1...v0.7.0\n\n## [0.6.1] - 2024-05-04\n\n### What's Changed\n\n- Use Blob instead of File for input in `ParquetFile.fromFile`.\n- Export wasm paths from `package.json`.\n\n## [0.6.0] - 2024-04-21\n\n### New! :sparkles:\n\n- Class-based API + concurrent streams + column selections + File reader by @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/407. 
This added a new `ParquetFile` API for working with files at remote URLs without downloading them first.\n- Conditional exports in `package.json`. This should make it easier to use across Node and browser.\n- Improved documentation for how to use different entry points.\n\n### Breaking Changes:\n\n- arrow2 and parquet2-based implementation has been removed.\n- Layout of files has changed. Your import may need to change.\n- Imports are now `parquet-wasm`, `parquet-wasm/esm`, `parquet-wasm/bundler`, and `parquet-wasm/node`.\n\n### What's Changed\n\n- Add conditional exports by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/382\n- CI production build size summary by @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/401\n- Remove arrow2 implementation by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/446\n- feat: add lz4_raw support for `arrow1` by @fspoettel in https://github.com/kylebarron/parquet-wasm/pull/466\n- Highlight that esm entry point needs await of default export by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/487\n- Fixes for both report builds and PR comment workflow by @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/495\n- fix package exports by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/414\n- Object store wasm usage by @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/490\n- Set Parquet key-value metadata by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/503\n- Read parquet with options by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/506\n- Documentation updates for 0.6 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/507\n- Avoid bigint for metadata queries by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/508\n- Update async API by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/510\n- Add test to read empty file by @kylebarron in 
https://github.com/kylebarron/parquet-wasm/pull/512\n- bump arrow libraries to version 51 by @jdoig in https://github.com/kylebarron/parquet-wasm/pull/496\n\n### New Contributors\n\n- @fspoettel made their first contribution in https://github.com/kylebarron/parquet-wasm/pull/466\n- @jdoig made their first contribution in https://github.com/kylebarron/parquet-wasm/pull/496\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.5.0...v0.6.0\n\n## [0.5.0] - 2023-10-21\n\n### What's Changed\n\n- Switch to an API based on table abstractions from [arrow-wasm](https://github.com/kylebarron/arrow-wasm).\n- Update docs\n- Initial implementation of reading to a stream of Arrow batches. By @H-Plus-Time in https://github.com/kylebarron/parquet-wasm/pull/296\n\n### New Contributors\n\n- @H-Plus-Time made their first contribution in https://github.com/kylebarron/parquet-wasm/pull/296\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.4.0...v0.5.0\n\n## [0.4.0] - 2023-08-15\n\n### What's Changed\n\n- Async reader support in the arrow2 bindings\n- Improved memory usage via `readParquetFFI` in conjunction with `arrow-js-ffi`.\n- Remove \"2\" from function names in arrow2 api by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/173\n- Make arrow2 the default bundle by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/174\n- Add bindings for arrow2 metadata (without serde support) by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/153\n- Add lz4_raw and zstd compressions for parquet2 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/114\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.3.1...v0.4.0\n\n## [0.4.0-beta.1] - 2022-08-08\n\n### What's Changed\n\n- Add lz4_raw and zstd compressions for parquet2 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/114\n- Simplify cargo features by @kylebarron in 
https://github.com/kylebarron/parquet-wasm/pull/117\n- Add vscode rust-analyzer target setting by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/131\n- add msrv by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/132\n- pin clap to 3.1.\\* by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/139\n- Make writerProperties optional in JS api by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/152\n- Add bindings for arrow2 metadata (without serde support) by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/153\n- Async reader by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/96\n- Cleaner error handling by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/157\n- implement `From` instead of custom methods by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/168\n- Remove \"2\" from function names in arrow2 api by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/173\n- Make arrow2 the default bundle by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/174\n- Improved documentation for async reading by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/175\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.3.1...v0.4.0-beta.1\n\n## [0.3.1] - 2022-04-26\n\n### What's Changed\n\n- Bump arrow from 11.0.0 to 11.1.0 by @dependabot in https://github.com/kylebarron/parquet-wasm/pull/77\n- Update lockfile by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/76\n- Add clippy by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/78\n- Remove old debug script by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/79\n- Bump clap from 3.1.8 to 3.1.9 by @dependabot in https://github.com/kylebarron/parquet-wasm/pull/87\n- Check that input exists/is a uint8array by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/102\n- Update test files to those written by 
pyarrow v7 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/103\n- Update to arrow and parquet 12.0 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/105\n- Bump clap from 3.1.9 to 3.1.12 by @dependabot in https://github.com/kylebarron/parquet-wasm/pull/98\n- Create arrow1/arrow2 read benchmarks by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/82\n- Publish docs on tag by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/106\n- Update readme by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/107\n- Add published examples section to readme by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/108\n- Unify build script by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/109\n- esm2 entrypoint with no import.meta.url by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/110\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.3.0...v0.3.1\n\n## [0.3.0] - 2022-04-04\n\n### What's Changed\n\n- Debug cli by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/64\n- Bump to arrow 11.0 to support zstd compression by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/66\n- Update bundling by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/67\n- Add dependabot by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/70\n- Bump clap from 3.1.6 to 3.1.8 by @dependabot in https://github.com/kylebarron/parquet-wasm/pull/71\n- Bump getrandom from 0.2.5 to 0.2.6 by @dependabot in https://github.com/kylebarron/parquet-wasm/pull/72\n\n### New Contributors\n\n- @dependabot made their first contribution in https://github.com/kylebarron/parquet-wasm/pull/71\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.2.0...v0.3.0\n\n## [0.2.0] - 2022-03-17\n\n- Restore arrow-rs support by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/21\n- Write parquet with arrow1 by 
@kylebarron in https://github.com/kylebarron/parquet-wasm/pull/23\n- Refactor code into lower-level functions, use `?` operator by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/25\n- Make record batch size the nrows of the first row group by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/26\n- Rename arrow-rs api as default by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/31\n- Implement writerPropertiesBuilder for arrow1 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/30\n- Refactor into modules by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/32\n- Update bundling to create arrow2 entrypoints by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/33\n- Node testing setup by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/34\n- Helper to copy vec<u8> to Uint8Array by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/38\n- Faster builds on Node CI tests by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/39\n- Rust CI caching by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/40\n- ZSTD mac instructions in readme by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/42\n- Keep opt-level = s and remove `console_error_panic_hook` by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/48\n- WriterPropertiesBuilder for arrow2 by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/49\n- Docstrings for public functions, structs, enums by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/50\n- Compression-specific features by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/51\n- Add more node tests by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/52\n- Separate reader and writer features by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/47\n- Docs update by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/53\n- 
Working typedoc by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/55\n- Update docstrings and readme by @kylebarron in https://github.com/kylebarron/parquet-wasm/pull/60\n\n**Full Changelog**: https://github.com/kylebarron/parquet-wasm/compare/v0.1.1...v0.2.0\n\n## [0.1.1] - 2022-03-06\n\n- Attempt better bundling, with APIs for bundlers, Node, and the Web.\n\n## [0.1.0] - 2022-03-06\n\n- Initial release\n- Barebones `read_parquet` and `write_parquet` functions.\n"
  },
  {
    "path": "Cargo.toml",
    "content": "[package]\nname = \"parquet-wasm\"\nversion = \"0.7.1\"\nauthors = [\"Kyle Barron <kylebarron2@gmail.com>\"]\nedition = \"2024\"\ndescription = \"WebAssembly Parquet reader and writer.\"\nreadme = \"README.md\"\nrepository = \"https://github.com/kylebarron/parquet-wasm\"\nlicense = \"MIT OR Apache-2.0\"\nkeywords = [\"parquet\", \"webassembly\", \"arrow\"]\ncategories = [\"wasm\"]\nrust-version = \"1.85\"\n\n[lib]\ncrate-type = [\"cdylib\", \"rlib\"]\n\n[features]\ndefault = [\"all_compressions\", \"reader\", \"writer\", \"async\"]\nreader = []\nwriter = []\nasync = [\n    \"dep:wasm-bindgen-futures\",\n    \"dep:futures\",\n    \"dep:range-reader\",\n    \"dep:reqwest\",\n    \"dep:wasm-streams\",\n    \"dep:async-compat\",\n    \"dep:async-stream\",\n    \"parquet/async\",\n]\ndebug = [\"console_error_panic_hook\"]\n\nbrotli = [\"parquet/brotli\"]\ngzip = [\"parquet/flate2\", \"parquet/flate2-zlib-rs\"]\nsnappy = [\"parquet/snap\"]\nzstd = [\"parquet/zstd\", \"dep:zstd\", \"zstd-sys\"]\nlz4 = [\"parquet/lz4\"]\n\nall_compressions = [\"brotli\", \"gzip\", \"snappy\", \"zstd\", \"lz4\"]\n\n# Full list of available features\nfull = [\"async\", \"debug\", \"all_compressions\", \"reader\", \"writer\"]\n\n[dependencies]\nwasm-bindgen = { version = \"0.2.95\", features = [\"serde-serialize\"] }\nserde = \"1.0.225\"\nserde-wasm-bindgen = { version = \"0.6.5\" }\n# The `console_error_panic_hook` crate provides better debugging of panics by\n# logging them with `console.error`. This is great for development, but requires\n# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for\n# code size when deploying.\nconsole_error_panic_hook = { version = \"0.1.6\", optional = true }\n\n# `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size\n# compared to the default allocator's ~10K. 
It is slower than the default\n# allocator, however.\n# if wee_alloc only saves 10KB, might not be worth the slower allocation speed?\n# wee_alloc = \"0.4.5\"\n\njs-sys = \"0.3.72\"\nthiserror = \"2.0\"\n\narrow-wasm = { git = \"https://github.com/kylebarron/arrow-wasm\", rev = \"6da94ef0a1522a244984a7d3d58a0339d0851d96\", default-features = false, features = [\n    \"table\",\n    \"record_batch\",\n    \"schema\",\n] }\n\narrow = { version = \"56.1\", default-features = false, features = [\n    \"ipc\",\n    \"ffi\",\n] }\narrow-schema = \"56.1\"\nparquet = { version = \"56.1\", default-features = false, features = [\n    \"arrow\",\n    \"base64\",\n] }\nbytes = \"1\"\n# We only bring this in for coalesce_ranges\nobject_store = { version = \"0.12\", default-features = false }\n\nwasm-bindgen-futures = { version = \"0.4.45\", optional = true }\nfutures = { version = \"0.3\", optional = true }\nrange-reader = { version = \"0.2\", optional = true }\nreqwest = { version = \"0.12.23\", optional = true, default-features = false }\n\n# Pass \"wasm\" and \"thin\" down to the transitive zstd dependency\nzstd = { version = \"*\", features = [\n    \"wasm\",\n    \"thin\",\n], default-features = false, optional = true }\nzstd-sys = { version = \"=2.0.9\", optional = true, default-features = false }\n# 0.2.3 crashes the Node tests. 
See\n# https://github.com/kylebarron/parquet-wasm/pull/496#issuecomment-2057374608\nasync-compat = { version = \"=0.2.2\", optional = true }\nasync-stream = { version = \"0.3.6\", optional = true }\nwasm-streams = { version = \"0.4.2\", optional = true }\nasync-trait = \"0.1.89\"\nurl = \"2.5.7\"\n\n# https://github.com/kylebarron/arrow-wasm/issues/8#issuecomment-2790469295\n[dependencies.getrandom_v03]\npackage = \"getrandom\"\nversion = \"0.3\"\nfeatures = [\"wasm_js\"]\n\n# https://github.com/kylebarron/arrow-wasm/issues/8#issuecomment-2790469295\n[dependencies.getrandom_v02]\npackage = \"getrandom\"\nversion = \"0.2\"\nfeatures = [\"js\"]\n\n[dependencies.web-sys]\nversion = \"0.3.72\"\nfeatures = [\n    'console',\n    'Headers',\n    'Request',\n    'RequestInit',\n    'RequestMode',\n    'Response',\n    'Window',\n    \"Document\",\n    \"Element\",\n    \"File\",\n]\n\n[dev-dependencies]\nwasm-bindgen-test = \"0.3.51\"\n\n[package.metadata.cargo-all-features]\n\n# If your crate has a large number of optional dependencies, skip them for speed\nskip_optional_dependencies = true\n\n# Exclude certain features from the build matrix\ndenylist = [\n    \"full\",\n    \"all_compressions\",\n    \"default\",\n    \"brotli\",\n    \"gzip\",\n    \"snappy\",\n    \"zstd\",\n    \"lz4\",\n]\n\n[profile.release]\n# Tell `rustc` to optimize for small code size.\n# As of 3/15/22, opt-level = s was smallest\n# https://github.com/kylebarron/parquet-wasm/pull/48\nopt-level = \"s\"\nlto = true\n"
  },
  {
    "path": "DEVELOP.md",
    "content": "# Development\n\n- Install [wasm-pack](https://rustwasm.github.io/wasm-pack/)\n- Compile: `wasm-pack build`, or change targets, e.g. `wasm-pack build --target nodejs`\n- Publish `wasm-pack publish`.\n\n### MacOS\n\nSome steps may need a specific configuration if run on MacOS. Specifically, the default `clang` shipped with Macs (as of March 2022) doesn't have WebAssembly compilation supported out of the box. To build ZSTD, you may need to install a later version via Homebrew and update your paths to find the correct executables.\n\n```\nbrew install llvm\nexport PATH=\"/usr/local/opt/llvm/bin/:$PATH\"\nexport CC=/usr/local/opt/llvm/bin/clang\nexport AR=/usr/local/opt/llvm/bin/llvm-ar\n```\n\nNote that homebrew paths are different on an Apple ARM-based Mac:\n\n```\nbrew install llvm\nexport PATH=\"/opt/homebrew/opt/llvm/bin/:$PATH\"\nexport CC=/opt/homebrew/opt/llvm/bin/clang\nexport AR=/opt/homebrew/opt/llvm/bin/llvm-ar\n```\n\nSee [this description](https://github.com/kylebarron/parquet-wasm/pull/2#issue-1159174043) and its references for more info.\n\n## Publishing\n\n`wasm-pack` supports [three different targets](https://rustwasm.github.io/docs/wasm-pack/commands/build.html#target):\n\n- `bundler` (used with bundlers like Webpack)\n- `nodejs` (used with Node, supports `require`)\n- `web` (used as an ES module directly from the web)\n\nThere are good reasons to distribute as any of these... so why not distribute as all three? `wasm-pack` doesn't support this directly but the build script in `scripts/build.sh` calls `wasm-pack` three times and merges the outputs. This means that bundler users can use the default, Node users can use `parquet-wasm/node` and ES Modules users can use `parquet-wasm/web` in their imports.\n\nTo publish:\n\n```\nyarn build\nwasm-pack publish\n```\n"
  },
  {
    "path": "LICENSE_APACHE",
    "content": "                              Apache License\n                        Version 2.0, January 2004\n                     http://www.apache.org/licenses/\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n1. Definitions.\n\n   \"License\" shall mean the terms and conditions for use, reproduction,\n   and distribution as defined by Sections 1 through 9 of this document.\n\n   \"Licensor\" shall mean the copyright owner or entity authorized by\n   the copyright owner that is granting the License.\n\n   \"Legal Entity\" shall mean the union of the acting entity and all\n   other entities that control, are controlled by, or are under common\n   control with that entity. For the purposes of this definition,\n   \"control\" means (i) the power, direct or indirect, to cause the\n   direction or management of such entity, whether by contract or\n   otherwise, or (ii) ownership of fifty percent (50%) or more of the\n   outstanding shares, or (iii) beneficial ownership of such entity.\n\n   \"You\" (or \"Your\") shall mean an individual or Legal Entity\n   exercising permissions granted by this License.\n\n   \"Source\" form shall mean the preferred form for making modifications,\n   including but not limited to software source code, documentation\n   source, and configuration files.\n\n   \"Object\" form shall mean any form resulting from mechanical\n   transformation or translation of a Source form, including but\n   not limited to compiled object code, generated documentation,\n   and conversions to other media types.\n\n   \"Work\" shall mean the work of authorship, whether in Source or\n   Object form, made available under the License, as indicated by a\n   copyright notice that is included in or attached to the work\n   (an example is provided in the Appendix below).\n\n   \"Derivative Works\" shall mean any work, whether in Source or Object\n   form, that is based on (or derived from) the Work and for which the\n   editorial revisions, 
annotations, elaborations, or other modifications\n   represent, as a whole, an original work of authorship. For the purposes\n   of this License, Derivative Works shall not include works that remain\n   separable from, or merely link (or bind by name) to the interfaces of,\n   the Work and Derivative Works thereof.\n\n   \"Contribution\" shall mean any work of authorship, including\n   the original version of the Work and any modifications or additions\n   to that Work or Derivative Works thereof, that is intentionally\n   submitted to Licensor for inclusion in the Work by the copyright owner\n   or by an individual or Legal Entity authorized to submit on behalf of\n   the copyright owner. For the purposes of this definition, \"submitted\"\n   means any form of electronic, verbal, or written communication sent\n   to the Licensor or its representatives, including but not limited to\n   communication on electronic mailing lists, source code control systems,\n   and issue tracking systems that are managed by, or on behalf of, the\n   Licensor for the purpose of discussing and improving the Work, but\n   excluding communication that is conspicuously marked or otherwise\n   designated in writing by the copyright owner as \"Not a Contribution.\"\n\n   \"Contributor\" shall mean Licensor and any individual or Legal Entity\n   on behalf of whom a Contribution has been received by Licensor and\n   subsequently incorporated within the Work.\n\n2. Grant of Copyright License. Subject to the terms and conditions of\n   this License, each Contributor hereby grants to You a perpetual,\n   worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n   copyright license to reproduce, prepare Derivative Works of,\n   publicly display, publicly perform, sublicense, and distribute the\n   Work and such Derivative Works in Source or Object form.\n\n3. Grant of Patent License. 
Subject to the terms and conditions of\n   this License, each Contributor hereby grants to You a perpetual,\n   worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n   (except as stated in this section) patent license to make, have made,\n   use, offer to sell, sell, import, and otherwise transfer the Work,\n   where such license applies only to those patent claims licensable\n   by such Contributor that are necessarily infringed by their\n   Contribution(s) alone or by combination of their Contribution(s)\n   with the Work to which such Contribution(s) was submitted. If You\n   institute patent litigation against any entity (including a\n   cross-claim or counterclaim in a lawsuit) alleging that the Work\n   or a Contribution incorporated within the Work constitutes direct\n   or contributory patent infringement, then any patent licenses\n   granted to You under this License for that Work shall terminate\n   as of the date such litigation is filed.\n\n4. Redistribution. You may reproduce and distribute copies of the\n   Work or Derivative Works thereof in any medium, with or without\n   modifications, and in Source or Object form, provided that You\n   meet the following conditions:\n\n   (a) You must give any other recipients of the Work or\n       Derivative Works a copy of this License; and\n\n   (b) You must cause any modified files to carry prominent notices\n       stating that You changed the files; and\n\n   (c) You must retain, in the Source form of any Derivative Works\n       that You distribute, all copyright, patent, trademark, and\n       attribution notices from the Source form of the Work,\n       excluding those notices that do not pertain to any part of\n       the Derivative Works; and\n\n   (d) If the Work includes a \"NOTICE\" text file as part of its\n       distribution, then any Derivative Works that You distribute must\n       include a readable copy of the attribution notices contained\n       within such NOTICE file, excluding 
those notices that do not\n       pertain to any part of the Derivative Works, in at least one\n       of the following places: within a NOTICE text file distributed\n       as part of the Derivative Works; within the Source form or\n       documentation, if provided along with the Derivative Works; or,\n       within a display generated by the Derivative Works, if and\n       wherever such third-party notices normally appear. The contents\n       of the NOTICE file are for informational purposes only and\n       do not modify the License. You may add Your own attribution\n       notices within Derivative Works that You distribute, alongside\n       or as an addendum to the NOTICE text from the Work, provided\n       that such additional attribution notices cannot be construed\n       as modifying the License.\n\n   You may add Your own copyright statement to Your modifications and\n   may provide additional or different license terms and conditions\n   for use, reproduction, or distribution of Your modifications, or\n   for any such Derivative Works as a whole, provided Your use,\n   reproduction, and distribution of the Work otherwise complies with\n   the conditions stated in this License.\n\n5. Submission of Contributions. Unless You explicitly state otherwise,\n   any Contribution intentionally submitted for inclusion in the Work\n   by You to the Licensor shall be under the terms and conditions of\n   this License, without any additional terms or conditions.\n   Notwithstanding the above, nothing herein shall supersede or modify\n   the terms of any separate license agreement you may have executed\n   with Licensor regarding such Contributions.\n\n6. Trademarks. This License does not grant permission to use the trade\n   names, trademarks, service marks, or product names of the Licensor,\n   except as required for reasonable and customary use in describing the\n   origin of the Work and reproducing the content of the NOTICE file.\n\n7. Disclaimer of Warranty. 
Unless required by applicable law or\n   agreed to in writing, Licensor provides the Work (and each\n   Contributor provides its Contributions) on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n   implied, including, without limitation, any warranties or conditions\n   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n   PARTICULAR PURPOSE. You are solely responsible for determining the\n   appropriateness of using or redistributing the Work and assume any\n   risks associated with Your exercise of permissions under this License.\n\n8. Limitation of Liability. In no event and under no legal theory,\n   whether in tort (including negligence), contract, or otherwise,\n   unless required by applicable law (such as deliberate and grossly\n   negligent acts) or agreed to in writing, shall any Contributor be\n   liable to You for damages, including any direct, indirect, special,\n   incidental, or consequential damages of any character arising as a\n   result of this License or out of the use or inability to use the\n   Work (including but not limited to damages for loss of goodwill,\n   work stoppage, computer failure or malfunction, or any and all\n   other commercial damages or losses), even if such Contributor\n   has been advised of the possibility of such damages.\n\n9. Accepting Warranty or Additional Liability. While redistributing\n   the Work or Derivative Works thereof, You may choose to offer,\n   and charge a fee for, acceptance of support, warranty, indemnity,\n   or other liability obligations and/or rights consistent with this\n   License. 
However, in accepting such obligations, You may act only\n   on Your own behalf and on Your sole responsibility, not on behalf\n   of any other Contributor, and only if You agree to indemnify,\n   defend, and hold each Contributor harmless for any liability\n   incurred by, or claims asserted against, such Contributor by reason\n   of your accepting any such warranty or additional liability.\n\nEND OF TERMS AND CONDITIONS\n"
  },
  {
    "path": "LICENSE_MIT",
    "content": "Copyright (c) 2022 Kyle Barron\n\nPermission is hereby granted, free of charge, to any\nperson obtaining a copy of this software and associated\ndocumentation files (the \"Software\"), to deal in the\nSoftware without restriction, including without\nlimitation the rights to use, copy, modify, merge,\npublish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software\nis furnished to do so, subject to the following\nconditions:\n\nThe above copyright notice and this permission notice\nshall be included in all copies or substantial portions\nof the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\nANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\nTO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\nPARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT\nSHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# WASM Parquet [![npm version](https://img.shields.io/npm/v/parquet-wasm.svg)](https://www.npmjs.com/package/parquet-wasm)\n\nWebAssembly bindings to read and write the [Apache Parquet](https://parquet.apache.org/) format to and from [Apache Arrow](https://arrow.apache.org/) using the Rust [`parquet`](https://crates.io/crates/parquet) and [`arrow`](https://crates.io/crates/arrow) crates.\n\nThis is designed to be used alongside a JavaScript Arrow implementation, such as the canonical [JS Arrow library](https://arrow.apache.org/docs/js/).\n\nIncluding read and write support and all compression codecs, the brotli-compressed WASM bundle is 1.2 MB. Refer to [custom builds](#custom-builds) for how to build a smaller bundle. A minimal read-only bundle without compression support can be as small as 456 KB brotli-compressed.\n\n## Install\n\n`parquet-wasm` is published to NPM. Install with\n\n```\nyarn add parquet-wasm\n```\n\nor\n\n```\nnpm install parquet-wasm\n```\n\n## API\n\nParquet-wasm has both a synchronous and asynchronous API. 
The sync API is simpler but requires fetching the entire Parquet buffer in advance, which is often prohibitive.\n\n### Sync API\n\nRefer to these functions:\n\n- [`readParquet`](https://kylebarron.dev/parquet-wasm/functions/esm_parquet_wasm.readParquet.html): Read a Parquet file synchronously.\n- [`readSchema`](https://kylebarron.dev/parquet-wasm/functions/esm_parquet_wasm.readSchema.html): Read an Arrow schema from a Parquet file synchronously.\n- [`writeParquet`](https://kylebarron.dev/parquet-wasm/functions/esm_parquet_wasm.writeParquet.html): Write a Parquet file synchronously.\n\n### Async API\n\n- [`readParquetStream`](https://kylebarron.dev/parquet-wasm/functions/esm_parquet_wasm.readParquetStream.html): Create a [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) that emits Arrow RecordBatches from a Parquet file.\n- [`ParquetFile`](https://kylebarron.dev/parquet-wasm/classes/esm_parquet_wasm.ParquetFile.html): A class for reading portions of a remote Parquet file. Use [`fromUrl`](https://kylebarron.dev/parquet-wasm/classes/esm_parquet_wasm.ParquetFile.html#fromUrl) to construct from a remote URL or [`fromFile`](https://kylebarron.dev/parquet-wasm/classes/esm_parquet_wasm.ParquetFile.html#fromFile) to construct from a [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) handle. Note that when you're done using this class, you'll need to call [`free`](https://kylebarron.dev/parquet-wasm/classes/esm_parquet_wasm.ParquetFile.html#free) to release any memory held by the ParquetFile instance itself.\n\n\nBoth sync and async functions return or accept a [`Table`](https://kylebarron.dev/parquet-wasm/classes/bundler_parquet_wasm.Table.html) class, an Arrow table in WebAssembly memory. 
Refer to its documentation for moving data into/out of WebAssembly.\n\n## Entry Points\n\n\n| Entry point                                                               | Description                                             | Documentation        |\n| ------------------------------------------------------------------------- | ------------------------------------------------------- | -------------------- |\n| `parquet-wasm`, `parquet-wasm/esm`, or `parquet-wasm/esm/parquet_wasm.js` | ESM, to be used directly from the Web as an ES Module   | [Link][esm-docs]     |\n| `parquet-wasm/bundler`                                                    | \"Bundler\" build, to be used in bundlers such as Webpack | [Link][bundler-docs] |\n| `parquet-wasm/node`                                                       | Node build, to be used with synchronous `require` in NodeJS         | [Link][node-docs]    |\n\n[bundler-docs]: https://kylebarron.dev/parquet-wasm/modules/bundler_parquet_wasm.html\n[node-docs]: https://kylebarron.dev/parquet-wasm/modules/node_parquet_wasm.html\n[esm-docs]: https://kylebarron.dev/parquet-wasm/modules/esm_parquet_wasm.html\n\n### ESM\n\nThe `esm` entry point is the primary entry point. It is the default export from `parquet-wasm`, and is also accessible at `parquet-wasm/esm` and `parquet-wasm/esm/parquet_wasm.js` (for symmetric imports [directly from a browser](#using-directly-from-a-browser)).\n\n**Note that when using the `esm` bundles, you must manually initialize the WebAssembly module before using any APIs**. Otherwise, you'll get an error `TypeError: Cannot read properties of undefined`. 
There are multiple ways to initialize the WebAssembly code:\n\n#### Asynchronous initialization\n\nThe primary way to initialize is by awaiting the default export.\n\n```js\nimport wasmInit, {readParquet} from \"parquet-wasm\";\n\nawait wasmInit();\n```\n\nWithout any parameter, this will try to fetch a file named `'parquet_wasm_bg.wasm'` at the same location as `parquet-wasm`. (E.g. this snippet `input = new URL('parquet_wasm_bg.wasm', import.meta.url);`).\n\nNote that you can also pass in a custom URL if you want to host the `.wasm` file on your own servers.\n\n```js\nimport wasmInit, {readParquet} from \"parquet-wasm\";\n\n// Update this version to match the version you're using.\nconst wasmUrl = \"https://cdn.jsdelivr.net/npm/parquet-wasm@0.6.1/esm/parquet_wasm_bg.wasm\";\nawait wasmInit(wasmUrl);\n```\n\n#### Synchronous initialization\n\nThe `initSync` named export allows for\n\n```js\nimport {initSync, readParquet} from \"parquet-wasm\";\n\n// The contents of esm/parquet_wasm_bg.wasm in an ArrayBuffer\nconst wasmBuffer = new ArrayBuffer(...);\n\n// Initialize the Wasm synchronously\ninitSync(wasmBuffer)\n```\n\nAsync initialization should be preferred over downloading the Wasm buffer and then initializing it synchronously, as [`WebAssembly.instantiateStreaming`](https://developer.mozilla.org/en-US/docs/WebAssembly/JavaScript_interface/instantiateStreaming_static) is the most efficient way to both download and initialize Wasm code.\n\n### Bundler\n\nThe `bundler` entry point doesn't require manual initialization of the WebAssembly blob, but needs setup with whatever bundler you're using. 
[Refer to the Rust Wasm documentation for more info](https://rustwasm.github.io/docs/wasm-bindgen/reference/deployment.html#bundlers).\n\n### Node\n\nThe `node` entry point can be loaded synchronously from Node.\n\n```js\nconst {readParquet} = require(\"parquet-wasm\");\n\nconst wasmTable = readParquet(...);\n```\n\n### Using directly from a browser\n\nYou can load the `esm/parquet_wasm.js` file directly from a CDN\n\n```js\nconst parquet = await import(\n  \"https://cdn.jsdelivr.net/npm/parquet-wasm@0.6.1/esm/+esm\"\n)\nawait parquet.default();\n\nconst wasmTable = parquet.readParquet(...);\n```\n\nThis specific endpoint will minify the ESM before you receive it.\n\n### Debug functions\n\nThese functions are not present in normal builds to cut down on bundle size. To create a custom build, see [Custom Builds](#custom-builds) below.\n\n#### `setPanicHook`\n\n`setPanicHook(): void`\n\nSets [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook) in Rust, which provides better debugging of panics by having more informative `console.error` messages. 
Initialize this first if you're getting errors such as `RuntimeError: Unreachable executed`.\n\nThe WASM bundle must be compiled with the `console_error_panic_hook` feature for this function to exist.\n\n## Example\n\n```js\nimport * as arrow from \"apache-arrow\";\nimport initWasm, {\n  Compression,\n  readParquet,\n  Table,\n  writeParquet,\n  WriterPropertiesBuilder,\n} from \"parquet-wasm\";\n\n// Instantiate the WebAssembly context\nawait initWasm();\n\n// Create Arrow Table in JS\nconst LENGTH = 2000;\nconst rainAmounts = Float32Array.from({ length: LENGTH }, () =>\n  Number((Math.random() * 20).toFixed(1))\n);\n\nconst rainDates = Array.from(\n  { length: LENGTH },\n  (_, i) => new Date(Date.now() - 1000 * 60 * 60 * 24 * i)\n);\n\nconst rainfall = arrow.tableFromArrays({\n  precipitation: rainAmounts,\n  date: rainDates,\n});\n\n// Write Arrow Table to Parquet\n\n// wasmTable is an Arrow table in WebAssembly memory\nconst wasmTable = Table.fromIPCStream(arrow.tableToIPC(rainfall, \"stream\"));\nconst writerProperties = new WriterPropertiesBuilder()\n  .setCompression(Compression.ZSTD)\n  .build();\nconst parquetUint8Array = writeParquet(wasmTable, writerProperties);\n\n// Read Parquet buffer back to Arrow Table\n// arrowWasmTable is an Arrow table in WebAssembly memory\nconst arrowWasmTable = readParquet(parquetUint8Array);\n\n// table is now an Arrow table in JS memory\nconst table = arrow.tableFromIPC(arrowWasmTable.intoIPCStream());\nconsole.log(table.schema.toString());\n// Schema<{ 0: precipitation: Float32, 1: date: Date64<MILLISECOND> }>\n```\n\n### Published examples\n\n(These may use older versions of the library with a different API).\n\n- [GeoParquet on the Web (Observable)](https://observablehq.com/@kylebarron/geoparquet-on-the-web)\n- [Hello, Parquet-WASM (Observable)](https://observablehq.com/@bmschmidt/hello-parquet-wasm)\n\n## Comparison to [`hyparquet`](https://github.com/hyparam/hyparquet)\n\n`hyparquet` is another Parquet reader for 
JavaScript. That project is written in pure JavaScript and has subtly different goals and comparing it to `parquet-wasm` provides benefits and costs.\n\n**tl;dr**: if you can use [Arrow](https://arrow.apache.org/) in your app, use `parquet-wasm` for better performance and memory usage. Otherwise, or if you are latency-focused, hyparquet could be better for you.\n\n### Advantages of `hyparquet`:\n\n- Smaller bundle size\n- Pure JS, so easier to debug\n- No WebAssembly, so no initialization step\n- No WebAssembly, so no separate memory space.\n\n### Advantages of `parquet-wasm`:\n\n- Faster for large files, as it uses a very high-performance Rust Parquet library compiled to WebAssembly\n- Faster and more memory efficient because it loads data into Apache Arrow, a high-performance binary memory format. In comparison, hyparquet loads data to JS objects, which are _much_ less memory efficient than Arrow buffers. This is especially true for large files.\n- Even though `parquet-wasm` has a larger bundle size, the bandwidth savings of loading large amounts of Parquet can quickly make up for that overhead.\n\n### Conclusion\n\n- If you only need to load the _metadata_ of Parquet files, or if you have _very small_ Parquet files, using hyparquet could be a good choice as hyparquet is smaller, and thus the overhead before loading the file could be smaller.\n- If you need the _absolute smallest_ bundle size, hyparquet may be better for your use case.\n- Otherwise, since `parquet-wasm`:\n    1. Uses a really high performance Rust library\n    2. Is running in WebAssembly, and\n    3. 
Converts to a high-performance binary memory format\n\n    If you have large files and can use the resulting Arrow data directly without converting to JS objects, `parquet-wasm` should be significantly faster and more memory efficient.\n\nFeel free to open an issue to discuss more!\n\n## Performance considerations\n\nTl;dr: When you have a `Table` object (resulting from `readParquet`), try the new\n[`Table.intoFFI`](https://kylebarron.dev/parquet-wasm/classes/esm_parquet_wasm.Table.html#intoFFI)\nAPI to move it to JavaScript memory. This API is less well tested than the [`Table.intoIPCStream`](https://kylebarron.dev/parquet-wasm/classes/esm_parquet_wasm.Table.html#intoIPCStream) API, but should be\nfaster and have **much** less memory overhead (by a factor of 2). If you hit any bugs, please\n[create a reproducible issue](https://github.com/kylebarron/parquet-wasm/issues/new).\n\nUnder the hood, `parquet-wasm` first decodes a Parquet file into Arrow _in WebAssembly memory_. But\nthen that WebAssembly memory needs to be copied into JavaScript for use by Arrow JS. The \"normal\"\nconversion APIs (e.g. `Table.intoIPCStream`) use the [Arrow IPC\nformat](https://arrow.apache.org/docs/python/ipc.html) to get the data back to JavaScript. But this\nrequires another memory copy _inside WebAssembly_ to assemble the various arrays into a single\nbuffer to be copied back to JS.\n\nInstead, the new `Table.intoFFI` API uses Arrow's [C Data\nInterface](https://arrow.apache.org/docs/format/CDataInterface.html) to be able to copy or view\nArrow arrays from within WebAssembly memory without any serialization.\n\nNote that this approach uses the [`arrow-js-ffi`](https://github.com/kylebarron/arrow-js-ffi)\nlibrary to parse the Arrow C Data Interface definitions. 
This library has not yet been tested in\nproduction, so it may have bugs!\n\nI wrote an [interactive blog\npost](https://observablehq.com/@kylebarron/zero-copy-apache-arrow-with-webassembly) on this approach\nand the Arrow C Data Interface if you want to read more!\n\n### Example\n\n```js\nimport * as arrow from \"apache-arrow\";\nimport { parseTable } from \"arrow-js-ffi\";\nimport initWasm, { wasmMemory, readParquet } from \"parquet-wasm\";\n\n// Instantiate the WebAssembly context\nawait initWasm();\n\n// A reference to the WebAssembly memory object.\nconst WASM_MEMORY = wasmMemory();\n\nconst resp = await fetch(\"https://example.com/file.parquet\");\nconst parquetUint8Array = new Uint8Array(await resp.arrayBuffer());\nconst wasmArrowTable = readParquet(parquetUint8Array).intoFFI();\n\n// Arrow JS table that was directly copied from Wasm memory\nconst table: arrow.Table = parseTable(\n  WASM_MEMORY.buffer,\n  wasmArrowTable.arrayAddrs(),\n  wasmArrowTable.schemaAddr()\n);\n\n// VERY IMPORTANT! You must call `drop` on the Wasm table object when you're done using it\n// to release the Wasm memory.\n// Note that any access to the pointers in this table is undefined behavior after this call.\n// Calling any `wasmArrowTable` method will error.\nwasmArrowTable.drop();\n```\n\n## Compression support\n\nThe Parquet specification permits several compression codecs. This library currently supports:\n\n- [x] Uncompressed\n- [x] Snappy\n- [x] Gzip\n- [x] Brotli\n- [x] ZSTD\n- [x] LZ4_RAW\n- [ ] LZ4 (deprecated)\n\nLZ4 support in Parquet is a bit messy. As described [here](https://github.com/apache/parquet-format/blob/54e53e5d7794d383529dd30746378f19a12afd58/Compression.md), there are _two_ LZ4 compression options in Parquet (as of version 2.9.0). The original version `LZ4` is now deprecated; it used an undocumented framing scheme which made interoperability difficult. 
The specification now reads:\n\n> It is strongly suggested that implementors of Parquet writers deprecate this compression codec in their user-facing APIs, and advise users to switch to the newer, interoperable `LZ4_RAW` codec.\n\nIt's currently unknown how widespread the ecosystem support is for `LZ4_RAW`. As of `pyarrow` v7, it now writes `LZ4_RAW` by default and presumably has read support for it as well.\n\n## Custom builds\n\nIn some cases, you may know ahead of time that your Parquet files will only include a single compression codec, say Snappy, or even no compression at all. In these cases, you may want to create a custom build of `parquet-wasm` to keep bundle size at a minimum. If you install the Rust toolchain and `wasm-pack` (see [Development](DEVELOP.md)), you can create a custom build with only the compression codecs you require.\n\nThe minimum supported Rust version in this project is 1.60. To upgrade your toolchain, use `rustup update stable`.\n\n### Example custom builds\n\nReader-only bundle with Snappy compression:\n\n```\nwasm-pack build --no-default-features --features snappy --features reader\n```\n\nWriter-only bundle with no compression support, targeting Node:\n\n```\nwasm-pack build --target nodejs --no-default-features --features writer\n```\n\nBundle with reader and writer support, targeting Node, using `arrow` and `parquet` crates with all their supported compressions, with `console_error_panic_hook` enabled:\n\n```bash\nwasm-pack build \\\n  --target nodejs \\\n  --no-default-features \\\n  --features reader \\\n  --features writer \\\n  --features all_compressions \\\n  --features debug\n# Or, given the fact that the default feature includes several of these features, a shorter version:\nwasm-pack build --target nodejs --features debug\n```\n\nRefer to the [`wasm-pack` documentation](https://rustwasm.github.io/docs/wasm-pack/commands/build.html) for more info on flags such as `--release`, `--dev`, `target`, and to the [Cargo 
documentation](https://doc.rust-lang.org/cargo/reference/features.html) for more info on how to use features.\n\n### Available features\n\nBy default, `all_compressions`, `reader`, `writer`, and `async` features are enabled. Use `--no-default-features` to remove these defaults.\n\n- `reader`: Activate read support.\n- `writer`: Activate write support.\n- `async`: Activate asynchronous read support.\n- `all_compressions`: Activate all supported compressions.\n- `brotli`: Activate Brotli compression.\n- `gzip`: Activate Gzip compression.\n- `snappy`: Activate Snappy compression.\n- `zstd`: Activate ZSTD compression.\n- `lz4`: Activate LZ4_RAW compression.\n- `debug`: Expose the `setPanicHook` function for better error messages for Rust panics.\n\n## Node <20\n\nOn Node versions before 20, you'll have to [polyfill the Web Cryptography API](https://docs.rs/getrandom/latest/getrandom/#nodejs-es-module-support).\n\n## Future work\n\n- [ ] Example of pushdown predicate filtering, to download only chunks that match a specific condition\n- [ ] Column filtering, to download only certain columns\n- [ ] More tests\n\n## Acknowledgements\n\nA starting point of my work came from @my-liminal-space's [`read-parquet-browser`](https://github.com/my-liminal-space/read-parquet-browser) (which is also dual licensed MIT and Apache 2).\n\n@domoritz's [`arrow-wasm`](https://github.com/domoritz/arrow-wasm) was a very helpful reference for bootstrapping Rust-WASM bindings.\n"
  },
  {
    "path": "bench/bench.ts",
    "content": "import b from \"benny\";\nimport * as parquet from \"../pkg/node\";\nimport { readFileSync } from \"fs\";\n\nconst dataDir = `${__dirname}/data`;\n\n// https://stackoverflow.com/a/43053803\nconst cartesian = (...a) =>\n  a.reduce((a, b) => a.flatMap((d) => b.map((e) => [d, e].flat())));\n\nconst partitions = [1, 5, 20];\nconst compressions = [\"brotli\", \"gzip\", \"none\", \"snappy\"];\n\nconst testCases: [number, string][] = cartesian(partitions, compressions);\n\nconst createReadTests = () =>\n  testCases.map(([partitions, compression, api]) => {\n    const file = `${partitions}-partition-${compression}`;\n    const testName = `${api} ${file}`;\n    return b.add(testName, () => {\n      const arr = loadFile(file);\n      return () => parquet.readParquet2(arr);\n    });\n  });\n\nfunction loadFile(name: string): Uint8Array {\n  const dataPath = `${dataDir}/${name}.parquet`;\n  return new Uint8Array(readFileSync(dataPath));\n}\n\nb.suite(\n  \"Read Parquet\",\n\n  ...createReadTests(),\n\n  b.cycle(),\n  b.configure({ minDisplayPrecision: 2 }),\n  b.complete(),\n  b.save({\n    file: \"bench\",\n    folder: \"bench/results/\",\n    version: \"0.3.0\",\n    details: true,\n    format: \"chart.html\",\n  })\n);\n"
  },
  {
    "path": "bench/make_data.py",
    "content": "from pathlib import Path\n\nimport numpy as np\nimport pandas as pd\nimport pyarrow as pa\nimport pyarrow.parquet as pq\n\ncompressions = [\"SNAPPY\", \"GZIP\", \"BROTLI\", \"ZSTD\", \"NONE\"]\n\n\ndef create_table(n_rows=1_000_000):\n    data = {}\n\n    for dtype in [\"uint8\", \"uint16\", \"uint32\"]:\n        data[dtype] = pa.array(np.random.randint(0, np.iinfo(dtype).max, size=n_rows))\n\n    data[\"bool\"] = pa.array(np.random.randint(0, 2, size=n_rows), type=pa.bool_())\n\n    # Todo column with string data?\n    # https://stackoverflow.com/a/2257449\n\n    return pa.table(data)\n\n\ndef write_table(table):\n    # Create data directory\n    Path(\"data\").mkdir(exist_ok=True)\n\n    data_len = len(table)\n    for n_partitions in [1, 5, 20]:\n        for compression in compressions:\n            row_group_size = data_len / n_partitions\n            compression_text = str(compression).lower()\n            fname = f\"data/{n_partitions}-partition-{compression_text}.parquet\"\n            pq.write_table(\n                table, fname, row_group_size=row_group_size, compression=compression\n            )\n\n\ndef main():\n    table = create_table()\n    write_table(table)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "bench/pyproject.toml",
    "content": "[tool.poetry]\nname = \"parquet-wasm-bench\"\nversion = \"0.1.0\"\ndescription = \"Create data for parquet-wasm benchmarks\"\nauthors = [\"Kyle Barron <kylebarron2@gmail.com>\"]\nlicense = \"MIT\"\n\n[tool.poetry.dependencies]\npython = \"^3.8\"\nnumpy = \"^1.22.3\"\npyarrow = \"^7.0.0\"\npandas = \"^1.4.2\"\n\n[tool.poetry.dev-dependencies]\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"scripts\": {\n    \"build\": \"bash ./scripts/build.sh\",\n    \"build:test\": \"ENV='DEV' yarn build\",\n    \"docs:build\": \"typedoc\",\n    \"docs:publish\": \"gh-pages -d docs_build\",\n    \"docs:serve\": \"cd docs_build && python -m http.server 8081\",\n    \"docs:watch\": \"typedoc --watch\",\n    \"test\": \"vitest run ./tests/js/index.test.ts\"\n  },\n  \"devDependencies\": {\n    \"@fastify/static\": \"^7.0.4\",\n    \"@types/node\": \"^24\",\n    \"apache-arrow\": \"^20.0.0\",\n    \"arrow-js-ffi\": \"^0.4.3\",\n    \"benny\": \"^3.7.1\",\n    \"fastify\": \"^4.28.1\",\n    \"gh-pages\": \"^6.2.0\",\n    \"typedoc\": \"^0.28.13\",\n    \"typescript\": \"^5.6.3\",\n    \"vitest\": \"^3.2.4\"\n  },\n  \"volta\": {\n    \"node\": \"20.12.2\",\n    \"yarn\": \"1.22.19\"\n  }\n}\n"
  },
  {
    "path": "scripts/build.sh",
    "content": "#! /usr/bin/env bash\nrm -rf tmp_build pkg\nmkdir -p tmp_build\n\nif [ \"$ENV\" == \"DEV\" ]; then\n  BUILD=\"--dev\"\n  FLAGS=\"--features debug\"\nelse\n  BUILD=\"--release\"\n  FLAGS=\"\"\nfi\n\n# Build node version into tmp_build/node\necho \"Building node\"\nwasm-pack build \\\n  $BUILD \\\n  --out-dir tmp_build/node \\\n  --target nodejs \\\n  $FLAGS &\n[ -n \"$CI\" ] && wait;\n\n# Build web version into tmp_build/esm\necho \"Building esm\"\nwasm-pack build \\\n  $BUILD \\\n  --out-dir tmp_build/esm \\\n  --target web \\\n  $FLAGS &\n[ -n \"$CI\" ] && wait;\n\n# Build bundler version into tmp_build/bundler\necho \"Building bundler\"\nwasm-pack build \\\n  $BUILD \\\n  --out-dir tmp_build/bundler \\\n  --target bundler \\\n  $FLAGS &\nwait\n\n\n# Copy files into pkg/\nmkdir -p pkg/{node,esm,bundler}\n\ncp tmp_build/bundler/parquet* pkg/bundler/\ncp tmp_build/esm/parquet* pkg/esm\ncp tmp_build/node/parquet* pkg/node\n\ncp tmp_build/bundler/{LICENSE_APACHE,LICENSE_MIT,README.md} pkg/\n\n# Copy in combined package.json from template\n# https://stackoverflow.com/a/24904276\n# Note that keys from the second file will overwrite keys from the first.\njq -s '.[0] * .[1]' templates/package.json tmp_build/bundler/package.json > pkg/package.json\n\n# Create minimal package.json in esm/ folder with type: module\necho '{\"type\": \"module\"}' > pkg/esm/package.json\n\n# Update files array in package.json using JQ\njq '.files = [\"*\"] | .module=\"esm/parquet_wasm.js\" | .types=\"esm/parquet_wasm.d.ts\"' pkg/package.json > pkg/package.json.tmp\n\n# Overwrite existing package.json file\nmv pkg/package.json.tmp pkg/package.json\n\nrm -rf tmp_build\n"
  },
  {
    "path": "scripts/report_build.sh",
    "content": "rm -rf report_pkg\nmkdir -p report_pkg\n\necho \"Building arrow-rs slim\"\nwasm-pack build \\\n  --release \\\n  --no-pack \\\n  --out-dir report_pkg/slim \\\n  --out-name parquet_wasm \\\n  --target web \\\n  --no-default-features \\\n  --features={reader,writer}\necho \"Building arrow-rs sync\"\nwasm-pack build \\\n  --release \\\n  --no-pack \\\n  --out-dir report_pkg/sync \\\n  --out-name parquet_wasm \\\n  --target web \\\n  --no-default-features \\\n  --features={reader,writer,all_compressions} &\n\necho \"Building arrow-rs async_full\"\nwasm-pack build \\\n  --release \\\n  --no-pack \\\n  --out-dir report_pkg/async_full \\\n  --out-name parquet_wasm \\\n  --target web \\\n  --features=full &\n\nwait;"
  },
  {
    "path": "src/common/fetch.rs",
    "content": "use futures::channel::oneshot;\nuse futures::future::BoxFuture;\nuse range_reader::{RangeOutput, RangedAsyncReader};\nuse wasm_bindgen::prelude::*;\nuse wasm_bindgen_futures::spawn_local;\n\n/// Get content-length of file\npub async fn _get_content_length(url: String) -> Result<usize, reqwest::Error> {\n    let client = reqwest::Client::new();\n    let resp = client.head(url).send().await?;\n    Ok(resp.content_length().unwrap().try_into().unwrap())\n}\n\npub async fn get_content_length(url: String) -> Result<usize, reqwest::Error> {\n    let (sender, receiver) = oneshot::channel::<usize>();\n    spawn_local(async move {\n        let inner_data = _get_content_length(url).await.unwrap();\n        sender.send(inner_data).unwrap();\n    });\n    let data = receiver.await.unwrap();\n    Ok(data)\n}\n\n/// Construct range header from start and length\npub fn range_from_start_and_length(start: u64, length: u64) -> String {\n    // Subtract 1 from length because end is inclusive\n    // > bytes units ... 
are offsets (zero-indexed & inclusive)\n    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range\n    format!(\"bytes={}-{}\", start, start + length - 1)\n}\n\npub fn range_from_start(start: u64) -> String {\n    format!(\"bytes={start}-\")\n}\n\npub fn range_from_end(length: usize) -> String {\n    format!(\"bytes=-{length}\")\n}\n\n/// Make range request on remote file\nasync fn _make_range_request(\n    url: &str,\n    start: u64,\n    length: usize,\n) -> Result<Vec<u8>, reqwest::Error> {\n    let client = reqwest::Client::new();\n    let range_str = range_from_start_and_length(start, length as u64);\n    let resp = client\n        .get(url)\n        .header(\"Range\", range_str)\n        .send()\n        .await?\n        .error_for_status()?;\n    Ok(resp.bytes().await?.to_vec())\n}\n\npub async fn make_range_request(\n    url: String,\n    start: u64,\n    length: usize,\n) -> Result<Vec<u8>, JsValue> {\n    let (sender, receiver) = oneshot::channel::<Vec<u8>>();\n    spawn_local(async move {\n        let inner_data = _make_range_request(&url, start, length).await.unwrap();\n        sender.send(inner_data).unwrap();\n    });\n    let data = receiver.await.unwrap();\n    Ok(data)\n}\n\n/// Create a RangedAsyncReader\npub fn create_reader(\n    url: String,\n    content_length: usize,\n    min_request_size: Option<usize>,\n) -> RangedAsyncReader {\n    // at least 4kb per s3 request. 
Adjust to your liking.\n    let min_request_size = min_request_size.unwrap_or(4 * 1024);\n\n    // Closure for making an individual HTTP range request to a file\n    let range_get = Box::new(move |start: u64, length: usize| {\n        let url = url.clone();\n\n        Box::pin(async move {\n            let data = make_range_request(url.clone(), start, length)\n                .await\n                .unwrap();\n            Ok(RangeOutput { start, data })\n        }) as BoxFuture<'static, std::io::Result<RangeOutput>>\n    });\n\n    RangedAsyncReader::new(content_length, min_request_size, range_get)\n}\n"
  },
  {
    "path": "src/common/mod.rs",
    "content": "pub mod properties;\n\n#[cfg(feature = \"async\")]\npub mod fetch;\n\n#[cfg(feature = \"async\")]\npub mod stream;\n"
  },
  {
    "path": "src/common/properties.rs",
    "content": "use wasm_bindgen::prelude::*;\n\n/// Supported compression algorithms.\n///\n/// Codecs added in format version X.Y can be read by readers based on X.Y and later.\n/// Codec support may vary between readers based on the format version and\n/// libraries available at runtime.\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n#[allow(non_camel_case_types)]\n#[wasm_bindgen]\npub enum Compression {\n    UNCOMPRESSED,\n    SNAPPY,\n    GZIP,\n    BROTLI,\n    /// @deprecated as of Parquet 2.9.0.\n    /// Switch to LZ4_RAW\n    LZ4,\n    ZSTD,\n    LZ4_RAW,\n    LZO,\n}\n\nimpl From<Compression> for parquet::basic::Compression {\n    fn from(x: Compression) -> parquet::basic::Compression {\n        match x {\n            Compression::UNCOMPRESSED => parquet::basic::Compression::UNCOMPRESSED,\n            Compression::SNAPPY => parquet::basic::Compression::SNAPPY,\n            Compression::GZIP => parquet::basic::Compression::GZIP(Default::default()),\n            Compression::BROTLI => parquet::basic::Compression::BROTLI(Default::default()),\n            Compression::LZ4 => parquet::basic::Compression::LZ4,\n            Compression::ZSTD => parquet::basic::Compression::ZSTD(Default::default()),\n            Compression::LZ4_RAW => parquet::basic::Compression::LZ4_RAW,\n            Compression::LZO => parquet::basic::Compression::LZO,\n        }\n    }\n}\n\nimpl From<parquet::basic::Compression> for Compression {\n    fn from(x: parquet::basic::Compression) -> Compression {\n        match x {\n            parquet::basic::Compression::UNCOMPRESSED => Compression::UNCOMPRESSED,\n            parquet::basic::Compression::SNAPPY => Compression::SNAPPY,\n            parquet::basic::Compression::GZIP(_) => Compression::GZIP,\n            parquet::basic::Compression::BROTLI(_) => Compression::BROTLI,\n            parquet::basic::Compression::LZ4 => Compression::LZ4,\n            parquet::basic::Compression::ZSTD(_) => Compression::ZSTD,\n            
parquet::basic::Compression::LZ4_RAW => Compression::LZ4_RAW,\n            parquet::basic::Compression::LZO => Compression::LZO,\n        }\n    }\n}\n\n/// Encodings supported by Parquet.\n/// Not all encodings are valid for all types. These enums are also used to specify the\n/// encoding of definition and repetition levels.\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n#[allow(non_camel_case_types)]\n#[wasm_bindgen]\npub enum Encoding {\n    /// Default byte encoding.\n    /// - BOOLEAN - 1 bit per value, 0 is false; 1 is true.\n    /// - INT32 - 4 bytes per value, stored as little-endian.\n    /// - INT64 - 8 bytes per value, stored as little-endian.\n    /// - FLOAT - 4 bytes per value, stored as little-endian.\n    /// - DOUBLE - 8 bytes per value, stored as little-endian.\n    /// - BYTE_ARRAY - 4 byte length stored as little endian, followed by bytes.\n    /// - FIXED_LEN_BYTE_ARRAY - just the bytes are stored.\n    PLAIN,\n\n    /// **Deprecated** dictionary encoding.\n    ///\n    /// The values in the dictionary are encoded using PLAIN encoding.\n    /// Since it is deprecated, RLE_DICTIONARY encoding is used for a data page, and\n    /// PLAIN encoding is used for dictionary page.\n    PLAIN_DICTIONARY,\n\n    /// Group packed run length encoding.\n    ///\n    /// Usable for definition/repetition levels encoding and boolean values.\n    RLE,\n\n    /// Bit packed encoding.\n    ///\n    /// This can only be used if the data has a known max width.\n    /// Usable for definition/repetition levels encoding.\n    BIT_PACKED,\n\n    /// Delta encoding for integers, either INT32 or INT64.\n    ///\n    /// Works best on sorted data.\n    DELTA_BINARY_PACKED,\n\n    /// Encoding for byte arrays to separate the length values and the data.\n    ///\n    /// The lengths are encoded using DELTA_BINARY_PACKED encoding.\n    DELTA_LENGTH_BYTE_ARRAY,\n\n    /// Incremental encoding for byte arrays.\n    ///\n    /// Prefix lengths are encoded using 
DELTA_BINARY_PACKED encoding.\n    /// Suffixes are stored using DELTA_LENGTH_BYTE_ARRAY encoding.\n    DELTA_BYTE_ARRAY,\n\n    /// Dictionary encoding.\n    ///\n    /// The ids are encoded using the RLE encoding.\n    RLE_DICTIONARY,\n\n    /// Encoding for floating-point data.\n    ///\n    /// K byte-streams are created where K is the size in bytes of the data type.\n    /// The individual bytes of an FP value are scattered to the corresponding stream and\n    /// the streams are concatenated.\n    /// This itself does not reduce the size of the data but can lead to better compression\n    /// afterwards.\n    BYTE_STREAM_SPLIT,\n}\n\nimpl From<Encoding> for parquet::basic::Encoding {\n    fn from(x: Encoding) -> parquet::basic::Encoding {\n        match x {\n            Encoding::PLAIN => parquet::basic::Encoding::PLAIN,\n            Encoding::PLAIN_DICTIONARY => parquet::basic::Encoding::PLAIN_DICTIONARY,\n            Encoding::RLE => parquet::basic::Encoding::RLE,\n            #[allow(deprecated)]\n            Encoding::BIT_PACKED => parquet::basic::Encoding::BIT_PACKED,\n            Encoding::DELTA_BINARY_PACKED => parquet::basic::Encoding::DELTA_BINARY_PACKED,\n            Encoding::DELTA_LENGTH_BYTE_ARRAY => parquet::basic::Encoding::DELTA_LENGTH_BYTE_ARRAY,\n            Encoding::DELTA_BYTE_ARRAY => parquet::basic::Encoding::DELTA_BYTE_ARRAY,\n            Encoding::RLE_DICTIONARY => parquet::basic::Encoding::RLE_DICTIONARY,\n            Encoding::BYTE_STREAM_SPLIT => parquet::basic::Encoding::BYTE_STREAM_SPLIT,\n        }\n    }\n}\n\nimpl From<parquet::basic::Encoding> for Encoding {\n    fn from(x: parquet::basic::Encoding) -> Encoding {\n        match x {\n            parquet::basic::Encoding::PLAIN => Encoding::PLAIN,\n            parquet::basic::Encoding::PLAIN_DICTIONARY => Encoding::PLAIN_DICTIONARY,\n            parquet::basic::Encoding::RLE => Encoding::RLE,\n            #[allow(deprecated)]\n            parquet::basic::Encoding::BIT_PACKED => 
Encoding::BIT_PACKED,\n            parquet::basic::Encoding::DELTA_BINARY_PACKED => Encoding::DELTA_BINARY_PACKED,\n            parquet::basic::Encoding::DELTA_LENGTH_BYTE_ARRAY => Encoding::DELTA_LENGTH_BYTE_ARRAY,\n            parquet::basic::Encoding::DELTA_BYTE_ARRAY => Encoding::DELTA_BYTE_ARRAY,\n            parquet::basic::Encoding::RLE_DICTIONARY => Encoding::RLE_DICTIONARY,\n            parquet::basic::Encoding::BYTE_STREAM_SPLIT => Encoding::BYTE_STREAM_SPLIT,\n        }\n    }\n}\n\n/// The Parquet version to use when writing\n#[allow(non_camel_case_types)]\n#[wasm_bindgen]\npub enum WriterVersion {\n    V1,\n    V2,\n}\n\nimpl From<WriterVersion> for parquet::file::properties::WriterVersion {\n    fn from(x: WriterVersion) -> parquet::file::properties::WriterVersion {\n        match x {\n            WriterVersion::V1 => parquet::file::properties::WriterVersion::PARQUET_1_0,\n            WriterVersion::V2 => parquet::file::properties::WriterVersion::PARQUET_2_0,\n        }\n    }\n}\n"
  },
  {
    "path": "src/common/stream.rs",
    "content": "use futures::AsyncWrite;\n\npub struct WrappedWritableStream<'writer> {\n    pub stream: wasm_streams::writable::IntoAsyncWrite<'writer>,\n}\n\nimpl AsyncWrite for WrappedWritableStream<'_> {\n    fn poll_write(\n        self: std::pin::Pin<&mut Self>,\n        cx: &mut std::task::Context<'_>,\n        buf: &[u8],\n    ) -> std::task::Poll<std::io::Result<usize>> {\n        AsyncWrite::poll_write(std::pin::Pin::new(&mut self.get_mut().stream), cx, buf)\n    }\n\n    fn poll_flush(\n        self: std::pin::Pin<&mut Self>,\n        cx: &mut std::task::Context<'_>,\n    ) -> std::task::Poll<std::io::Result<()>> {\n        AsyncWrite::poll_flush(std::pin::Pin::new(&mut self.get_mut().stream), cx)\n    }\n\n    fn poll_close(\n        self: std::pin::Pin<&mut Self>,\n        cx: &mut std::task::Context<'_>,\n    ) -> std::task::Poll<std::io::Result<()>> {\n        AsyncWrite::poll_close(std::pin::Pin::new(&mut self.get_mut().stream), cx)\n    }\n}\n\nunsafe impl Send for WrappedWritableStream<'_> {}\n"
  },
  {
    "path": "src/error.rs",
    "content": "use arrow::error::ArrowError;\nuse parquet::errors::ParquetError;\nuse thiserror::Error;\nuse wasm_bindgen::{JsError, JsValue};\n\n#[derive(Error, Debug)]\npub enum ParquetWasmError {\n    #[error(transparent)]\n    ArrowError(Box<ArrowError>),\n\n    #[error(transparent)]\n    ParquetError(Box<ParquetError>),\n    #[error(\"Column {0} not found in table\")]\n    UnknownColumn(String),\n    #[cfg(feature = \"async\")]\n    #[error(\"HTTP error: `{0}`\")]\n    HTTPError(Box<reqwest::Error>),\n    #[error(\"Platform error: `{0}`\")]\n    PlatformSupportError(String),\n    #[error(\"Dyn casting error\")]\n    DynCastingError(JsValue),\n}\n\npub type Result<T> = std::result::Result<T, ParquetWasmError>;\npub type WasmResult<T> = std::result::Result<T, JsError>;\n\nimpl From<ArrowError> for ParquetWasmError {\n    fn from(err: ArrowError) -> Self {\n        Self::ArrowError(Box::new(err))\n    }\n}\n\nimpl From<ParquetError> for ParquetWasmError {\n    fn from(err: ParquetError) -> Self {\n        Self::ParquetError(Box::new(err))\n    }\n}\n\n#[cfg(feature = \"async\")]\nimpl From<reqwest::Error> for ParquetWasmError {\n    fn from(err: reqwest::Error) -> Self {\n        Self::HTTPError(Box::new(err))\n    }\n}\n"
  },
  {
    "path": "src/lib.rs",
    "content": "extern crate web_sys;\n\npub mod common;\npub mod utils;\n\npub mod error;\npub mod metadata;\n#[cfg(feature = \"reader\")]\npub mod read_options;\n#[cfg(feature = \"reader\")]\npub mod reader;\n#[cfg(all(feature = \"reader\", feature = \"async\"))]\npub mod reader_async;\npub mod wasm;\n#[cfg(feature = \"writer\")]\npub mod writer;\n#[cfg(feature = \"writer\")]\npub mod writer_properties;\n\n#[cfg(all(feature = \"writer\", feature = \"async\"))]\npub mod writer_async;\n\n// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global\n// allocator.\n/*#[cfg(feature = \"wee_alloc\")]\n#[global_allocator]\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;*/\n"
  },
  {
    "path": "src/metadata.rs",
    "content": "use wasm_bindgen::prelude::*;\n\nuse crate::common::properties::{Compression, Encoding};\n\n/// Global Parquet metadata.\n#[derive(Debug, Clone)]\n#[wasm_bindgen]\npub struct ParquetMetaData(parquet::file::metadata::ParquetMetaData);\n\n#[wasm_bindgen]\nimpl ParquetMetaData {\n    /// Returns file metadata as reference.\n    #[wasm_bindgen(js_name = fileMetadata)]\n    pub fn file_metadata(&self) -> FileMetaData {\n        self.0.file_metadata().clone().into()\n    }\n\n    /// Returns number of row groups in this file.\n    #[wasm_bindgen(js_name = numRowGroups)]\n    pub fn num_row_groups(&self) -> usize {\n        self.0.num_row_groups()\n    }\n\n    /// Returns row group metadata for `i`th position.\n    /// Position should be less than number of row groups `num_row_groups`.\n    #[wasm_bindgen(js_name = rowGroup)]\n    pub fn row_group(&self, i: usize) -> RowGroupMetaData {\n        self.0.row_group(i).clone().into()\n    }\n\n    /// Returns row group metadata for all row groups\n    #[wasm_bindgen(js_name = rowGroups)]\n    pub fn row_groups(&self) -> Vec<RowGroupMetaData> {\n        self.0\n            .row_groups()\n            .iter()\n            .map(|rg| rg.clone().into())\n            .collect()\n    }\n\n    // /// Returns the column index for this file if loaded\n    // pub fn column_index(&self) -> Option<ParquetColumnIndex> {\n    //     self.0.column_index()\n    // }\n}\n\nimpl From<parquet::file::metadata::ParquetMetaData> for ParquetMetaData {\n    fn from(value: parquet::file::metadata::ParquetMetaData) -> Self {\n        Self(value)\n    }\n}\n\nimpl From<ParquetMetaData> for parquet::file::metadata::ParquetMetaData {\n    fn from(value: ParquetMetaData) -> Self {\n        value.0\n    }\n}\n\n/// Metadata for a Parquet file.\n#[derive(Debug, Clone)]\n#[wasm_bindgen]\npub struct FileMetaData(parquet::file::metadata::FileMetaData);\n\n#[wasm_bindgen]\nimpl FileMetaData {\n    /// Returns version of this file.\n    
#[wasm_bindgen]\n    pub fn version(&self) -> i32 {\n        self.0.version()\n    }\n\n    /// Returns number of rows in the file.\n    #[wasm_bindgen(js_name = numRows)]\n    pub fn num_rows(&self) -> f64 {\n        self.0.num_rows() as f64\n    }\n\n    /// String message for application that wrote this file.\n    ///\n    /// This should have the following format:\n    /// `<application> version <application version> (build <application build hash>)`.\n    ///\n    /// ```shell\n    /// parquet-mr version 1.8.0 (build 0fda28af84b9746396014ad6a415b90592a98b3b)\n    /// ```\n    #[wasm_bindgen(js_name = createdBy)]\n    pub fn created_by(&self) -> Option<String> {\n        let s = self.0.created_by()?;\n        Some(s.to_string())\n    }\n\n    /// Returns key_value_metadata of this file.\n    #[wasm_bindgen(js_name = keyValueMetadata)]\n    pub fn key_value_metadata(&self) -> Result<js_sys::Map, JsValue> {\n        let map = js_sys::Map::new();\n        if let Some(metadata) = self.0.key_value_metadata() {\n            for meta in metadata {\n                if let Some(value) = &meta.value {\n                    map.set(&JsValue::from_str(&meta.key), &JsValue::from_str(value));\n                }\n            }\n        }\n        Ok(map)\n    }\n}\n\nimpl From<parquet::file::metadata::FileMetaData> for FileMetaData {\n    fn from(value: parquet::file::metadata::FileMetaData) -> Self {\n        Self(value)\n    }\n}\n\nimpl From<FileMetaData> for parquet::file::metadata::FileMetaData {\n    fn from(value: FileMetaData) -> Self {\n        value.0\n    }\n}\n\n/// Metadata for a Parquet row group.\n#[derive(Debug, Clone)]\n#[wasm_bindgen]\npub struct RowGroupMetaData(parquet::file::metadata::RowGroupMetaData);\n\n#[wasm_bindgen]\nimpl RowGroupMetaData {\n    /// Number of columns in this row group.\n    #[wasm_bindgen(js_name = numColumns)]\n    pub fn num_columns(&self) -> usize {\n        self.0.num_columns()\n    }\n\n    /// Returns column chunk metadata for 
`i`th column.\n    #[wasm_bindgen]\n    pub fn column(&self, i: usize) -> ColumnChunkMetaData {\n        self.0.column(i).clone().into()\n    }\n\n    /// Returns column chunk metadata for all columns\n    #[wasm_bindgen]\n    pub fn columns(&self) -> Vec<ColumnChunkMetaData> {\n        self.0\n            .columns()\n            .iter()\n            .map(|col| col.clone().into())\n            .collect()\n    }\n\n    /// Number of rows in this row group.\n    #[wasm_bindgen(js_name = numRows)]\n    pub fn num_rows(&self) -> f64 {\n        self.0.num_rows() as f64\n    }\n\n    /// Total byte size of all uncompressed column data in this row group.\n    #[wasm_bindgen(js_name = totalByteSize)]\n    pub fn total_byte_size(&self) -> f64 {\n        self.0.total_byte_size() as f64\n    }\n\n    /// Total size of all compressed column data in this row group.\n    #[wasm_bindgen(js_name = compressedSize)]\n    pub fn compressed_size(&self) -> f64 {\n        self.0.compressed_size() as f64\n    }\n}\n\nimpl From<parquet::file::metadata::RowGroupMetaData> for RowGroupMetaData {\n    fn from(value: parquet::file::metadata::RowGroupMetaData) -> Self {\n        Self(value)\n    }\n}\n\nimpl From<RowGroupMetaData> for parquet::file::metadata::RowGroupMetaData {\n    fn from(value: RowGroupMetaData) -> Self {\n        value.0\n    }\n}\n\n/// Metadata for a Parquet column chunk.\n#[derive(Debug, Clone)]\n#[wasm_bindgen]\npub struct ColumnChunkMetaData(parquet::file::metadata::ColumnChunkMetaData);\n\n#[wasm_bindgen]\nimpl ColumnChunkMetaData {\n    /// File where the column chunk is stored.\n    ///\n    /// If not set, assumed to belong to the same file as the metadata.\n    /// This path is relative to the current file.\n    #[wasm_bindgen(js_name = filePath)]\n    pub fn file_path(&self) -> Option<String> {\n        self.0.file_path().map(|s| s.to_string())\n    }\n\n    /// Byte offset in `file_path()`.\n    #[wasm_bindgen(js_name = fileOffset)]\n    pub fn 
file_offset(&self) -> i64 {\n        self.0.file_offset()\n    }\n\n    // /// Type of this column. Must be primitive.\n    // pub fn column_type(&self) -> Type {\n    //     self.column_descr.physical_type()\n    // }\n\n    /// Path (or identifier) of this column.\n    #[wasm_bindgen(js_name = columnPath)]\n    pub fn column_path(&self) -> Vec<String> {\n        let path = self.0.column_path();\n        path.parts().to_vec()\n    }\n\n    /// All encodings used for this column.\n    #[wasm_bindgen]\n    pub fn encodings(&self) -> Vec<Encoding> {\n        self.0\n            .encodings()\n            .iter()\n            .map(|encoding| (*encoding).into())\n            .collect()\n    }\n\n    /// Total number of values in this column chunk.\n    #[wasm_bindgen(js_name = numValues)]\n    pub fn num_values(&self) -> f64 {\n        self.0.num_values() as f64\n    }\n\n    /// Compression for this column.\n    pub fn compression(&self) -> Compression {\n        self.0.compression().into()\n    }\n\n    /// Returns the total compressed data size of this column chunk.\n    #[wasm_bindgen(js_name = compressedSize)]\n    pub fn compressed_size(&self) -> f64 {\n        self.0.compressed_size() as f64\n    }\n\n    /// Returns the total uncompressed data size of this column chunk.\n    #[wasm_bindgen(js_name = uncompressedSize)]\n    pub fn uncompressed_size(&self) -> f64 {\n        self.0.uncompressed_size() as f64\n    }\n}\n\nimpl From<parquet::file::metadata::ColumnChunkMetaData> for ColumnChunkMetaData {\n    fn from(value: parquet::file::metadata::ColumnChunkMetaData) -> Self {\n        Self(value)\n    }\n}\n\nimpl From<ColumnChunkMetaData> for parquet::file::metadata::ColumnChunkMetaData {\n    fn from(value: ColumnChunkMetaData) -> Self {\n        value.0\n    }\n}\n"
  },
  {
    "path": "src/read_options.rs",
    "content": "use parquet::arrow::ProjectionMask;\nuse parquet::arrow::arrow_reader::ArrowReaderBuilder;\nuse parquet::schema::types::SchemaDescriptor;\nuse serde::{Deserialize, Serialize};\nuse wasm_bindgen::prelude::*;\n\nuse crate::error::{ParquetWasmError, Result};\n\n#[wasm_bindgen(typescript_custom_section)]\nconst TS_ReaderOptions: &'static str = r#\"\nexport type ReaderOptions = {\n    /* The number of rows in each batch. If not provided, the upstream parquet default is 1024. */\n    batchSize?: number;\n    /* Only read data from the provided row group indexes. */\n    rowGroups?: number[];\n    /* Provide a limit to the number of rows to be read. */\n    limit?: number;\n    /* Provide an offset to skip over the given number of rows. */\n    offset?: number;\n    /* The column names from the file to read. */\n    columns?: string[];\n    /* The number of concurrent requests to make in the async reader. */\n    concurrency?: number;\n};\n\"#;\n\n#[wasm_bindgen]\nextern \"C\" {\n    /// Reader options\n    #[wasm_bindgen(typescript_type = \"ReaderOptions\")]\n    pub type ReaderOptions;\n}\n\n#[derive(Clone, Serialize, Deserialize, Default)]\n#[serde(rename_all = \"camelCase\")]\npub struct JsReaderOptions {\n    /// The number of rows in each batch. 
If not provided, the upstream parquet default is 1024.\n    pub batch_size: Option<usize>,\n\n    /// Only read data from the provided row group indexes\n    pub row_groups: Option<Vec<usize>>,\n\n    /// Provide a limit to the number of rows to be read\n    pub limit: Option<usize>,\n\n    /// Provide an offset to skip over the given number of rows\n    pub offset: Option<usize>,\n\n    /// The column names from the file to read.\n    pub columns: Option<Vec<String>>,\n\n    /// The number of concurrent requests to make in the async reader.\n    pub concurrency: Option<usize>,\n}\n\nimpl JsReaderOptions {\n    pub fn apply_to_builder<T>(\n        &self,\n        mut builder: ArrowReaderBuilder<T>,\n    ) -> Result<ArrowReaderBuilder<T>> {\n        if let Some(batch_size) = self.batch_size {\n            builder = builder.with_batch_size(batch_size);\n        }\n\n        if let Some(limit) = self.limit {\n            builder = builder.with_limit(limit);\n        }\n\n        if let Some(offset) = self.offset {\n            builder = builder.with_offset(offset);\n        }\n\n        if let Some(columns) = &self.columns {\n            let parquet_schema = builder.parquet_schema();\n            let projection_mask = generate_projection_mask(columns, parquet_schema)?;\n\n            builder = builder.with_projection(projection_mask);\n        }\n\n        if let Some(row_groups) = &self.row_groups {\n            builder = builder.with_row_groups(row_groups.clone());\n        }\n\n        Ok(builder)\n    }\n}\n\nimpl TryFrom<ReaderOptions> for JsReaderOptions {\n    type Error = serde_wasm_bindgen::Error;\n\n    fn try_from(value: ReaderOptions) -> std::result::Result<Self, Self::Error> {\n        serde_wasm_bindgen::from_value(value.obj)\n    }\n}\n\nfn generate_projection_mask<S: AsRef<str>>(\n    columns: &[S],\n    pq_schema: &SchemaDescriptor,\n) -> Result<ProjectionMask> {\n    let col_paths = pq_schema\n        .columns()\n        .iter()\n        .map(|col| 
col.path().string())\n        .collect::<Vec<_>>();\n    let indices: Vec<usize> = columns\n        .iter()\n        .map(|col| {\n            let col = col.as_ref();\n            let field_indices: Vec<usize> = col_paths\n                .iter()\n                .enumerate()\n                .filter(|(_idx, path)| {\n                    // identical OR the path starts with the column AND the substring is immediately followed by the\n                    // path separator\n                    path.as_str() == col\n                        || path.starts_with(col) && {\n                            let left_index = path.find(col).unwrap();\n                            path.chars().nth(left_index + col.len()).unwrap() == '.'\n                        }\n                })\n                .map(|(idx, _)| idx)\n                .collect();\n            if field_indices.is_empty() {\n                Err(ParquetWasmError::UnknownColumn(col.to_string()))\n            } else {\n                Ok(field_indices)\n            }\n        })\n        .collect::<Result<Vec<Vec<usize>>>>()?\n        .into_iter()\n        .flatten()\n        .collect();\n    let projection_mask = ProjectionMask::leaves(pq_schema, indices);\n    Ok(projection_mask)\n}\n"
  },
  {
    "path": "src/reader.rs",
    "content": "use std::sync::Arc;\n\nuse crate::error::Result;\nuse crate::read_options::JsReaderOptions;\nuse arrow_schema::{DataType, FieldRef};\nuse arrow_wasm::{Schema, Table};\nuse bytes::Bytes;\nuse parquet::arrow::arrow_reader::{\n    ArrowReaderMetadata, ArrowReaderOptions, ParquetRecordBatchReaderBuilder,\n};\n\n/// Internal function to read a buffer with Parquet data into a buffer with Arrow IPC Stream data\npub fn read_parquet(parquet_file: Vec<u8>, options: JsReaderOptions) -> Result<Table> {\n    // Create Parquet reader\n    let cursor: Bytes = parquet_file.into();\n\n    let metadata = ArrowReaderMetadata::load(&cursor, Default::default())?;\n    let metadata = cast_metadata_view_types(&metadata)?;\n\n    let mut builder = ParquetRecordBatchReaderBuilder::new_with_metadata(cursor, metadata);\n\n    let schema = builder.schema().clone();\n\n    if let Some(batch_size) = options.batch_size {\n        builder = builder.with_batch_size(batch_size);\n    }\n\n    if let Some(row_groups) = options.row_groups {\n        builder = builder.with_row_groups(row_groups);\n    }\n\n    if let Some(limit) = options.limit {\n        builder = builder.with_limit(limit);\n    }\n\n    if let Some(offset) = options.offset {\n        builder = builder.with_offset(offset);\n    }\n\n    // Create Arrow reader\n    let reader = builder.build()?;\n\n    let mut batches = vec![];\n\n    for maybe_chunk in reader {\n        batches.push(maybe_chunk?)\n    }\n\n    Ok(Table::new(schema, batches))\n}\n\n/// Internal function to read a buffer with Parquet data into an Arrow schema\npub fn read_schema(parquet_file: Vec<u8>) -> Result<Schema> {\n    // Create Parquet reader\n    let cursor: Bytes = parquet_file.into();\n    let builder = ParquetRecordBatchReaderBuilder::try_new(cursor)?;\n    let schema = builder.schema().clone();\n    Ok(schema.into())\n}\n\n/// Cast any view types in the metadata's schema to non-view types\npub(crate) fn cast_metadata_view_types(\n    
metadata: &ArrowReaderMetadata,\n) -> Result<ArrowReaderMetadata> {\n    let original_arrow_schema = metadata.schema();\n    if has_view_types(original_arrow_schema.fields().iter()) {\n        let new_schema = cast_view_types(original_arrow_schema);\n        let arrow_options = ArrowReaderOptions::default().with_schema(new_schema);\n        Ok(ArrowReaderMetadata::try_new(\n            metadata.metadata().clone(),\n            arrow_options,\n        )?)\n    } else {\n        Ok(metadata.clone())\n    }\n}\n\n/// Cast any view types in the schema to non-view types\n///\n/// Casts:\n///\n/// - StringView to String\n/// - BinaryView to Binary\n///\n/// Arrow JS does not currently support view types\n/// https://github.com/apache/arrow-js/issues/44\nfn cast_view_types(schema: &arrow_schema::Schema) -> arrow_schema::SchemaRef {\n    let new_fields = _cast_view_types_of_fields(schema.fields().iter());\n    Arc::new(arrow_schema::Schema::new_with_metadata(\n        new_fields,\n        schema.metadata().clone(),\n    ))\n}\n\n/// Recursively cast any view types in the fields to non-view types\n///\n/// This includes any view types that are the children of nested types like Structs and Lists\nfn _cast_view_types_of_fields<'a>(fields: impl Iterator<Item = &'a FieldRef>) -> Vec<FieldRef> {\n    fields\n        .map(|field| {\n            let new_data_type = match field.data_type() {\n                DataType::Utf8View => DataType::Utf8,\n                DataType::BinaryView => DataType::Binary,\n                DataType::Struct(struct_fields) => {\n                    DataType::Struct(_cast_view_types_of_fields(struct_fields.iter()).into())\n                }\n                DataType::List(inner_field) => DataType::List(\n                    _cast_view_types_of_fields([inner_field].into_iter())\n                        .into_iter()\n                        .next()\n                        .unwrap(),\n                ),\n                DataType::LargeList(inner_field) => 
DataType::LargeList(\n                    _cast_view_types_of_fields([inner_field].into_iter())\n                        .into_iter()\n                        .next()\n                        .unwrap(),\n                ),\n                DataType::FixedSizeList(inner_field, list_size) => DataType::FixedSizeList(\n                    _cast_view_types_of_fields([inner_field].into_iter())\n                        .into_iter()\n                        .next()\n                        .unwrap(),\n                    *list_size,\n                ),\n                other => other.clone(),\n            };\n            Arc::new(field.as_ref().clone().with_data_type(new_data_type))\n        })\n        .collect()\n}\n\nfn has_view_types<'a>(mut fields: impl Iterator<Item = &'a FieldRef>) -> bool {\n    fields.any(|field| match field.data_type() {\n        DataType::Utf8View | DataType::BinaryView => true,\n        DataType::Struct(struct_fields) => has_view_types(struct_fields.iter()),\n        DataType::List(inner_field) => has_view_types([inner_field].into_iter()),\n        DataType::LargeList(inner_field) => has_view_types([inner_field].into_iter()),\n        DataType::FixedSizeList(inner_field, _list_size) => {\n            has_view_types([inner_field].into_iter())\n        }\n        _other => false,\n    })\n}\n"
  },
  {
    "path": "src/reader_async.rs",
    "content": "//! An asynchronous Parquet reader that is able to read and inspect remote files without\n//! downloading them in entirety.\n\nuse crate::common::fetch::{\n    create_reader, get_content_length, range_from_end, range_from_start_and_length,\n};\nuse crate::error::{Result, WasmResult};\nuse crate::read_options::{JsReaderOptions, ReaderOptions};\nuse crate::reader::cast_metadata_view_types;\nuse futures::channel::oneshot;\nuse futures::future::BoxFuture;\nuse object_store::coalesce_ranges;\nuse std::ops::Range;\nuse std::sync::Arc;\nuse wasm_bindgen::prelude::*;\nuse wasm_bindgen_futures::spawn_local;\n\nuse arrow::ipc::writer::StreamWriter;\nuse arrow_wasm::{RecordBatch, Table};\nuse bytes::Bytes;\nuse futures::TryStreamExt;\nuse futures::{FutureExt, StreamExt, stream};\nuse parquet::arrow::arrow_reader::{ArrowReaderMetadata, ArrowReaderOptions};\nuse parquet::arrow::async_reader::{\n    AsyncFileReader, MetadataSuffixFetch, ParquetRecordBatchStream, ParquetRecordBatchStreamBuilder,\n};\n\nuse async_compat::{Compat, CompatExt};\nuse parquet::file::metadata::{\n    FileMetaData, PageIndexPolicy, ParquetMetaData, ParquetMetaDataReader,\n};\nuse range_reader::RangedAsyncReader;\nuse reqwest::Client;\n\n/// Range requests with a gap less than or equal to this,\n/// will be coalesced into a single request by [`coalesce_ranges`]\nconst OBJECT_STORE_COALESCE_DEFAULT: u64 = 1024 * 1024;\n\nfn create_builder<T: AsyncFileReader + Unpin + 'static>(\n    reader: T,\n    meta: &ArrowReaderMetadata,\n    options: &JsReaderOptions,\n) -> Result<ParquetRecordBatchStreamBuilder<T>> {\n    // Cast any view types to non-view types\n    let metadata = cast_metadata_view_types(meta)?;\n\n    let builder = ParquetRecordBatchStreamBuilder::new_with_metadata(reader, metadata);\n    options.apply_to_builder(builder)\n}\n\n/// An abstraction over either a browser File handle or an ObjectStore instance\n///\n/// This allows exposing a single ParquetFile class to the 
user.\n#[derive(Clone)]\nenum InnerParquetFile {\n    File(JsFileReader),\n    Http(HTTPFileReader),\n}\n\nimpl AsyncFileReader for InnerParquetFile {\n    fn get_bytes(&mut self, range: Range<u64>) -> BoxFuture<'_, parquet::errors::Result<Bytes>> {\n        match self {\n            Self::File(reader) => reader.get_bytes(range),\n            Self::Http(reader) => reader.get_bytes(range),\n        }\n    }\n\n    fn get_byte_ranges(\n        &mut self,\n        ranges: Vec<Range<u64>>,\n    ) -> BoxFuture<'_, parquet::errors::Result<Vec<Bytes>>> {\n        match self {\n            Self::File(reader) => reader.get_byte_ranges(ranges),\n            Self::Http(reader) => reader.get_byte_ranges(ranges),\n        }\n    }\n\n    fn get_metadata<'a>(\n        &'a mut self,\n        options: Option<&'a ArrowReaderOptions>,\n    ) -> BoxFuture<'a, parquet::errors::Result<Arc<ParquetMetaData>>> {\n        match self {\n            Self::File(reader) => reader.get_metadata(options),\n            Self::Http(reader) => reader.get_metadata(options),\n        }\n    }\n}\n\n#[wasm_bindgen]\npub struct ParquetFile {\n    reader: InnerParquetFile,\n    meta: ArrowReaderMetadata,\n}\n\n#[wasm_bindgen]\nimpl ParquetFile {\n    /// Construct a ParquetFile from a new URL.\n    #[wasm_bindgen(js_name = fromUrl)]\n    pub async fn from_url(url: String) -> WasmResult<ParquetFile> {\n        let client = Client::new();\n        let mut reader = HTTPFileReader::new(url, client, OBJECT_STORE_COALESCE_DEFAULT);\n        let meta = ArrowReaderMetadata::load_async(&mut reader, Default::default()).await?;\n        Ok(Self {\n            reader: InnerParquetFile::Http(reader),\n            meta,\n        })\n    }\n\n    /// Construct a ParquetFile from a new [Blob] or [File] handle.\n    ///\n    /// [Blob]: https://developer.mozilla.org/en-US/docs/Web/API/Blob\n    /// [File]: https://developer.mozilla.org/en-US/docs/Web/API/File\n    ///\n    /// Safety: Do not use this in a multi-threaded 
environment,\n    /// (transitively depends on `!Send` `web_sys::Blob`)\n    #[wasm_bindgen(js_name = fromFile)]\n    pub async fn from_file(handle: web_sys::Blob) -> WasmResult<ParquetFile> {\n        let mut reader = JsFileReader::new(handle, 1024);\n        let meta = ArrowReaderMetadata::load_async(&mut reader, Default::default()).await?;\n        Ok(Self {\n            reader: InnerParquetFile::File(reader),\n            meta,\n        })\n    }\n\n    #[wasm_bindgen]\n    pub fn metadata(&self) -> WasmResult<crate::metadata::ParquetMetaData> {\n        Ok(self.meta.metadata().as_ref().to_owned().into())\n    }\n\n    #[wasm_bindgen]\n    pub fn schema(&self) -> WasmResult<arrow_wasm::Schema> {\n        Ok(self.meta.schema().clone().into())\n    }\n\n    /// Read from the Parquet file in an async fashion.\n    ///\n    /// @param options\n    ///\n    ///    Options for reading Parquet data. Optional keys include:\n    ///\n    ///    - `batchSize`: The number of rows in each batch. 
If not provided, the upstream parquet\n    ///           default is 1024.\n    ///    - `rowGroups`: Only read data from the provided row group indexes.\n    ///    - `limit`: Provide a limit to the number of rows to be read.\n    ///    - `offset`: Provide an offset to skip over the given number of rows.\n    ///    - `columns`: The column names from the file to read.\n    #[wasm_bindgen]\n    pub async fn read(&self, options: Option<ReaderOptions>) -> WasmResult<Table> {\n        let options = options\n            .map(|x| x.try_into())\n            .transpose()?\n            .unwrap_or_default();\n        let builder = create_builder(self.reader.clone(), &self.meta, &options)?;\n\n        let schema = builder.schema().clone();\n        let stream = builder.build()?;\n        let batches = stream.try_collect::<Vec<_>>().await.unwrap();\n\n        Ok(Table::new(schema, batches))\n    }\n\n    /// Create a readable stream of record batches.\n    ///\n    /// Each item in the stream will be a {@linkcode RecordBatch}.\n    ///\n    /// @param options\n    ///\n    ///    Options for reading Parquet data. Optional keys include:\n    ///\n    ///    - `batchSize`: The number of rows in each batch. 
If not provided, the upstream parquet\n    ///           default is 1024.\n    ///    - `rowGroups`: Only read data from the provided row group indexes.\n    ///    - `limit`: Provide a limit to the number of rows to be read.\n    ///    - `offset`: Provide an offset to skip over the given number of rows.\n    ///    - `columns`: The column names from the file to read.\n    ///    - `concurrency`: The number of concurrent requests to make\n    #[wasm_bindgen]\n    pub async fn stream(\n        &self,\n        options: Option<ReaderOptions>,\n    ) -> WasmResult<wasm_streams::readable::sys::ReadableStream> {\n        let options: JsReaderOptions = options\n            .map(|x| x.try_into())\n            .transpose()?\n            .unwrap_or_default();\n\n        let concurrency = options.concurrency.unwrap_or_default().max(1);\n        let row_groups = options\n            .row_groups\n            .clone()\n            .unwrap_or_else(|| (0..self.meta.metadata().num_row_groups()).collect());\n        let reader = self.reader.clone();\n        let meta = self.meta.clone();\n\n        let buffered_stream = stream::iter(row_groups.into_iter().map(move |i| {\n            let builder = create_builder(reader.clone(), &meta.clone(), &options.clone())\n                .unwrap()\n                .with_row_groups(vec![i]);\n            builder.build().unwrap().try_collect::<Vec<_>>()\n        }))\n        .buffered(concurrency);\n        let out_stream = buffered_stream.flat_map(|maybe_record_batches| {\n            stream::iter(maybe_record_batches.unwrap())\n                .map(|record_batch| Ok(RecordBatch::new(record_batch).into()))\n        });\n        Ok(wasm_streams::ReadableStream::from_stream(out_stream).into_raw())\n    }\n}\n\n#[derive(Debug, Clone)]\npub struct HTTPFileReader {\n    url: String,\n    client: Client,\n    coalesce_byte_size: u64,\n}\n\nimpl HTTPFileReader {\n    pub fn new(url: String, client: Client, coalesce_byte_size: u64) -> Self {\n        
Self {\n            url,\n            client,\n            coalesce_byte_size,\n        }\n    }\n}\n\nimpl MetadataSuffixFetch for &mut HTTPFileReader {\n    fn fetch_suffix(&mut self, suffix: usize) -> BoxFuture<'_, parquet::errors::Result<Bytes>> {\n        async move {\n            let range_str = range_from_end(suffix);\n\n            // Map reqwest error to parquet error\n            // let map_err = |err| parquet::errors::ParquetError::External(Box::new(err));\n\n            let bytes = make_range_request_with_client(\n                self.url.to_string(),\n                self.client.clone(),\n                range_str,\n            )\n            .await\n            .unwrap();\n\n            Ok(bytes)\n        }\n        .boxed()\n    }\n}\n\nasync fn get_bytes_http(\n    url: String,\n    client: Client,\n    range: Range<u64>,\n) -> parquet::errors::Result<Bytes> {\n    let range_str = range_from_start_and_length(range.start, range.end - range.start);\n\n    // Map reqwest error to parquet error\n    // let map_err = |err| parquet::errors::ParquetError::External(Box::new(err));\n\n    let bytes = make_range_request_with_client(url, client, range_str)\n        .await\n        .unwrap();\n\n    Ok(bytes)\n}\n\nimpl AsyncFileReader for HTTPFileReader {\n    fn get_bytes(&mut self, range: Range<u64>) -> BoxFuture<'_, parquet::errors::Result<Bytes>> {\n        get_bytes_http(self.url.clone(), self.client.clone(), range).boxed()\n    }\n\n    fn get_byte_ranges(\n        &mut self,\n        ranges: Vec<Range<u64>>,\n    ) -> BoxFuture<'_, parquet::errors::Result<Vec<Bytes>>> {\n        async move {\n            coalesce_ranges(\n                &ranges,\n                |range| get_bytes_http(self.url.clone(), self.client.clone(), range),\n                self.coalesce_byte_size,\n            )\n            .await\n        }\n        .boxed()\n    }\n\n    fn get_metadata<'a>(\n        &'a mut self,\n        _options: Option<&'a ArrowReaderOptions>,\n    ) -> 
BoxFuture<'a, parquet::errors::Result<Arc<ParquetMetaData>>> {\n        async move {\n            let metadata = ParquetMetaDataReader::new()\n                .with_page_index_policy(PageIndexPolicy::Optional)\n                .load_via_suffix_and_finish(self)\n                .await?;\n            Ok(Arc::new(metadata))\n        }\n        .boxed()\n    }\n}\n\n#[derive(Debug, Clone)]\nstruct WrappedFile {\n    inner: web_sys::Blob,\n    pub size: u64,\n}\n/// Safety: This is not in fact thread-safe. Do not attempt to use this in work-stealing\n/// async runtimes / multi-threaded environments\n///\n/// web_sys::Blob objects, like all JSValues, are !Send (even in JS, there's\n/// maybe ~5 Transferable types), and eventually boil down to PhantomData<*mut u8>.\n/// Any struct that holds one is inherently !Send, which disqualifies it from being used\n/// with the AsyncFileReader trait.\nunsafe impl Send for WrappedFile {}\nunsafe impl Sync for WrappedFile {}\n\nimpl WrappedFile {\n    pub fn new(inner: web_sys::Blob) -> Self {\n        let size = inner.size() as u64;\n        Self { inner, size }\n    }\n\n    pub async fn get_bytes(&mut self, range: Range<u64>) -> Vec<u8> {\n        use js_sys::Uint8Array;\n        use wasm_bindgen_futures::JsFuture;\n        let (sender, receiver) = oneshot::channel();\n        let file = self.inner.clone();\n        spawn_local(async move {\n            let subset_blob = file\n                .slice_with_i32_and_i32(\n                    range.start.try_into().unwrap(),\n                    range.end.try_into().unwrap(),\n                )\n                .unwrap();\n            let buf = JsFuture::from(subset_blob.array_buffer()).await.unwrap();\n            let out_vec = Uint8Array::new_with_byte_offset(&buf, 0).to_vec();\n            sender.send(out_vec).unwrap();\n        });\n\n        receiver.await.unwrap()\n    }\n}\n\nasync fn get_bytes_file(\n    mut file: WrappedFile,\n    range: Range<u64>,\n) -> 
parquet::errors::Result<Bytes> {\n    let (sender, receiver) = oneshot::channel();\n    spawn_local(async move {\n        let result: Bytes = file.get_bytes(range).await.into();\n        sender.send(result).unwrap()\n    });\n    let data = receiver.await.unwrap();\n    Ok(data)\n}\n\n#[derive(Debug, Clone)]\npub struct JsFileReader {\n    file: WrappedFile,\n    coalesce_byte_size: u64,\n}\n\nimpl JsFileReader {\n    pub fn new(file: web_sys::Blob, coalesce_byte_size: u64) -> Self {\n        Self {\n            file: WrappedFile::new(file),\n            coalesce_byte_size,\n        }\n    }\n}\n\nimpl AsyncFileReader for JsFileReader {\n    fn get_bytes(&mut self, range: Range<u64>) -> BoxFuture<'_, parquet::errors::Result<Bytes>> {\n        async move {\n            let (sender, receiver) = oneshot::channel();\n            let mut file = self.file.clone();\n            spawn_local(async move {\n                let result: Bytes = file.get_bytes(range).await.into();\n                sender.send(result).unwrap()\n            });\n            let data = receiver.await.unwrap();\n            Ok(data)\n        }\n        .boxed()\n    }\n\n    fn get_byte_ranges(\n        &mut self,\n        ranges: Vec<Range<u64>>,\n    ) -> BoxFuture<'_, parquet::errors::Result<Vec<Bytes>>> {\n        async move {\n            coalesce_ranges(\n                &ranges,\n                |range| get_bytes_file(self.file.clone(), range),\n                self.coalesce_byte_size,\n            )\n            .await\n        }\n        .boxed()\n    }\n\n    fn get_metadata<'a>(\n        &'a mut self,\n        _options: Option<&'a ArrowReaderOptions>,\n    ) -> BoxFuture<'a, parquet::errors::Result<Arc<ParquetMetaData>>> {\n        let file_size = self.file.size;\n        async move {\n            let metadata = ParquetMetaDataReader::new()\n                .with_page_index_policy(PageIndexPolicy::Optional)\n                .load_and_finish(self, file_size)\n                .await?;\n     
       Ok(Arc::new(metadata))\n        }\n        .boxed()\n    }\n}\n\npub async fn make_range_request_with_client(\n    url: String,\n    client: Client,\n    range_str: String,\n) -> std::result::Result<Bytes, JsValue> {\n    let (sender, receiver) = oneshot::channel();\n    spawn_local(async move {\n        let resp = client\n            .get(url)\n            .header(\"Range\", range_str)\n            .send()\n            .await\n            .unwrap()\n            .error_for_status()\n            .unwrap();\n        let bytes = resp.bytes().await.unwrap();\n        sender.send(bytes).unwrap();\n    });\n    let data = receiver.await.unwrap();\n    Ok(data)\n}\n\npub async fn read_metadata_async(\n    url: String,\n    content_length: Option<usize>,\n) -> Result<FileMetaData> {\n    let content_length = match content_length {\n        Some(content_length) => content_length,\n        None => get_content_length(url.clone()).await?,\n    };\n    let reader = create_reader(url, content_length, None);\n    let builder = ParquetRecordBatchStreamBuilder::new(reader.compat()).await?;\n    let meta = builder.metadata().file_metadata().clone();\n    Ok(meta)\n}\n\npub async fn _read_row_group(\n    url: String,\n    content_length: Option<usize>,\n    row_group: usize,\n) -> Result<(\n    ParquetRecordBatchStream<Compat<RangedAsyncReader>>,\n    Arc<arrow::datatypes::Schema>,\n)> {\n    let content_length = match content_length {\n        Some(content_length) => content_length,\n        None => get_content_length(url.clone()).await?,\n    };\n    let reader = create_reader(url, content_length, None);\n\n    let mut compat = reader.compat();\n    let metadata = ArrowReaderMetadata::load_async(&mut compat, Default::default()).await?;\n    let builder = create_builder(compat, &metadata, &Default::default())?;\n\n    let arrow_schema = builder.schema().clone();\n    let parquet_reader = builder.with_row_groups(vec![row_group]).build()?;\n    Ok((parquet_reader, 
arrow_schema))\n}\n\npub async fn read_row_group(\n    url: String,\n    row_group: usize,\n    chunk_fn: impl Fn(arrow::record_batch::RecordBatch) -> arrow::record_batch::RecordBatch,\n) -> Result<Vec<u8>> {\n    let (mut parquet_reader, arrow_schema) = _read_row_group(url, None, row_group).await?;\n    // Create IPC Writer\n    let mut output_file = Vec::new();\n    {\n        let mut writer = StreamWriter::try_new(&mut output_file, &arrow_schema)?;\n        while let Some(maybe_record_batch) = parquet_reader.next().await {\n            let record_batch = chunk_fn(maybe_record_batch?);\n            writer.write(&record_batch)?;\n        }\n        writer.finish()?;\n    }\n    Ok(output_file)\n}\n\npub async fn read_record_batch_stream(\n    url: String,\n    content_length: Option<usize>,\n) -> Result<ParquetRecordBatchStream<Compat<RangedAsyncReader>>> {\n    let content_length = match content_length {\n        Some(_content_length) => _content_length,\n        None => get_content_length(url.clone()).await?,\n    };\n    let reader = crate::common::fetch::create_reader(url, content_length, None);\n\n    let mut compat = reader.compat();\n    let metadata = ArrowReaderMetadata::load_async(&mut compat, Default::default()).await?;\n    let builder = create_builder(compat, &metadata, &Default::default())?;\n    let parquet_reader = builder.build()?;\n    Ok(parquet_reader)\n}\n"
  },
  {
    "path": "src/utils.rs",
    "content": "use wasm_bindgen::prelude::*;\n\n/// Call this function at least once during initialization to get better error\n/// messages if the underlying Rust code ever panics (creates uncaught errors).\n#[cfg(feature = \"console_error_panic_hook\")]\n#[wasm_bindgen(js_name = setPanicHook)]\npub fn set_panic_hook() {\n    // When the `console_error_panic_hook` feature is enabled, we can call the\n    // `set_panic_hook` function at least once during initialization, and then\n    // we will get better error messages if our code ever panics.\n    //\n    // For more details see\n    // https://github.com/rustwasm/console_error_panic_hook#readme\n    console_error_panic_hook::set_once();\n}\n\n// A macro to provide `println!(..)`-style syntax for `console.log` logging.\n#[cfg(target_arch = \"wasm32\")]\n#[macro_export]\nmacro_rules! log {\n    ( $( $t:tt )* ) => {\n        web_sys::console::log_1(&format!( $( $t )* ).into());\n    }\n}\n\n#[cfg(not(target_arch = \"wasm32\"))]\n#[macro_export]\nmacro_rules! log {\n    ( $( $t:tt )* ) => {\n        println!(\"LOG - {}\", format!( $( $t )* ));\n    }\n}\n\n/// Raise an error if the input array is empty\npub fn assert_parquet_file_not_empty(parquet_file: &[u8]) -> Result<(), JsError> {\n    if parquet_file.is_empty() {\n        return Err(JsError::new(\"Empty input provided or not a Uint8Array.\"));\n    }\n    Ok(())\n}\n"
  },
  {
    "path": "src/wasm.rs",
    "content": "use crate::error::WasmResult;\n#[cfg(feature = \"reader\")]\nuse crate::read_options::ReaderOptions;\nuse crate::utils::assert_parquet_file_not_empty;\nuse arrow_wasm::{RecordBatch, Schema, Table};\nuse wasm_bindgen::prelude::*;\n\n/// Read a Parquet file into Arrow data.\n///\n/// This returns an Arrow table in WebAssembly memory. To transfer the Arrow table to JavaScript\n/// memory you have two options:\n///\n/// - (Easier): Call {@linkcode Table.intoIPCStream} to construct a buffer that can be parsed with\n///   Arrow JS's `tableFromIPC` function.\n/// - (More performant but bleeding edge): Call {@linkcode Table.intoFFI} to construct a data\n///   representation that can be parsed zero-copy from WebAssembly with\n///   [arrow-js-ffi](https://github.com/kylebarron/arrow-js-ffi) using `parseTable`.\n///\n/// Example with IPC stream:\n///\n/// ```js\n/// import { tableFromIPC } from \"apache-arrow\";\n/// import initWasm, {readParquet} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n///\n/// const resp = await fetch(\"https://example.com/file.parquet\");\n/// const parquetUint8Array = new Uint8Array(await resp.arrayBuffer());\n/// const arrowWasmTable = readParquet(parquetUint8Array);\n/// const arrowTable = tableFromIPC(arrowWasmTable.intoIPCStream());\n/// ```\n///\n/// Example with `arrow-js-ffi`:\n///\n/// ```js\n/// import { parseTable } from \"arrow-js-ffi\";\n/// import initWasm, {readParquet, wasmMemory} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n/// const WASM_MEMORY = wasmMemory();\n///\n/// const resp = await fetch(\"https://example.com/file.parquet\");\n/// const parquetUint8Array = new Uint8Array(await resp.arrayBuffer());\n/// const arrowWasmTable = readParquet(parquetUint8Array);\n/// const ffiTable = arrowWasmTable.intoFFI();\n/// const arrowTable = parseTable(\n///   WASM_MEMORY.buffer,\n///   ffiTable.arrayAddrs(),\n///   
ffiTable.schemaAddr()\n/// );\n/// ```\n///\n/// @param parquet_file Uint8Array containing Parquet data\n/// @param options\n///\n///    Options for reading Parquet data. Optional keys include:\n///\n///    - `batchSize`: The number of rows in each batch. If not provided, the upstream parquet\n///           default is 1024.\n///    - `rowGroups`: Only read data from the provided row group indexes.\n///    - `limit`: Provide a limit to the number of rows to be read.\n///    - `offset`: Provide an offset to skip over the given number of rows.\n///    - `columns`: The column names from the file to read.\n#[wasm_bindgen(js_name = readParquet)]\n#[cfg(feature = \"reader\")]\npub fn read_parquet(parquet_file: Vec<u8>, options: Option<ReaderOptions>) -> WasmResult<Table> {\n    assert_parquet_file_not_empty(parquet_file.as_slice())?;\n    Ok(crate::reader::read_parquet(\n        parquet_file,\n        options\n            .map(|x| x.try_into())\n            .transpose()?\n            .unwrap_or_default(),\n    )?)\n}\n\n/// Read an Arrow schema from a Parquet file in memory.\n///\n/// This returns an Arrow schema in WebAssembly memory. To transfer the Arrow schema to JavaScript\n/// memory you have two options:\n///\n/// - (Easier): Call {@linkcode Schema.intoIPCStream} to construct a buffer that can be parsed with\n///   Arrow JS's `tableFromIPC` function. 
This results in an Arrow JS Table with zero rows but a\n///   valid schema.\n/// - (More performant but bleeding edge): Call {@linkcode Schema.intoFFI} to construct a data\n///   representation that can be parsed zero-copy from WebAssembly with\n///   [arrow-js-ffi](https://github.com/kylebarron/arrow-js-ffi) using `parseSchema`.\n///\n/// Example with IPC Stream:\n///\n/// ```js\n/// import { tableFromIPC } from \"apache-arrow\";\n/// import initWasm, {readSchema} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n///\n/// const resp = await fetch(\"https://example.com/file.parquet\");\n/// const parquetUint8Array = new Uint8Array(await resp.arrayBuffer());\n/// const arrowWasmSchema = readSchema(parquetUint8Array);\n/// const arrowTable = tableFromIPC(arrowWasmSchema.intoIPCStream());\n/// const arrowSchema = arrowTable.schema;\n/// ```\n///\n/// Example with `arrow-js-ffi`:\n///\n/// ```js\n/// import { parseSchema } from \"arrow-js-ffi\";\n/// import initWasm, {readSchema, wasmMemory} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n/// const WASM_MEMORY = wasmMemory();\n///\n/// const resp = await fetch(\"https://example.com/file.parquet\");\n/// const parquetUint8Array = new Uint8Array(await resp.arrayBuffer());\n/// const arrowWasmSchema = readSchema(parquetUint8Array);\n/// const ffiSchema = arrowWasmSchema.intoFFI();\n/// const arrowTable = parseSchema(WASM_MEMORY.buffer, ffiSchema.addr());\n/// const arrowSchema = arrowTable.schema;\n/// ```\n///\n/// @param parquet_file Uint8Array containing Parquet data\n#[wasm_bindgen(js_name = readSchema)]\n#[cfg(feature = \"reader\")]\npub fn read_schema(parquet_file: Vec<u8>) -> WasmResult<Schema> {\n    assert_parquet_file_not_empty(parquet_file.as_slice())?;\n    Ok(crate::reader::read_schema(parquet_file)?)\n}\n\n/// Write Arrow data to a Parquet file.\n///\n/// For example, to create a Parquet file with Snappy 
compression:\n///\n/// ```js\n/// import { tableToIPC } from \"apache-arrow\";\n/// // Edit the `parquet-wasm` import as necessary\n/// import initWasm, {\n///   Table,\n///   WriterPropertiesBuilder,\n///   Compression,\n///   writeParquet,\n/// } from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n///\n/// // Given an existing arrow JS table under `table`\n/// const wasmTable = Table.fromIPCStream(tableToIPC(table, \"stream\"));\n/// const writerProperties = new WriterPropertiesBuilder()\n///   .setCompression(Compression.SNAPPY)\n///   .build();\n/// const parquetUint8Array = writeParquet(wasmTable, writerProperties);\n/// ```\n///\n/// If `writerProperties` is not provided or is `null`, the default writer properties will be used.\n/// This is equivalent to `new WriterPropertiesBuilder().build()`.\n///\n/// @param table A {@linkcode Table} representation in WebAssembly memory.\n/// @param writer_properties (optional) Configuration for writing to Parquet. Use the {@linkcode\n/// WriterPropertiesBuilder} to build a writing configuration, then call `.build()` to create an\n/// immutable writer properties to pass in here.\n/// @returns Uint8Array containing written Parquet data.\n#[wasm_bindgen(js_name = writeParquet)]\n#[cfg(feature = \"writer\")]\npub fn write_parquet(\n    table: Table,\n    writer_properties: Option<crate::writer_properties::WriterProperties>,\n) -> WasmResult<Vec<u8>> {\n    let (schema, batches) = table.into_inner();\n    Ok(crate::writer::write_parquet(\n        batches.into_iter(),\n        schema,\n        writer_properties.unwrap_or_default(),\n    )?)\n}\n\n/// Read a Parquet file into a stream of Arrow `RecordBatch`es.\n///\n/// This returns a ReadableStream containing RecordBatches in WebAssembly memory. 
To transfer the\n/// Arrow table to JavaScript memory you have two options:\n///\n/// - (Easier): Call {@linkcode RecordBatch.intoIPCStream} to construct a buffer that can be parsed\n///   with Arrow JS's `tableFromIPC` function. (The table will have a single internal record\n///   batch).\n/// - (More performant but bleeding edge): Call {@linkcode RecordBatch.intoFFI} to construct a data\n///   representation that can be parsed zero-copy from WebAssembly with\n///   [arrow-js-ffi](https://github.com/kylebarron/arrow-js-ffi) using `parseRecordBatch`.\n///\n/// Example with IPC stream:\n///\n/// ```js\n/// import { tableFromIPC, Table } from \"apache-arrow\";\n/// import initWasm, {readParquetStream} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n///\n/// const stream = await readParquetStream(url);\n///\n/// const batches = [];\n/// for await (const wasmRecordBatch of stream) {\n///   const arrowTable = tableFromIPC(wasmRecordBatch.intoIPCStream());\n///   batches.push(...arrowTable.batches);\n/// }\n/// const table = new Table(batches);\n/// ```\n///\n/// Example with `arrow-js-ffi`:\n///\n/// ```js\n/// import { Table } from \"apache-arrow\";\n/// import { parseRecordBatch } from \"arrow-js-ffi\";\n/// import initWasm, {readParquetStream, wasmMemory} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n/// const WASM_MEMORY = wasmMemory();\n///\n/// const stream = await readParquetStream(url);\n///\n/// const batches = [];\n/// for await (const wasmRecordBatch of stream) {\n///   const ffiRecordBatch = wasmRecordBatch.intoFFI();\n///   const recordBatch = parseRecordBatch(\n///     WASM_MEMORY.buffer,\n///     ffiRecordBatch.arrayAddr(),\n///     ffiRecordBatch.schemaAddr(),\n///     true\n///   );\n///   batches.push(recordBatch);\n/// }\n/// const table = new Table(batches);\n/// ```\n///\n/// @param url URL to Parquet file\n#[wasm_bindgen(js_name = 
readParquetStream)]\n#[cfg(all(feature = \"reader\", feature = \"async\"))]\npub async fn read_parquet_stream(\n    url: String,\n    content_length: Option<usize>,\n) -> WasmResult<wasm_streams::readable::sys::ReadableStream> {\n    use futures::StreamExt;\n    let parquet_stream = crate::reader_async::read_record_batch_stream(url, content_length).await?;\n    let stream = parquet_stream.map(|maybe_record_batch| {\n        let record_batch = maybe_record_batch.unwrap();\n        Ok(RecordBatch::new(record_batch).into())\n    });\n    Ok(wasm_streams::ReadableStream::from_stream(stream).into_raw())\n}\n\n/// Transform a ReadableStream of RecordBatches to a ReadableStream of bytes\n///\n/// Browser example with piping to a file via the File System API:\n///\n/// ```js\n/// import initWasm, {ParquetFile, transformParquetStream} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n///\n/// const fileInstance = await ParquetFile.fromUrl(\"https://example.com/file.parquet\");\n/// const recordBatchStream = await fileInstance.stream();\n/// const serializedParquetStream = await transformParquetStream(recordBatchStream);\n/// // NB: requires transient user activation - you would typically do this before ☝️\n/// const handle = await window.showSaveFilePicker();\n/// const writable = await handle.createWritable();\n/// await serializedParquetStream.pipeTo(writable);\n/// ```\n///\n/// NodeJS (ESM) example with piping to a file:\n/// ```js\n/// import { open } from \"node:fs/promises\";\n/// import { Writable } from \"node:stream\";\n/// import initWasm, {ParquetFile, transformParquetStream} from \"parquet-wasm\";\n///\n/// // Instantiate the WebAssembly context\n/// await initWasm();\n///\n/// const fileInstance = await ParquetFile.fromUrl(\"https://example.com/file.parquet\");\n/// const recordBatchStream = await fileInstance.stream();\n/// const serializedParquetStream = await transformParquetStream(recordBatchStream);\n///\n/// 
// grab a file handle via fsPromises\n/// const handle = await open(\"file.parquet\");\n/// const destinationStream = Writable.toWeb(handle.createWriteStream());\n/// await serializedParquetStream.pipeTo(destinationStream);\n///\n/// ```\n/// NB: the above is a little contrived - `await writeFile(\"file.parquet\", serializedParquetStream)`\n/// is enough for most use cases.\n///\n/// Browser kitchen sink example - teeing to the Cache API, using as a streaming post body, transferring\n/// to a Web Worker:\n/// ```js\n/// // prelude elided - see above\n/// const serializedParquetStream = await transformParquetStream(recordBatchStream);\n/// const [cacheStream, bodyStream] = serializedParquetStream.tee();\n/// const postProm = fetch(targetUrl, {\n///     method: \"POST\",\n///     duplex: \"half\",\n///     body: bodyStream\n/// });\n/// const targetCache = await caches.open(\"foobar\");\n/// await targetCache.put(\"https://example.com/file.parquet\", new Response(cacheStream));\n/// // this could have been done with another tee, but beware of buffering\n/// const workerStream = await targetCache.get(\"https://example.com/file.parquet\").body;\n/// const worker = new Worker(\"worker.js\");\n/// worker.postMessage(workerStream, [workerStream]);\n/// await postProm;\n/// ```\n///\n/// @param stream A {@linkcode ReadableStream} of {@linkcode RecordBatch} instances\n/// @param writer_properties (optional) Configuration for writing to Parquet. 
Use the {@linkcode\n/// WriterPropertiesBuilder} to build a writing configuration, then call `.build()` to create an\n/// immutable writer properties to pass in here.\n/// @returns ReadableStream containing serialized Parquet data.\n#[wasm_bindgen(js_name = \"transformParquetStream\")]\n#[cfg(all(feature = \"writer\", feature = \"async\"))]\npub async fn transform_parquet_stream(\n    stream: wasm_streams::readable::sys::ReadableStream,\n    writer_properties: Option<crate::writer_properties::WriterProperties>,\n) -> WasmResult<wasm_streams::readable::sys::ReadableStream> {\n    use futures::{StreamExt, TryStreamExt};\n    use wasm_bindgen::convert::TryFromJsValue;\n\n    use crate::error::ParquetWasmError;\n    let batches = wasm_streams::ReadableStream::from_raw(stream)\n        .into_stream()\n        .map(|maybe_chunk| {\n            let chunk = maybe_chunk?;\n            arrow_wasm::RecordBatch::try_from_js_value(chunk)\n        })\n        .map_err(ParquetWasmError::DynCastingError);\n    let output_stream = super::writer_async::transform_parquet_stream(\n        batches,\n        writer_properties.unwrap_or_default(),\n    )\n    .await;\n    Ok(output_stream?)\n}\n"
  },
  {
    "path": "src/writer.rs",
    "content": "use crate::error::Result;\nuse arrow::datatypes::SchemaRef;\nuse arrow::record_batch::RecordBatch;\nuse parquet::arrow::arrow_writer::ArrowWriter;\n\n/// Internal function to write a buffer of data in Arrow IPC Stream format to a Parquet file using\n/// the arrow and parquet crates\npub fn write_parquet(\n    batches: impl Iterator<Item = RecordBatch>,\n    schema: SchemaRef,\n    writer_properties: crate::writer_properties::WriterProperties,\n) -> Result<Vec<u8>> {\n    // Create Parquet writer\n    let mut output_file: Vec<u8> = vec![];\n    let mut writer =\n        ArrowWriter::try_new(&mut output_file, schema, Some(writer_properties.into()))?;\n\n    // Iterate over IPC chunks, writing each batch to Parquet\n    for record_batch in batches {\n        writer.write(&record_batch)?;\n    }\n\n    writer.close()?;\n\n    Ok(output_file)\n}\n"
  },
  {
    "path": "src/writer_async.rs",
    "content": "use crate::common::stream::WrappedWritableStream;\nuse crate::error::{ParquetWasmError, Result};\nuse async_compat::CompatExt;\nuse futures::StreamExt;\nuse futures::channel::oneshot;\nuse parquet::arrow::async_writer::AsyncArrowWriter;\nuse wasm_bindgen_futures::spawn_local;\n\npub async fn transform_parquet_stream(\n    batches: impl futures::Stream<Item = Result<arrow_wasm::RecordBatch>> + 'static,\n    writer_properties: crate::writer_properties::WriterProperties,\n) -> Result<wasm_streams::readable::sys::ReadableStream> {\n    let options = Some(writer_properties.into());\n\n    let raw_stream = wasm_streams::transform::sys::TransformStream::new();\n    if let Ok(raw_stream) = raw_stream {\n        let (writable_stream, output_stream) = {\n            let raw_writable = raw_stream.writable();\n            let inner_writer =\n                wasm_streams::WritableStream::from_raw(raw_writable).into_async_write();\n            let writable_stream = WrappedWritableStream {\n                stream: inner_writer,\n            };\n            (writable_stream, raw_stream.readable())\n        };\n        // construct a channel for the purposes of signalling errors occurring at the start of the stream.\n        // Errors that occur during writing will have to fuse the stream.\n        let (sender, receiver) = oneshot::channel::<Result<()>>();\n        spawn_local(async move {\n            let adapted_stream = batches.peekable();\n            let mut pinned_stream = std::pin::pin!(adapted_stream);\n            let first_batch = pinned_stream.as_mut().peek().await;\n            if let Some(Ok(first_batch)) = first_batch {\n                let schema = first_batch.schema().into_inner();\n                let writer = AsyncArrowWriter::try_new(writable_stream.compat(), schema, options);\n                match writer {\n                    Ok(mut writer) => {\n                        // unblock the calling thread's receiver (indicating that stream 
initialization was error-free)\n                        let _ = sender.send(Ok(()));\n                        while let Some(batch) = pinned_stream.next().await {\n                            if let Ok(batch) = batch {\n                                let _ = writer.write(&batch.into()).await;\n                            }\n                        }\n                        let _ = writer.close().await;\n                    }\n                    Err(err) => {\n                        let _ = sender.send(Err(ParquetWasmError::ParquetError(Box::new(err))));\n                    }\n                }\n            } else if let Some(Err(err)) = first_batch {\n                let _ = sender.send(Err(ParquetWasmError::DynCastingError(\n                    err.to_string().into(),\n                )));\n            } else {\n                let _ = sender.send(Err(ParquetWasmError::DynCastingError(\n                    \"null first batch\".to_string().into(),\n                )));\n            }\n        });\n        match receiver.await.unwrap() {\n            Ok(()) => Ok(output_stream),\n            Err(err) => Err(err),\n        }\n    } else {\n        Err(ParquetWasmError::PlatformSupportError(\n            \"Failed to create TransformStream\".to_string(),\n        ))\n    }\n}\n"
  },
  {
    "path": "src/writer_properties.rs",
    "content": "use std::collections::HashMap;\n\nuse crate::common::properties::{Compression, Encoding, WriterVersion};\nuse crate::error::WasmResult;\nuse parquet::file::metadata::KeyValue;\nuse wasm_bindgen::prelude::*;\n\n/// Controls the level of statistics to be computed by the writer\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n#[wasm_bindgen]\npub enum EnabledStatistics {\n    /// Compute no statistics\n    None,\n    /// Compute chunk-level statistics but not page-level\n    Chunk,\n    /// Compute page-level and chunk-level statistics\n    Page,\n}\n\nimpl From<EnabledStatistics> for parquet::file::properties::EnabledStatistics {\n    fn from(statistics: EnabledStatistics) -> Self {\n        match statistics {\n            EnabledStatistics::None => parquet::file::properties::EnabledStatistics::None,\n            EnabledStatistics::Chunk => parquet::file::properties::EnabledStatistics::Chunk,\n            EnabledStatistics::Page => parquet::file::properties::EnabledStatistics::Page,\n        }\n    }\n}\n\n/// Immutable struct to hold writing configuration for `writeParquet`.\n///\n/// Use {@linkcode WriterPropertiesBuilder} to create a configuration, then call {@linkcode\n/// WriterPropertiesBuilder.build} to create an instance of `WriterProperties`.\n#[wasm_bindgen]\npub struct WriterProperties(parquet::file::properties::WriterProperties);\n\nimpl From<WriterProperties> for parquet::file::properties::WriterProperties {\n    fn from(props: WriterProperties) -> Self {\n        props.0\n    }\n}\n\nimpl Default for WriterProperties {\n    fn default() -> Self {\n        WriterPropertiesBuilder::default().build()\n    }\n}\n\n#[wasm_bindgen(typescript_custom_section)]\nconst TS_FieldMetadata: &'static str = r#\"\nexport type KeyValueMetadata = Map<string, string>;\n\"#;\n\n#[wasm_bindgen]\nextern \"C\" {\n    /// Key value metadata\n    #[wasm_bindgen(typescript_type = \"KeyValueMetadata\")]\n    pub type KeyValueMetadata;\n}\n\n/// Builder to create a 
 writing configuration for `writeParquet`\n///\n/// Call {@linkcode build} on the finished builder to create an immutable {@linkcode WriterProperties} to pass to `writeParquet`\n#[wasm_bindgen]\npub struct WriterPropertiesBuilder(parquet::file::properties::WriterPropertiesBuilder);\n\n#[wasm_bindgen]\nimpl WriterPropertiesBuilder {\n    /// Returns default state of the builder.\n    #[wasm_bindgen(constructor)]\n    pub fn new() -> WriterPropertiesBuilder {\n        WriterPropertiesBuilder(parquet::file::properties::WriterProperties::builder())\n    }\n\n    /// Finalizes the configuration and returns immutable writer properties struct.\n    #[wasm_bindgen]\n    pub fn build(self) -> WriterProperties {\n        WriterProperties(self.0.build())\n    }\n\n    // ----------------------------------------------------------------------\n    // Writer properties related to a file\n\n    /// Sets writer version.\n    #[wasm_bindgen(js_name = setWriterVersion)]\n    pub fn set_writer_version(self, value: WriterVersion) -> Self {\n        Self(self.0.set_writer_version(value.into()))\n    }\n\n    /// Sets data page size limit.\n    #[wasm_bindgen(js_name = setDataPageSizeLimit)]\n    pub fn set_data_page_size_limit(self, value: usize) -> Self {\n        Self(self.0.set_data_page_size_limit(value))\n    }\n\n    /// Sets dictionary page size limit.\n    #[wasm_bindgen(js_name = setDictionaryPageSizeLimit)]\n    pub fn set_dictionary_page_size_limit(self, value: usize) -> Self {\n        Self(self.0.set_dictionary_page_size_limit(value))\n    }\n\n    /// Sets write batch size.\n    #[wasm_bindgen(js_name = setWriteBatchSize)]\n    pub fn set_write_batch_size(self, value: usize) -> Self {\n        Self(self.0.set_write_batch_size(value))\n    }\n\n    /// Sets maximum number of rows in a row group.\n    #[wasm_bindgen(js_name = setMaxRowGroupSize)]\n    pub fn set_max_row_group_size(self, value: usize) -> Self {\n        Self(self.0.set_max_row_group_size(value))\n    }\n\n   
 /// Sets \"created by\" property.\n    #[wasm_bindgen(js_name = setCreatedBy)]\n    pub fn set_created_by(self, value: String) -> Self {\n        Self(self.0.set_created_by(value))\n    }\n\n    /// Sets \"key_value_metadata\" property.\n    #[wasm_bindgen(js_name = setKeyValueMetadata)]\n    pub fn set_key_value_metadata(\n        self,\n        value: KeyValueMetadata,\n    ) -> WasmResult<WriterPropertiesBuilder> {\n        let options: Option<HashMap<String, String>> = serde_wasm_bindgen::from_value(value.obj)?;\n        let kv_options = options.map(|options| {\n            options\n                .iter()\n                .map(|(k, v)| KeyValue::new(k.clone(), Some(v.clone())))\n                .collect()\n        });\n        Ok(Self(self.0.set_key_value_metadata(kv_options)))\n    }\n\n    // ----------------------------------------------------------------------\n    // Setters for any column (global)\n\n    /// Sets encoding for any column.\n    ///\n    /// If dictionary is not enabled, this is treated as a primary encoding for all\n    /// columns. 
In case when dictionary is enabled for any column, this value is\n    /// considered to be a fallback encoding for that column.\n    ///\n    /// Panics if user tries to set dictionary encoding here, regardless of dictionary\n    /// encoding flag being set.\n    #[wasm_bindgen(js_name = setEncoding)]\n    pub fn set_encoding(self, value: Encoding) -> Self {\n        Self(self.0.set_encoding(value.into()))\n    }\n\n    /// Sets compression codec for any column.\n    #[wasm_bindgen(js_name = setCompression)]\n    pub fn set_compression(self, value: Compression) -> Self {\n        Self(self.0.set_compression(value.into()))\n    }\n\n    /// Sets flag to enable/disable dictionary encoding for any column.\n    ///\n    /// Use this method to set dictionary encoding, instead of explicitly specifying\n    /// encoding in `set_encoding` method.\n    #[wasm_bindgen(js_name = setDictionaryEnabled)]\n    pub fn set_dictionary_enabled(self, value: bool) -> Self {\n        Self(self.0.set_dictionary_enabled(value))\n    }\n\n    /// Sets flag to enable/disable statistics for any column.\n    #[wasm_bindgen(js_name = setStatisticsEnabled)]\n    pub fn set_statistics_enabled(self, value: EnabledStatistics) -> Self {\n        Self(self.0.set_statistics_enabled(value.into()))\n    }\n\n    // ----------------------------------------------------------------------\n    // Setters for a specific column\n\n    /// Sets encoding for a column.\n    /// Takes precedence over globally defined settings.\n    ///\n    /// If dictionary is not enabled, this is treated as a primary encoding for this\n    /// column. 
In case when dictionary is enabled for this column, either through\n    /// global defaults or explicitly, this value is considered to be a fallback\n    /// encoding for this column.\n    ///\n    /// Panics if user tries to set dictionary encoding here, regardless of dictionary\n    /// encoding flag being set.\n    #[wasm_bindgen(js_name = setColumnEncoding)]\n    pub fn set_column_encoding(self, col: String, value: Encoding) -> Self {\n        let column_path = parquet::schema::types::ColumnPath::from(col);\n        Self(self.0.set_column_encoding(column_path, value.into()))\n    }\n\n    /// Sets compression codec for a column.\n    /// Takes precedence over globally defined settings.\n    #[wasm_bindgen(js_name = setColumnCompression)]\n    pub fn set_column_compression(self, col: String, value: Compression) -> Self {\n        let column_path = parquet::schema::types::ColumnPath::from(col);\n        Self(self.0.set_column_compression(column_path, value.into()))\n    }\n\n    /// Sets flag to enable/disable dictionary encoding for a column.\n    /// Takes precedence over globally defined settings.\n    #[wasm_bindgen(js_name = setColumnDictionaryEnabled)]\n    pub fn set_column_dictionary_enabled(self, col: String, value: bool) -> Self {\n        let column_path = parquet::schema::types::ColumnPath::from(col);\n        Self(self.0.set_column_dictionary_enabled(column_path, value))\n    }\n\n    /// Sets flag to enable/disable statistics for a column.\n    /// Takes precedence over globally defined settings.\n    #[wasm_bindgen(js_name = setColumnStatisticsEnabled)]\n    pub fn set_column_statistics_enabled(self, col: String, value: EnabledStatistics) -> Self {\n        let column_path = parquet::schema::types::ColumnPath::from(col);\n        Self(\n            self.0\n                .set_column_statistics_enabled(column_path, value.into()),\n        )\n    }\n}\n\nimpl Default for WriterPropertiesBuilder {\n    fn default() -> Self {\n        
WriterPropertiesBuilder::new()\n    }\n}\n"
  },
  {
    "path": "templates/package.json",
    "content": "{\n  \"name\": \"parquet-wasm\",\n  \"collaborators\": [\n    \"Kyle Barron <kylebarron2@gmail.com>\"\n  ],\n  \"description\": \"WebAssembly Parquet reader and writer.\",\n  \"license\": \"MIT OR Apache-2.0\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/kylebarron/parquet-wasm\"\n  },\n  \"files\": [\n    \"*\"\n  ],\n  \"module\": \"bundler/parquet_wasm.js\",\n  \"types\": \"bundler/parquet_wasm.d.ts\",\n  \"sideEffects\": [],\n  \"keywords\": [\n    \"parquet\",\n    \"webassembly\",\n    \"arrow\"\n  ],\n  \"$comment\": \"We export ./esm/parquet_wasm.js so that code can work the same bundled and directly on the frontend\",\n  \"exports\": {\n    \"./bundler/parquet_wasm_bg.wasm\": \"./bundler/parquet_wasm_bg.wasm\",\n    \"./esm/parquet_wasm_bg.wasm\": \"./esm/parquet_wasm_bg.wasm\",\n    \"./node/parquet_wasm_bg.wasm\": \"./node/parquet_wasm_bg.wasm\",\n    \"./bundler\": {\n      \"types\": \"./bundler/parquet_wasm.d.ts\",\n      \"default\": \"./bundler/parquet_wasm.js\"\n    },\n    \"./esm\": {\n      \"types\": \"./esm/parquet_wasm.d.ts\",\n      \"default\": \"./esm/parquet_wasm.js\"\n    },\n    \"./node\": {\n      \"types\": \"./node/parquet_wasm.d.ts\",\n      \"default\": \"./node/parquet_wasm.js\"\n    },\n    \"./esm/parquet_wasm.js\": {\n      \"types\": \"./esm/parquet_wasm.d.ts\",\n      \"default\": \"./esm/parquet_wasm.js\"\n    },\n    \".\": {\n      \"node\": {\n        \"types\": \"./node/parquet_wasm.d.ts\",\n        \"default\": \"./node/parquet_wasm.js\"\n      },\n      \"types\": \"./esm/parquet_wasm.d.ts\",\n      \"default\": \"./esm/parquet_wasm.js\"\n    }\n  }\n}\n"
  },
  {
    "path": "tests/data/.python-version",
    "content": "3.12\n"
  },
  {
    "path": "tests/data/README.md",
    "content": "To create test data:\n\n```\nuv run python generate_data.py\n```\n"
  },
  {
    "path": "tests/data/generate_data.py",
    "content": "import pandas as pd\nimport pyarrow as pa\nimport pyarrow.feather as feather\nimport pyarrow.parquet as pq\n\ncompressions = [\"SNAPPY\", \"GZIP\", \"BROTLI\", \"LZ4\", \"ZSTD\", \"NONE\"]\n\n\ndef create_data():\n    data = {\n        \"str\": pa.array([\"a\", \"b\", \"c\", \"d\"], type=pa.string()),\n        \"uint8\": pa.array([1, 2, 3, 4], type=pa.uint8()),\n        \"int32\": pa.array([0, -2147483638, 2147483637, 1], type=pa.int32()),\n        \"bool\": pa.array([True, True, False, False], type=pa.bool_()),\n    }\n    return pa.table(data)\n\n\ndef write_data(table):\n    feather.write_feather(table, \"data.arrow\", compression=\"uncompressed\")\n\n    data_len = len(table)\n\n    for n_partitions in [1, 2]:\n        for compression in compressions:\n            row_group_size = data_len / n_partitions\n            compression_text = str(compression).lower()\n            fname = f\"{n_partitions}-partition-{compression_text}.parquet\"\n            pq.write_table(\n                table, fname, row_group_size=row_group_size, compression=compression\n            )\n\n\ndef write_empty_table():\n    pd.DataFrame().to_parquet(\"empty.parquet\")\n\n\ndef create_string_view_table():\n    data = {\n        \"string_view\": pa.array([\"a\", \"b\", \"c\", \"d\"], type=pa.string_view()),\n        \"binary_view\": pa.array([b\"a\", b\"b\", b\"c\", b\"d\"], type=pa.binary_view()),\n    }\n    return pa.table(data)\n\n\ndef write_string_view_table():\n    table = create_string_view_table()\n    pq.write_table(table, \"string_view.parquet\", compression=\"snappy\")\n\n\ndef main():\n    table = create_data()\n    write_data(table)\n    write_empty_table()\n    write_string_view_table()\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "tests/data/generate_geo_data.py",
    "content": "import json\n\nimport geopandas as gpd\nimport pyarrow as pa\nimport pyarrow.parquet as pq\nimport pygeos\nfrom geopandas.io.arrow import _create_metadata\n\ngdf = gpd.read_file(gpd.datasets.get_path(\"naturalearth_cities\"))\n\ngdf.to_parquet(\"naturalearth_cities_wkb.parquet\", index=None)\n\n\ndef construct_geoarrow_table(gdf: gpd.GeoDataFrame) -> pa.Table:\n    # Note in this quick example we omit metadata on the table header\n    non_geo_cols = [col for col in gdf.columns if col != gdf.geometry.name]\n    table = pa.Table.from_pandas(gdf[non_geo_cols])\n    pygeos_array = pygeos.from_shapely(gdf.geometry.values)\n    coords = pygeos.get_coordinates(pygeos_array)\n    parr = pa.FixedSizeListArray.from_arrays(coords.flat, 2)\n    geo_metadata = _create_metadata(gdf)\n    geo_metadata[\"columns\"][gdf._geometry_column_name][\"encoding\"] = \"geoarrow\"\n    table_with_geom = table.append_column(\"geometry\", parr)\n    metadata = table_with_geom.schema.metadata\n    metadata.update({b\"geo\": json.dumps(geo_metadata).encode()})\n    return table_with_geom.replace_schema_metadata(metadata)\n\n\ngdf_arrow_encoding = construct_geoarrow_table(gdf)\npq.write_table(\n    gdf_arrow_encoding, \"naturalearth_cities_geoarrow.parquet\", compression=\"snappy\"\n)\n"
  },
  {
    "path": "tests/data/pyproject.toml",
    "content": "[project]\nname = \"generate-test-data\"\nversion = \"0.1.0\"\ndescription = \"Add your description here\"\nreadme = \"README.md\"\nrequires-python = \">=3.12\"\ndependencies = [\n    \"pandas>=2.3.2\",\n    \"pyarrow>=21.0.0\",\n]\n\n[dependency-groups]\ndev = [\n    \"ipykernel>=6.30.1\",\n]\n"
  },
  {
    "path": "tests/data/uv.lock",
    "content": "version = 1\nrevision = 3\nrequires-python = \">=3.12\"\n\n[[package]]\nname = \"appnope\"\nversion = \"0.1.4\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz\", hash = \"sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee\", size = 4170, upload-time = \"2024-02-06T09:43:11.258Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl\", hash = \"sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c\", size = 4321, upload-time = \"2024-02-06T09:43:09.663Z\" },\n]\n\n[[package]]\nname = \"asttokens\"\nversion = \"3.0.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz\", hash = \"sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7\", size = 61978, upload-time = \"2024-11-30T04:30:14.439Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl\", hash = \"sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2\", size = 26918, upload-time = \"2024-11-30T04:30:10.946Z\" },\n]\n\n[[package]]\nname = \"cffi\"\nversion = \"2.0.0\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"pycparser\", marker = \"implementation_name != 'PyPy'\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz\", hash = \"sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529\", size = 523588, upload-time = 
\"2025-09-08T23:24:04.541Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d\", size = 185271, upload-time = \"2025-09-08T23:22:44.795Z\" },\n    { url = \"https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c\", size = 181048, upload-time = \"2025-09-08T23:22:45.938Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl\", hash = \"sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe\", size = 212529, upload-time = \"2025-09-08T23:22:47.349Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl\", hash = \"sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062\", size = 220097, upload-time = \"2025-09-08T23:22:48.677Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl\", hash = \"sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e\", size = 207983, upload-time = \"2025-09-08T23:22:50.06Z\" },\n    { url = \"https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl\", hash = 
\"sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037\", size = 206519, upload-time = \"2025-09-08T23:22:51.364Z\" },\n    { url = \"https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl\", hash = \"sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba\", size = 219572, upload-time = \"2025-09-08T23:22:52.902Z\" },\n    { url = \"https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94\", size = 222963, upload-time = \"2025-09-08T23:22:54.518Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187\", size = 221361, upload-time = \"2025-09-08T23:22:55.867Z\" },\n    { url = \"https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl\", hash = \"sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18\", size = 172932, upload-time = \"2025-09-08T23:22:57.188Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5\", size = 183557, upload-time = \"2025-09-08T23:22:58.351Z\" },\n    { url = \"https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl\", hash = \"sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6\", size = 177762, upload-time = 
\"2025-09-08T23:22:59.668Z\" },\n    { url = \"https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb\", size = 185230, upload-time = \"2025-09-08T23:23:00.879Z\" },\n    { url = \"https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca\", size = 181043, upload-time = \"2025-09-08T23:23:02.231Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl\", hash = \"sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b\", size = 212446, upload-time = \"2025-09-08T23:23:03.472Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl\", hash = \"sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b\", size = 220101, upload-time = \"2025-09-08T23:23:04.792Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl\", hash = \"sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2\", size = 207948, upload-time = \"2025-09-08T23:23:06.127Z\" },\n    { url = \"https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl\", hash = \"sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3\", 
size = 206422, upload-time = \"2025-09-08T23:23:07.753Z\" },\n    { url = \"https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl\", hash = \"sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26\", size = 219499, upload-time = \"2025-09-08T23:23:09.648Z\" },\n    { url = \"https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c\", size = 222928, upload-time = \"2025-09-08T23:23:10.928Z\" },\n    { url = \"https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b\", size = 221302, upload-time = \"2025-09-08T23:23:12.42Z\" },\n    { url = \"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl\", hash = \"sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27\", size = 172909, upload-time = \"2025-09-08T23:23:14.32Z\" },\n    { url = \"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75\", size = 183402, upload-time = \"2025-09-08T23:23:15.535Z\" },\n    { url = \"https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl\", hash = \"sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91\", size = 177780, upload-time = \"2025-09-08T23:23:16.761Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl\", hash = \"sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5\", size = 185320, upload-time = \"2025-09-08T23:23:18.087Z\" },\n    { url = \"https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl\", hash = \"sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13\", size = 181487, upload-time = \"2025-09-08T23:23:19.622Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl\", hash = \"sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b\", size = 220049, upload-time = \"2025-09-08T23:23:20.853Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl\", hash = \"sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c\", size = 207793, upload-time = \"2025-09-08T23:23:22.08Z\" },\n    { url = \"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl\", hash = \"sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef\", size = 206300, upload-time = \"2025-09-08T23:23:23.314Z\" },\n    { url = \"https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl\", hash = \"sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775\", size = 219244, upload-time = \"2025-09-08T23:23:24.541Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl\", hash = \"sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205\", size = 222828, upload-time = \"2025-09-08T23:23:26.143Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl\", hash = \"sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1\", size = 220926, upload-time = \"2025-09-08T23:23:27.873Z\" },\n    { url = \"https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl\", hash = \"sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f\", size = 175328, upload-time = \"2025-09-08T23:23:44.61Z\" },\n    { url = \"https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl\", hash = \"sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25\", size = 185650, upload-time = \"2025-09-08T23:23:45.848Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl\", hash = \"sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad\", size = 180687, upload-time = \"2025-09-08T23:23:47.105Z\" },\n    { url = \"https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl\", hash = \"sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9\", size = 188773, upload-time = \"2025-09-08T23:23:29.347Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl\", hash = \"sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d\", size = 185013, upload-time = \"2025-09-08T23:23:30.63Z\" },\n    { url = \"https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl\", hash = \"sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c\", size = 221593, upload-time = \"2025-09-08T23:23:31.91Z\" },\n    { url = \"https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl\", hash = \"sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8\", size = 209354, upload-time = \"2025-09-08T23:23:33.214Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl\", hash = \"sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc\", size = 208480, upload-time = \"2025-09-08T23:23:34.495Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl\", hash = \"sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592\", size = 221584, upload-time = \"2025-09-08T23:23:36.096Z\" },\n    { url = \"https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl\", hash = \"sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512\", size = 224443, upload-time = \"2025-09-08T23:23:37.328Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl\", hash = \"sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4\", size = 223437, upload-time = \"2025-09-08T23:23:38.945Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl\", hash = \"sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e\", size = 180487, upload-time = \"2025-09-08T23:23:40.423Z\" },\n    { url = \"https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl\", hash = \"sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6\", size = 191726, upload-time = \"2025-09-08T23:23:41.742Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl\", hash = \"sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9\", size = 184195, upload-time = \"2025-09-08T23:23:43.004Z\" },\n]\n\n[[package]]\nname = \"colorama\"\nversion = \"0.4.6\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz\", hash = \"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44\", size = 27697, upload-time = \"2022-10-25T02:36:22.414Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl\", hash = \"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6\", size = 25335, upload-time = \"2022-10-25T02:36:20.889Z\" },\n]\n\n[[package]]\nname = \"comm\"\nversion = 
\"0.2.3\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz\", hash = \"sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971\", size = 6319, upload-time = \"2025-07-25T14:02:04.452Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl\", hash = \"sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417\", size = 7294, upload-time = \"2025-07-25T14:02:02.896Z\" },\n]\n\n[[package]]\nname = \"debugpy\"\nversion = \"1.8.16\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/ca/d4/722d0bcc7986172ac2ef3c979ad56a1030e3afd44ced136d45f8142b1f4a/debugpy-1.8.16.tar.gz\", hash = \"sha256:31e69a1feb1cf6b51efbed3f6c9b0ef03bc46ff050679c4be7ea6d2e23540870\", size = 1643809, upload-time = \"2025-08-06T18:00:02.647Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/61/fb/0387c0e108d842c902801bc65ccc53e5b91d8c169702a9bbf4f7efcedf0c/debugpy-1.8.16-cp312-cp312-macosx_14_0_universal2.whl\", hash = \"sha256:b202e2843e32e80b3b584bcebfe0e65e0392920dc70df11b2bfe1afcb7a085e4\", size = 2511822, upload-time = \"2025-08-06T18:00:18.526Z\" },\n    { url = \"https://files.pythonhosted.org/packages/37/44/19e02745cae22bf96440141f94e15a69a1afaa3a64ddfc38004668fcdebf/debugpy-1.8.16-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:64473c4a306ba11a99fe0bb14622ba4fbd943eb004847d9b69b107bde45aa9ea\", size = 4230135, upload-time = \"2025-08-06T18:00:19.997Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f3/0b/19b1ba5ee4412f303475a2c7ad5858efb99c90eae5ec627aa6275c439957/debugpy-1.8.16-cp312-cp312-win32.whl\", hash = 
\"sha256:833a61ed446426e38b0dd8be3e9d45ae285d424f5bf6cd5b2b559c8f12305508\", size = 5281271, upload-time = \"2025-08-06T18:00:21.281Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b1/e0/bc62e2dc141de53bd03e2c7cb9d7011de2e65e8bdcdaa26703e4d28656ba/debugpy-1.8.16-cp312-cp312-win_amd64.whl\", hash = \"sha256:75f204684581e9ef3dc2f67687c3c8c183fde2d6675ab131d94084baf8084121\", size = 5323149, upload-time = \"2025-08-06T18:00:23.033Z\" },\n    { url = \"https://files.pythonhosted.org/packages/62/66/607ab45cc79e60624df386e233ab64a6d8d39ea02e7f80e19c1d451345bb/debugpy-1.8.16-cp313-cp313-macosx_14_0_universal2.whl\", hash = \"sha256:85df3adb1de5258dca910ae0bb185e48c98801ec15018a263a92bb06be1c8787\", size = 2496157, upload-time = \"2025-08-06T18:00:24.361Z\" },\n    { url = \"https://files.pythonhosted.org/packages/4d/a0/c95baae08a75bceabb79868d663a0736655e427ab9c81fb848da29edaeac/debugpy-1.8.16-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:bee89e948bc236a5c43c4214ac62d28b29388453f5fd328d739035e205365f0b\", size = 4222491, upload-time = \"2025-08-06T18:00:25.806Z\" },\n    { url = \"https://files.pythonhosted.org/packages/5b/2f/1c8db6ddd8a257c3cd2c46413b267f1d5fa3df910401c899513ce30392d6/debugpy-1.8.16-cp313-cp313-win32.whl\", hash = \"sha256:cf358066650439847ec5ff3dae1da98b5461ea5da0173d93d5e10f477c94609a\", size = 5281126, upload-time = \"2025-08-06T18:00:27.207Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d3/ba/c3e154ab307366d6c5a9c1b68de04914e2ce7fa2f50d578311d8cc5074b2/debugpy-1.8.16-cp313-cp313-win_amd64.whl\", hash = \"sha256:b5aea1083f6f50023e8509399d7dc6535a351cc9f2e8827d1e093175e4d9fa4c\", size = 5323094, upload-time = \"2025-08-06T18:00:29.03Z\" },\n    { url = \"https://files.pythonhosted.org/packages/52/57/ecc9ae29fa5b2d90107cd1d9bf8ed19aacb74b2264d986ae9d44fe9bdf87/debugpy-1.8.16-py2.py3-none-any.whl\", hash = 
\"sha256:19c9521962475b87da6f673514f7fd610328757ec993bf7ec0d8c96f9a325f9e\", size = 5287700, upload-time = \"2025-08-06T18:00:42.333Z\" },\n]\n\n[[package]]\nname = \"decorator\"\nversion = \"5.2.1\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz\", hash = \"sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360\", size = 56711, upload-time = \"2025-02-24T04:41:34.073Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl\", hash = \"sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a\", size = 9190, upload-time = \"2025-02-24T04:41:32.565Z\" },\n]\n\n[[package]]\nname = \"executing\"\nversion = \"2.2.1\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz\", hash = \"sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4\", size = 1129488, upload-time = \"2025-09-01T09:48:10.866Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl\", hash = \"sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017\", size = 28317, upload-time = \"2025-09-01T09:48:08.5Z\" },\n]\n\n[[package]]\nname = \"generate-test-data\"\nversion = \"0.1.0\"\nsource = { virtual = \".\" }\ndependencies = [\n    { name = \"pandas\" },\n    { name = \"pyarrow\" },\n]\n\n[package.dev-dependencies]\ndev = [\n    { name = \"ipykernel\" },\n]\n\n[package.metadata]\nrequires-dist = [\n    { name = \"pandas\", specifier = \">=2.3.2\" },\n    { name = \"pyarrow\", specifier = \">=21.0.0\" 
},\n]\n\n[package.metadata.requires-dev]\ndev = [{ name = \"ipykernel\", specifier = \">=6.30.1\" }]\n\n[[package]]\nname = \"ipykernel\"\nversion = \"6.30.1\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"appnope\", marker = \"sys_platform == 'darwin'\" },\n    { name = \"comm\" },\n    { name = \"debugpy\" },\n    { name = \"ipython\" },\n    { name = \"jupyter-client\" },\n    { name = \"jupyter-core\" },\n    { name = \"matplotlib-inline\" },\n    { name = \"nest-asyncio\" },\n    { name = \"packaging\" },\n    { name = \"psutil\" },\n    { name = \"pyzmq\" },\n    { name = \"tornado\" },\n    { name = \"traitlets\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/bb/76/11082e338e0daadc89c8ff866185de11daf67d181901038f9e139d109761/ipykernel-6.30.1.tar.gz\", hash = \"sha256:6abb270161896402e76b91394fcdce5d1be5d45f456671e5080572f8505be39b\", size = 166260, upload-time = \"2025-08-04T15:47:35.018Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/fc/c7/b445faca8deb954fe536abebff4ece5b097b923de482b26e78448c89d1dd/ipykernel-6.30.1-py3-none-any.whl\", hash = \"sha256:aa6b9fb93dca949069d8b85b6c79b2518e32ac583ae9c7d37c51d119e18b3fb4\", size = 117484, upload-time = \"2025-08-04T15:47:32.622Z\" },\n]\n\n[[package]]\nname = \"ipython\"\nversion = \"9.5.0\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"colorama\", marker = \"sys_platform == 'win32'\" },\n    { name = \"decorator\" },\n    { name = \"ipython-pygments-lexers\" },\n    { name = \"jedi\" },\n    { name = \"matplotlib-inline\" },\n    { name = \"pexpect\", marker = \"sys_platform != 'emscripten' and sys_platform != 'win32'\" },\n    { name = \"prompt-toolkit\" },\n    { name = \"pygments\" },\n    { name = \"stack-data\" },\n    { name = \"traitlets\" },\n]\nsdist = { url = 
\"https://files.pythonhosted.org/packages/6e/71/a86262bf5a68bf211bcc71fe302af7e05f18a2852fdc610a854d20d085e6/ipython-9.5.0.tar.gz\", hash = \"sha256:129c44b941fe6d9b82d36fc7a7c18127ddb1d6f02f78f867f402e2e3adde3113\", size = 4389137, upload-time = \"2025-08-29T12:15:21.519Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/08/2a/5628a99d04acb2d2f2e749cdf4ea571d2575e898df0528a090948018b726/ipython-9.5.0-py3-none-any.whl\", hash = \"sha256:88369ffa1d5817d609120daa523a6da06d02518e582347c29f8451732a9c5e72\", size = 612426, upload-time = \"2025-08-29T12:15:18.866Z\" },\n]\n\n[[package]]\nname = \"ipython-pygments-lexers\"\nversion = \"1.1.1\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"pygments\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz\", hash = \"sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81\", size = 8393, upload-time = \"2025-01-17T11:24:34.505Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl\", hash = \"sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c\", size = 8074, upload-time = \"2025-01-17T11:24:33.271Z\" },\n]\n\n[[package]]\nname = \"jedi\"\nversion = \"0.19.2\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"parso\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz\", hash = \"sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0\", size = 1231287, upload-time = \"2024-11-11T01:41:42.873Z\" }\nwheels = [\n    { url = 
\"https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl\", hash = \"sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9\", size = 1572278, upload-time = \"2024-11-11T01:41:40.175Z\" },\n]\n\n[[package]]\nname = \"jupyter-client\"\nversion = \"8.6.3\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"jupyter-core\" },\n    { name = \"python-dateutil\" },\n    { name = \"pyzmq\" },\n    { name = \"tornado\" },\n    { name = \"traitlets\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz\", hash = \"sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419\", size = 342019, upload-time = \"2024-09-17T10:44:17.613Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl\", hash = \"sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f\", size = 106105, upload-time = \"2024-09-17T10:44:15.218Z\" },\n]\n\n[[package]]\nname = \"jupyter-core\"\nversion = \"5.8.1\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"platformdirs\" },\n    { name = \"pywin32\", marker = \"platform_python_implementation != 'PyPy' and sys_platform == 'win32'\" },\n    { name = \"traitlets\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz\", hash = \"sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941\", size = 88923, upload-time = \"2025-05-27T07:38:16.655Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl\", 
hash = \"sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0\", size = 28880, upload-time = \"2025-05-27T07:38:15.137Z\" },\n]\n\n[[package]]\nname = \"matplotlib-inline\"\nversion = \"0.1.7\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"traitlets\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz\", hash = \"sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90\", size = 8159, upload-time = \"2024-04-15T13:44:44.803Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl\", hash = \"sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca\", size = 9899, upload-time = \"2024-04-15T13:44:43.265Z\" },\n]\n\n[[package]]\nname = \"nest-asyncio\"\nversion = \"1.6.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz\", hash = \"sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe\", size = 7418, upload-time = \"2024-01-21T14:25:19.227Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl\", hash = \"sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c\", size = 5195, upload-time = \"2024-01-21T14:25:17.223Z\" },\n]\n\n[[package]]\nname = \"numpy\"\nversion = \"2.3.3\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz\", hash = 
\"sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029\", size = 20576648, upload-time = \"2025-09-09T16:54:12.543Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf\", size = 20957014, upload-time = \"2025-09-09T15:56:29.966Z\" },\n    { url = \"https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25\", size = 14185220, upload-time = \"2025-09-09T15:56:32.175Z\" },\n    { url = \"https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl\", hash = \"sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe\", size = 5113918, upload-time = \"2025-09-09T15:56:34.175Z\" },\n    { url = \"https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl\", hash = \"sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b\", size = 6647922, upload-time = \"2025-09-09T15:56:36.149Z\" },\n    { url = \"https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8\", size = 14281991, upload-time = \"2025-09-09T15:56:40.548Z\" },\n    { url = \"https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl\", hash = 
\"sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20\", size = 16641643, upload-time = \"2025-09-09T15:56:43.343Z\" },\n    { url = \"https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea\", size = 16056787, upload-time = \"2025-09-09T15:56:46.141Z\" },\n    { url = \"https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7\", size = 18579598, upload-time = \"2025-09-09T15:56:49.844Z\" },\n    { url = \"https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl\", hash = \"sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf\", size = 6320800, upload-time = \"2025-09-09T15:56:52.499Z\" },\n    { url = \"https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl\", hash = \"sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb\", size = 12786615, upload-time = \"2025-09-09T15:56:54.422Z\" },\n    { url = \"https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl\", hash = \"sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5\", size = 10195936, upload-time = \"2025-09-09T15:56:56.541Z\" },\n    { url = \"https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf\", size = 20949588, upload-time = 
\"2025-09-09T15:56:59.087Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7\", size = 14177802, upload-time = \"2025-09-09T15:57:01.73Z\" },\n    { url = \"https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl\", hash = \"sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6\", size = 5106537, upload-time = \"2025-09-09T15:57:03.765Z\" },\n    { url = \"https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl\", hash = \"sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7\", size = 6640743, upload-time = \"2025-09-09T15:57:07.921Z\" },\n    { url = \"https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c\", size = 14278881, upload-time = \"2025-09-09T15:57:11.349Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93\", size = 16636301, upload-time = \"2025-09-09T15:57:14.245Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae\", size = 16053645, upload-time = \"2025-09-09T15:57:16.534Z\" },\n   
 { url = \"https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86\", size = 18578179, upload-time = \"2025-09-09T15:57:18.883Z\" },\n    { url = \"https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl\", hash = \"sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8\", size = 6312250, upload-time = \"2025-09-09T15:57:21.296Z\" },\n    { url = \"https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl\", hash = \"sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf\", size = 12783269, upload-time = \"2025-09-09T15:57:23.034Z\" },\n    { url = \"https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl\", hash = \"sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5\", size = 10195314, upload-time = \"2025-09-09T15:57:25.045Z\" },\n    { url = \"https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl\", hash = \"sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc\", size = 21048025, upload-time = \"2025-09-09T15:57:27.257Z\" },\n    { url = \"https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl\", hash = \"sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc\", size = 14301053, upload-time = \"2025-09-09T15:57:30.077Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl\", hash = \"sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b\", size = 5229444, upload-time = \"2025-09-09T15:57:32.733Z\" },\n    { url = \"https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl\", hash = \"sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19\", size = 6738039, upload-time = \"2025-09-09T15:57:34.328Z\" },\n    { url = \"https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30\", size = 14352314, upload-time = \"2025-09-09T15:57:36.255Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e\", size = 16701722, upload-time = \"2025-09-09T15:57:38.622Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl\", hash = \"sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3\", size = 16132755, upload-time = \"2025-09-09T15:57:41.16Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl\", hash = \"sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea\", size = 18651560, upload-time = \"2025-09-09T15:57:43.459Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl\", hash = \"sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd\", size = 6442776, upload-time = \"2025-09-09T15:57:45.793Z\" },\n    { url = \"https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl\", hash = \"sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d\", size = 12927281, upload-time = \"2025-09-09T15:57:47.492Z\" },\n    { url = \"https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl\", hash = \"sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1\", size = 10265275, upload-time = \"2025-09-09T15:57:49.647Z\" },\n    { url = \"https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl\", hash = \"sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593\", size = 20951527, upload-time = \"2025-09-09T15:57:52.006Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl\", hash = \"sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652\", size = 14186159, upload-time = \"2025-09-09T15:57:54.407Z\" },\n    { url = \"https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl\", hash = \"sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7\", size = 5114624, upload-time = \"2025-09-09T15:57:56.5Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl\", hash = \"sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a\", size = 6642627, upload-time = \"2025-09-09T15:57:58.206Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe\", size = 14296926, upload-time = \"2025-09-09T15:58:00.035Z\" },\n    { url = \"https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421\", size = 16638958, upload-time = \"2025-09-09T15:58:02.738Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl\", hash = \"sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021\", size = 16071920, upload-time = \"2025-09-09T15:58:05.029Z\" },\n    { url = \"https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl\", hash = \"sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf\", size = 18577076, upload-time = \"2025-09-09T15:58:07.745Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl\", hash = \"sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0\", size = 6366952, upload-time = \"2025-09-09T15:58:10.096Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl\", hash = \"sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8\", size = 12919322, upload-time = \"2025-09-09T15:58:12.138Z\" },\n    { url = \"https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl\", hash = \"sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe\", size = 10478630, upload-time = \"2025-09-09T15:58:14.64Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl\", hash = \"sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00\", size = 21047987, upload-time = \"2025-09-09T15:58:16.889Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl\", hash = \"sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a\", size = 14301076, upload-time = \"2025-09-09T15:58:20.343Z\" },\n    { url = \"https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl\", hash = \"sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d\", size = 5229491, upload-time = \"2025-09-09T15:58:22.481Z\" },\n    { url = \"https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl\", hash = \"sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a\", size = 6737913, upload-time = \"2025-09-09T15:58:24.569Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54\", size = 14352811, upload-time = \"2025-09-09T15:58:26.416Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e\", size = 16702689, upload-time = \"2025-09-09T15:58:28.831Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl\", hash = \"sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097\", size = 16133855, upload-time = \"2025-09-09T15:58:31.349Z\" },\n    { url = \"https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl\", hash = \"sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970\", size = 18652520, upload-time = \"2025-09-09T15:58:33.762Z\" },\n    { url = \"https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl\", hash = \"sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5\", size = 6515371, upload-time = \"2025-09-09T15:58:36.04Z\" },\n    { url = \"https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl\", hash = \"sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f\", size = 13112576, upload-time = \"2025-09-09T15:58:37.927Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl\", hash = \"sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b\", size = 10545953, upload-time = \"2025-09-09T15:58:40.576Z\" },\n]\n\n[[package]]\nname = \"packaging\"\nversion = \"25.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz\", hash = \"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f\", size = 165727, upload-time = \"2025-04-19T11:48:59.673Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl\", hash = \"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484\", size = 66469, upload-time = \"2025-04-19T11:48:57.875Z\" },\n]\n\n[[package]]\nname = \"pandas\"\nversion = \"2.3.2\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"numpy\" },\n    { name = \"python-dateutil\" },\n    { name = \"pytz\" },\n    { name = \"tzdata\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/79/8e/0e90233ac205ad182bd6b422532695d2b9414944a280488105d598c70023/pandas-2.3.2.tar.gz\", hash = \"sha256:ab7b58f8f82706890924ccdfb5f48002b83d2b5a3845976a9fb705d36c34dcdb\", size = 4488684, upload-time = \"2025-08-21T10:28:29.257Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/ec/db/614c20fb7a85a14828edd23f1c02db58a30abf3ce76f38806155d160313c/pandas-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:3fbb977f802156e7a3f829e9d1d5398f6192375a3e2d1a9ee0803e35fe70a2b9\", size = 11587652, upload-time = \"2025-08-21T10:27:15.888Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/99/b0/756e52f6582cade5e746f19bad0517ff27ba9c73404607c0306585c201b3/pandas-2.3.2-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:1b9b52693123dd234b7c985c68b709b0b009f4521000d0525f2b95c22f15944b\", size = 10717686, upload-time = \"2025-08-21T10:27:18.486Z\" },\n    { url = \"https://files.pythonhosted.org/packages/37/4c/dd5ccc1e357abfeee8353123282de17997f90ff67855f86154e5a13b81e5/pandas-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:0bd281310d4f412733f319a5bc552f86d62cddc5f51d2e392c8787335c994175\", size = 11278722, upload-time = \"2025-08-21T10:27:21.149Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d3/a4/f7edcfa47e0a88cda0be8b068a5bae710bf264f867edfdf7b71584ace362/pandas-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:96d31a6b4354e3b9b8a2c848af75d31da390657e3ac6f30c05c82068b9ed79b9\", size = 11987803, upload-time = \"2025-08-21T10:27:23.767Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f6/61/1bce4129f93ab66f1c68b7ed1c12bac6a70b1b56c5dab359c6bbcd480b52/pandas-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:df4df0b9d02bb873a106971bb85d448378ef14b86ba96f035f50bbd3688456b4\", size = 12766345, upload-time = \"2025-08-21T10:27:26.6Z\" },\n    { url = \"https://files.pythonhosted.org/packages/8e/46/80d53de70fee835531da3a1dae827a1e76e77a43ad22a8cd0f8142b61587/pandas-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:213a5adf93d020b74327cb2c1b842884dbdd37f895f42dcc2f09d451d949f811\", size = 13439314, upload-time = \"2025-08-21T10:27:29.213Z\" },\n    { url = \"https://files.pythonhosted.org/packages/28/30/8114832daff7489f179971dbc1d854109b7f4365a546e3ea75b6516cea95/pandas-2.3.2-cp312-cp312-win_amd64.whl\", hash = \"sha256:8c13b81a9347eb8c7548f53fd9a4f08d4dfe996836543f805c987bafa03317ae\", size = 10983326, upload-time = \"2025-08-21T10:27:31.901Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/27/64/a2f7bf678af502e16b472527735d168b22b7824e45a4d7e96a4fbb634b59/pandas-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:0c6ecbac99a354a051ef21c5307601093cb9e0f4b1855984a084bfec9302699e\", size = 11531061, upload-time = \"2025-08-21T10:27:34.647Z\" },\n    { url = \"https://files.pythonhosted.org/packages/54/4c/c3d21b2b7769ef2f4c2b9299fcadd601efa6729f1357a8dbce8dd949ed70/pandas-2.3.2-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:c6f048aa0fd080d6a06cc7e7537c09b53be6642d330ac6f54a600c3ace857ee9\", size = 10668666, upload-time = \"2025-08-21T10:27:37.203Z\" },\n    { url = \"https://files.pythonhosted.org/packages/50/e2/f775ba76ecfb3424d7f5862620841cf0edb592e9abd2d2a5387d305fe7a8/pandas-2.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:0064187b80a5be6f2f9c9d6bdde29372468751dfa89f4211a3c5871854cfbf7a\", size = 11332835, upload-time = \"2025-08-21T10:27:40.188Z\" },\n    { url = \"https://files.pythonhosted.org/packages/8f/52/0634adaace9be2d8cac9ef78f05c47f3a675882e068438b9d7ec7ef0c13f/pandas-2.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:4ac8c320bded4718b298281339c1a50fb00a6ba78cb2a63521c39bec95b0209b\", size = 12057211, upload-time = \"2025-08-21T10:27:43.117Z\" },\n    { url = \"https://files.pythonhosted.org/packages/0b/9d/2df913f14b2deb9c748975fdb2491da1a78773debb25abbc7cbc67c6b549/pandas-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:114c2fe4f4328cf98ce5716d1532f3ab79c5919f95a9cfee81d9140064a2e4d6\", size = 12749277, upload-time = \"2025-08-21T10:27:45.474Z\" },\n    { url = \"https://files.pythonhosted.org/packages/87/af/da1a2417026bd14d98c236dba88e39837182459d29dcfcea510b2ac9e8a1/pandas-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:48fa91c4dfb3b2b9bfdb5c24cd3567575f4e13f9636810462ffed8925352be5a\", size = 13415256, upload-time = \"2025-08-21T10:27:49.885Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/22/3c/f2af1ce8840ef648584a6156489636b5692c162771918aa95707c165ad2b/pandas-2.3.2-cp313-cp313-win_amd64.whl\", hash = \"sha256:12d039facec710f7ba305786837d0225a3444af7bbd9c15c32ca2d40d157ed8b\", size = 10982579, upload-time = \"2025-08-21T10:28:08.435Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f3/98/8df69c4097a6719e357dc249bf437b8efbde808038268e584421696cbddf/pandas-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl\", hash = \"sha256:c624b615ce97864eb588779ed4046186f967374185c047070545253a52ab2d57\", size = 12028163, upload-time = \"2025-08-21T10:27:52.232Z\" },\n    { url = \"https://files.pythonhosted.org/packages/0e/23/f95cbcbea319f349e10ff90db488b905c6883f03cbabd34f6b03cbc3c044/pandas-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl\", hash = \"sha256:0cee69d583b9b128823d9514171cabb6861e09409af805b54459bd0c821a35c2\", size = 11391860, upload-time = \"2025-08-21T10:27:54.673Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ad/1b/6a984e98c4abee22058aa75bfb8eb90dce58cf8d7296f8bc56c14bc330b0/pandas-2.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:2319656ed81124982900b4c37f0e0c58c015af9a7bbc62342ba5ad07ace82ba9\", size = 11309830, upload-time = \"2025-08-21T10:27:56.957Z\" },\n    { url = \"https://files.pythonhosted.org/packages/15/d5/f0486090eb18dd8710bf60afeaf638ba6817047c0c8ae5c6a25598665609/pandas-2.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:b37205ad6f00d52f16b6d09f406434ba928c1a1966e2771006a9033c736d30d2\", size = 11883216, upload-time = \"2025-08-21T10:27:59.302Z\" },\n    { url = \"https://files.pythonhosted.org/packages/10/86/692050c119696da19e20245bbd650d8dfca6ceb577da027c3a73c62a047e/pandas-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl\", hash = \"sha256:837248b4fc3a9b83b9c6214699a13f069dc13510a6a6d7f9ba33145d2841a012\", size = 12699743, upload-time = \"2025-08-21T10:28:02.447Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/cd/d7/612123674d7b17cf345aad0a10289b2a384bff404e0463a83c4a3a59d205/pandas-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl\", hash = \"sha256:d2c3554bd31b731cd6490d94a28f3abb8dd770634a9e06eb6d2911b9827db370\", size = 13186141, upload-time = \"2025-08-21T10:28:05.377Z\" },\n]\n\n[[package]]\nname = \"parso\"\nversion = \"0.8.5\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz\", hash = \"sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a\", size = 401205, upload-time = \"2025-08-23T15:15:28.028Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl\", hash = \"sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887\", size = 106668, upload-time = \"2025-08-23T15:15:25.663Z\" },\n]\n\n[[package]]\nname = \"pexpect\"\nversion = \"4.9.0\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"ptyprocess\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz\", hash = \"sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f\", size = 166450, upload-time = \"2023-11-25T09:07:26.339Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl\", hash = \"sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523\", size = 63772, upload-time = \"2023-11-25T06:56:14.81Z\" },\n]\n\n[[package]]\nname = \"platformdirs\"\nversion = \"4.4.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = 
\"https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz\", hash = \"sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf\", size = 21634, upload-time = \"2025-08-26T14:32:04.268Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl\", hash = \"sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85\", size = 18654, upload-time = \"2025-08-26T14:32:02.735Z\" },\n]\n\n[[package]]\nname = \"prompt-toolkit\"\nversion = \"3.0.52\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"wcwidth\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz\", hash = \"sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855\", size = 434198, upload-time = \"2025-08-27T15:24:02.057Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl\", hash = \"sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955\", size = 391431, upload-time = \"2025-08-27T15:23:59.498Z\" },\n]\n\n[[package]]\nname = \"psutil\"\nversion = \"7.0.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz\", hash = \"sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456\", size = 497003, upload-time = \"2025-02-13T21:54:07.946Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl\", hash = 
\"sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25\", size = 238051, upload-time = \"2025-02-13T21:54:12.36Z\" },\n    { url = \"https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl\", hash = \"sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da\", size = 239535, upload-time = \"2025-02-13T21:54:16.07Z\" },\n    { url = \"https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91\", size = 275004, upload-time = \"2025-02-13T21:54:18.662Z\" },\n    { url = \"https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34\", size = 277986, upload-time = \"2025-02-13T21:54:21.811Z\" },\n    { url = \"https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993\", size = 279544, upload-time = \"2025-02-13T21:54:24.68Z\" },\n    { url = \"https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl\", hash = \"sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99\", size = 241053, upload-time = \"2025-02-13T21:54:34.31Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl\", hash = \"sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553\", size = 244885, upload-time = \"2025-02-13T21:54:37.486Z\" },\n]\n\n[[package]]\nname = \"ptyprocess\"\nversion = \"0.7.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz\", hash = \"sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220\", size = 70762, upload-time = \"2020-12-28T15:15:30.155Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl\", hash = \"sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35\", size = 13993, upload-time = \"2020-12-28T15:15:28.35Z\" },\n]\n\n[[package]]\nname = \"pure-eval\"\nversion = \"0.2.3\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz\", hash = \"sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42\", size = 19752, upload-time = \"2024-07-21T12:58:21.801Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl\", hash = \"sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0\", size = 11842, upload-time = \"2024-07-21T12:58:20.04Z\" },\n]\n\n[[package]]\nname = \"pyarrow\"\nversion = \"21.0.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = 
\"https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz\", hash = \"sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc\", size = 1133487, upload-time = \"2025-07-18T00:57:31.761Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl\", hash = \"sha256:3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd\", size = 31160305, upload-time = \"2025-07-18T00:55:35.373Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl\", hash = \"sha256:b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876\", size = 32684264, upload-time = \"2025-07-18T00:55:39.303Z\" },\n    { url = \"https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl\", hash = \"sha256:e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d\", size = 41108099, upload-time = \"2025-07-18T00:55:42.889Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl\", hash = \"sha256:b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e\", size = 42829529, upload-time = \"2025-07-18T00:55:47.069Z\" },\n    { url = \"https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82\", size = 43367883, upload-time = \"2025-07-18T00:55:53.069Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623\", size = 45133802, upload-time = \"2025-07-18T00:55:57.714Z\" },\n    { url = \"https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18\", size = 26203175, upload-time = \"2025-07-18T00:56:01.364Z\" },\n    { url = \"https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl\", hash = \"sha256:e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a\", size = 31154306, upload-time = \"2025-07-18T00:56:04.42Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl\", hash = \"sha256:d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe\", size = 32680622, upload-time = \"2025-07-18T00:56:07.505Z\" },\n    { url = \"https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl\", hash = \"sha256:f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd\", size = 41104094, upload-time = \"2025-07-18T00:56:10.994Z\" },\n    { url = \"https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl\", hash = \"sha256:69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61\", size = 42825576, upload-time = \"2025-07-18T00:56:15.569Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d\", size = 43368342, upload-time = \"2025-07-18T00:56:19.531Z\" },\n    { url = \"https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99\", size = 45131218, upload-time = \"2025-07-18T00:56:23.347Z\" },\n    { url = \"https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636\", size = 26087551, upload-time = \"2025-07-18T00:56:26.758Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl\", hash = \"sha256:a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da\", size = 31290064, upload-time = \"2025-07-18T00:56:30.214Z\" },\n    { url = \"https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl\", hash = \"sha256:1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7\", size = 32727837, upload-time = \"2025-07-18T00:56:33.935Z\" },\n    { url = \"https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl\", hash = \"sha256:65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6\", size = 41014158, upload-time = \"2025-07-18T00:56:37.528Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl\", hash = \"sha256:3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8\", size = 42667885, upload-time = \"2025-07-18T00:56:41.483Z\" },\n    { url = \"https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl\", hash = \"sha256:fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503\", size = 43276625, upload-time = \"2025-07-18T00:56:48.002Z\" },\n    { url = \"https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl\", hash = \"sha256:6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79\", size = 44951890, upload-time = \"2025-07-18T00:56:52.568Z\" },\n    { url = \"https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl\", hash = \"sha256:222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10\", size = 26371006, upload-time = \"2025-07-18T00:56:56.379Z\" },\n]\n\n[[package]]\nname = \"pycparser\"\nversion = \"2.23\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz\", hash = \"sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2\", size = 173734, upload-time = \"2025-09-09T13:23:47.91Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl\", hash = \"sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934\", size = 118140, upload-time = \"2025-09-09T13:23:46.651Z\" 
},\n]\n\n[[package]]\nname = \"pygments\"\nversion = \"2.19.2\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz\", hash = \"sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887\", size = 4968631, upload-time = \"2025-06-21T13:39:12.283Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl\", hash = \"sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b\", size = 1225217, upload-time = \"2025-06-21T13:39:07.939Z\" },\n]\n\n[[package]]\nname = \"python-dateutil\"\nversion = \"2.9.0.post0\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"six\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz\", hash = \"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3\", size = 342432, upload-time = \"2024-03-01T18:36:20.211Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl\", hash = \"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427\", size = 229892, upload-time = \"2024-03-01T18:36:18.57Z\" },\n]\n\n[[package]]\nname = \"pytz\"\nversion = \"2025.2\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz\", hash = \"sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3\", size = 320884, upload-time = \"2025-03-25T02:25:00.538Z\" }\nwheels = [\n    { url = 
\"https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl\", hash = \"sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00\", size = 509225, upload-time = \"2025-03-25T02:24:58.468Z\" },\n]\n\n[[package]]\nname = \"pywin32\"\nversion = \"311\"\nsource = { registry = \"https://pypi.org/simple\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl\", hash = \"sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31\", size = 8706543, upload-time = \"2025-07-14T20:13:20.765Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl\", hash = \"sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067\", size = 9495040, upload-time = \"2025-07-14T20:13:22.543Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl\", hash = \"sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852\", size = 8710102, upload-time = \"2025-07-14T20:13:24.682Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl\", hash = \"sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d\", size = 8705700, upload-time = \"2025-07-14T20:13:26.471Z\" },\n    { url = \"https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl\", hash = \"sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d\", size = 9494700, upload-time = \"2025-07-14T20:13:28.243Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl\", hash = \"sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a\", size = 8709318, upload-time = \"2025-07-14T20:13:30.348Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl\", hash = \"sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee\", size = 8840714, upload-time = \"2025-07-14T20:13:32.449Z\" },\n    { url = \"https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl\", hash = \"sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87\", size = 9656800, upload-time = \"2025-07-14T20:13:34.312Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl\", hash = \"sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42\", size = 8932540, upload-time = \"2025-07-14T20:13:36.379Z\" },\n]\n\n[[package]]\nname = \"pyzmq\"\nversion = \"27.1.0\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"cffi\", marker = \"implementation_name == 'pypy'\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz\", hash = \"sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540\", size = 281750, upload-time = \"2025-09-08T23:10:18.157Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl\", hash = \"sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc\", size = 
1306279, upload-time = \"2025-09-08T23:08:03.807Z\" },\n    { url = \"https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl\", hash = \"sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113\", size = 895645, upload-time = \"2025-09-08T23:08:05.301Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233\", size = 652574, upload-time = \"2025-09-08T23:08:06.828Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31\", size = 840995, upload-time = \"2025-09-08T23:08:08.396Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl\", hash = \"sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28\", size = 1642070, upload-time = \"2025-09-08T23:08:09.989Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl\", hash = \"sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856\", size = 2021121, upload-time = \"2025-09-08T23:08:11.907Z\" },\n    { url = \"https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl\", hash = \"sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496\", size = 1878550, 
upload-time = \"2025-09-08T23:08:13.513Z\" },\n    { url = \"https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl\", hash = \"sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd\", size = 559184, upload-time = \"2025-09-08T23:08:15.163Z\" },\n    { url = \"https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl\", hash = \"sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf\", size = 619480, upload-time = \"2025-09-08T23:08:17.192Z\" },\n    { url = \"https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl\", hash = \"sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f\", size = 552993, upload-time = \"2025-09-08T23:08:18.926Z\" },\n    { url = \"https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl\", hash = \"sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5\", size = 1122436, upload-time = \"2025-09-08T23:08:20.801Z\" },\n    { url = \"https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl\", hash = \"sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6\", size = 1156301, upload-time = \"2025-09-08T23:08:22.47Z\" },\n    { url = \"https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl\", hash = \"sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7\", size = 1341197, upload-time = \"2025-09-08T23:08:24.286Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl\", hash = \"sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05\", size = 897275, upload-time = \"2025-09-08T23:08:26.063Z\" },\n    { url = \"https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9\", size = 660469, upload-time = \"2025-09-08T23:08:27.623Z\" },\n    { url = \"https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128\", size = 847961, upload-time = \"2025-09-08T23:08:29.672Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl\", hash = \"sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39\", size = 1650282, upload-time = \"2025-09-08T23:08:31.349Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl\", hash = \"sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97\", size = 2024468, upload-time = \"2025-09-08T23:08:33.543Z\" },\n    { url = \"https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl\", hash = \"sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db\", size = 1885394, upload-time = \"2025-09-08T23:08:35.51Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl\", hash = \"sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c\", size = 574964, upload-time = \"2025-09-08T23:08:37.178Z\" },\n    { url = \"https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl\", hash = \"sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2\", size = 641029, upload-time = \"2025-09-08T23:08:40.595Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl\", hash = \"sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e\", size = 561541, upload-time = \"2025-09-08T23:08:42.668Z\" },\n    { url = \"https://files.pythonhosted.org/packages/87/45/19efbb3000956e82d0331bafca5d9ac19ea2857722fa2caacefb6042f39d/pyzmq-27.1.0-cp314-cp314t-macosx_10_15_universal2.whl\", hash = \"sha256:ce980af330231615756acd5154f29813d553ea555485ae712c491cd483df6b7a\", size = 1341197, upload-time = \"2025-09-08T23:08:44.973Z\" },\n    { url = \"https://files.pythonhosted.org/packages/48/43/d72ccdbf0d73d1343936296665826350cb1e825f92f2db9db3e61c2162a2/pyzmq-27.1.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.whl\", hash = \"sha256:1779be8c549e54a1c38f805e56d2a2e5c009d26de10921d7d51cfd1c8d4632ea\", size = 897175, upload-time = \"2025-09-08T23:08:46.601Z\" },\n    { url = \"https://files.pythonhosted.org/packages/2f/2e/a483f73a10b65a9ef0161e817321d39a770b2acf8bcf3004a28d90d14a94/pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl\", hash = \"sha256:7200bb0f03345515df50d99d3db206a0a6bee1955fbb8c453c76f5bf0e08fb96\", size = 660427, upload-time = \"2025-09-08T23:08:48.187Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/f5/d2/5f36552c2d3e5685abe60dfa56f91169f7a2d99bbaf67c5271022ab40863/pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl\", hash = \"sha256:01c0e07d558b06a60773744ea6251f769cd79a41a97d11b8bf4ab8f034b0424d\", size = 847929, upload-time = \"2025-09-08T23:08:49.76Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c4/2a/404b331f2b7bf3198e9945f75c4c521f0c6a3a23b51f7a4a401b94a13833/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl\", hash = \"sha256:80d834abee71f65253c91540445d37c4c561e293ba6e741b992f20a105d69146\", size = 1650193, upload-time = \"2025-09-08T23:08:51.7Z\" },\n    { url = \"https://files.pythonhosted.org/packages/1c/0b/f4107e33f62a5acf60e3ded67ed33d79b4ce18de432625ce2fc5093d6388/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_i686.whl\", hash = \"sha256:544b4e3b7198dde4a62b8ff6685e9802a9a1ebf47e77478a5eb88eca2a82f2fd\", size = 2024388, upload-time = \"2025-09-08T23:08:53.393Z\" },\n    { url = \"https://files.pythonhosted.org/packages/0d/01/add31fe76512642fd6e40e3a3bd21f4b47e242c8ba33efb6809e37076d9b/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl\", hash = \"sha256:cedc4c68178e59a4046f97eca31b148ddcf51e88677de1ef4e78cf06c5376c9a\", size = 1885316, upload-time = \"2025-09-08T23:08:55.702Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c4/59/a5f38970f9bf07cee96128de79590bb354917914a9be11272cfc7ff26af0/pyzmq-27.1.0-cp314-cp314t-win32.whl\", hash = \"sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92\", size = 587472, upload-time = \"2025-09-08T23:08:58.18Z\" },\n    { url = \"https://files.pythonhosted.org/packages/70/d8/78b1bad170f93fcf5e3536e70e8fadac55030002275c9a29e8f5719185de/pyzmq-27.1.0-cp314-cp314t-win_amd64.whl\", hash = \"sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0\", size = 661401, upload-time = \"2025-09-08T23:08:59.802Z\" },\n    { url = 
\"https://files.pythonhosted.org/packages/81/d6/4bfbb40c9a0b42fc53c7cf442f6385db70b40f74a783130c5d0a5aa62228/pyzmq-27.1.0-cp314-cp314t-win_arm64.whl\", hash = \"sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7\", size = 575170, upload-time = \"2025-09-08T23:09:01.418Z\" },\n]\n\n[[package]]\nname = \"six\"\nversion = \"1.17.0\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz\", hash = \"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81\", size = 34031, upload-time = \"2024-12-04T17:35:28.174Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl\", hash = \"sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274\", size = 11050, upload-time = \"2024-12-04T17:35:26.475Z\" },\n]\n\n[[package]]\nname = \"stack-data\"\nversion = \"0.6.3\"\nsource = { registry = \"https://pypi.org/simple\" }\ndependencies = [\n    { name = \"asttokens\" },\n    { name = \"executing\" },\n    { name = \"pure-eval\" },\n]\nsdist = { url = \"https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz\", hash = \"sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9\", size = 44707, upload-time = \"2023-09-30T13:58:05.479Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl\", hash = \"sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695\", size = 24521, upload-time = \"2023-09-30T13:58:03.53Z\" },\n]\n\n[[package]]\nname = \"tornado\"\nversion = \"6.5.2\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = 
\"https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz\", hash = \"sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0\", size = 510821, upload-time = \"2025-08-08T18:27:00.78Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl\", hash = \"sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6\", size = 442563, upload-time = \"2025-08-08T18:26:42.945Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl\", hash = \"sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef\", size = 440729, upload-time = \"2025-08-08T18:26:44.473Z\" },\n    { url = \"https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e\", size = 444295, upload-time = \"2025-08-08T18:26:46.021Z\" },\n    { url = \"https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882\", size = 443644, upload-time = \"2025-08-08T18:26:47.625Z\" },\n    { url = \"https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108\", size = 443878, upload-time = 
\"2025-08-08T18:26:50.599Z\" },\n    { url = \"https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl\", hash = \"sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c\", size = 444549, upload-time = \"2025-08-08T18:26:51.864Z\" },\n    { url = \"https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl\", hash = \"sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4\", size = 443973, upload-time = \"2025-08-08T18:26:53.625Z\" },\n    { url = \"https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl\", hash = \"sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04\", size = 443954, upload-time = \"2025-08-08T18:26:55.072Z\" },\n    { url = \"https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl\", hash = \"sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0\", size = 445023, upload-time = \"2025-08-08T18:26:56.677Z\" },\n    { url = \"https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl\", hash = \"sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f\", size = 445427, upload-time = \"2025-08-08T18:26:57.91Z\" },\n    { url = \"https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl\", hash = \"sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af\", size = 444456, upload-time = \"2025-08-08T18:26:59.207Z\" },\n]\n\n[[package]]\nname = \"traitlets\"\nversion = \"5.14.3\"\nsource = { registry = 
\"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz\", hash = \"sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7\", size = 161621, upload-time = \"2024-04-19T11:11:49.746Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl\", hash = \"sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f\", size = 85359, upload-time = \"2024-04-19T11:11:46.763Z\" },\n]\n\n[[package]]\nname = \"tzdata\"\nversion = \"2025.2\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz\", hash = \"sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9\", size = 196380, upload-time = \"2025-03-23T13:54:43.652Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl\", hash = \"sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8\", size = 347839, upload-time = \"2025-03-23T13:54:41.845Z\" },\n]\n\n[[package]]\nname = \"wcwidth\"\nversion = \"0.2.13\"\nsource = { registry = \"https://pypi.org/simple\" }\nsdist = { url = \"https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz\", hash = \"sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5\", size = 101301, upload-time = \"2024-01-06T02:10:57.829Z\" }\nwheels = [\n    { url = \"https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl\", hash = 
\"sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859\", size = 34166, upload-time = \"2024-01-06T02:10:55.763Z\" },\n]\n"
  },
  {
    "path": "tests/js/ffi.test.ts",
    "content": "import * as wasm from \"../../pkg/node/parquet_wasm\";\nimport { readFileSync } from \"fs\";\nimport * as arrow from \"apache-arrow\";\nimport {\n  testArrowTablesEqual,\n  readExpectedArrowData,\n  temporaryServer,\n} from \"./utils\";\nimport { parseTable, parseRecordBatch } from \"arrow-js-ffi\";\nimport { it } from \"vitest\";\n\n// Path from repo root\nconst dataDir = \"tests/data\";\n\nconst WASM_MEMORY = wasm.wasmMemory();\n\nit(\"read via FFI\", async (t) => {\n  const expectedTable = readExpectedArrowData();\n\n  const dataPath = `${dataDir}/1-partition-brotli.parquet`;\n  const buffer = readFileSync(dataPath);\n  const arr = new Uint8Array(buffer);\n  const ffiTable = wasm.readParquet(arr).intoFFI();\n\n  const table = parseTable(\n    WASM_MEMORY.buffer,\n    ffiTable.arrayAddrs(),\n    ffiTable.schemaAddr()\n  );\n  testArrowTablesEqual(expectedTable, table);\n});\n\nit(\"read file stream\", async (t) => {\n  const server = await temporaryServer();\n  const listeningPort = server.addresses()[0].port;\n  const rootUrl = `http://localhost:${listeningPort}`;\n\n  const expectedTable = readExpectedArrowData();\n\n  const url = `${rootUrl}/1-partition-brotli.parquet`;\n  const stream = (await wasm.readParquetStream(\n    url\n  )) as unknown as wasm.RecordBatch[];\n\n  const batches = [];\n  for await (const wasmRecordBatch of stream) {\n    const ffiRecordBatch = wasmRecordBatch.intoFFI();\n    const recordBatch = parseRecordBatch(\n      WASM_MEMORY.buffer,\n      ffiRecordBatch.arrayAddr(),\n      ffiRecordBatch.schemaAddr(),\n      true\n    );\n    batches.push(recordBatch);\n  }\n  const initialTable = new arrow.Table(batches);\n  testArrowTablesEqual(expectedTable, initialTable);\n  await server.close();\n});\n"
  },
  {
    "path": "tests/js/geo-metadata.test.ts",
    "content": "import * as wasm from \"../../pkg/node/parquet_wasm\";\nimport { readFileSync } from \"fs\";\nimport { tableFromIPC } from \"apache-arrow\";\nimport { it, expect } from \"vitest\";\n\n// Path from repo root\nconst dataDir = \"tests/data\";\nconst NATURALEARTH_CITIES_WKB = \"naturalearth_cities_wkb.parquet\";\nconst NATURALEARTH_CITIES_GEOARROW = \"naturalearth_cities_geoarrow.parquet\";\n\nconst EXPECTED_META_WKB = `\\\n{\"primary_column\": \"geometry\", \"columns\": {\"geometry\": {\"encoding\": \"WKB\", \"crs\": {\"$schema\": \"https://proj.org/schemas/v0.4/projjson.schema.json\", \"type\": \"GeographicCRS\", \"name\": \"WGS 84\", \"datum_ensemble\": {\"name\": \"World Geodetic System 1984 ensemble\", \"members\": [{\"name\": \"World Geodetic System 1984 (Transit)\"}, {\"name\": \"World Geodetic System 1984 (G730)\"}, {\"name\": \"World Geodetic System 1984 (G873)\"}, {\"name\": \"World Geodetic System 1984 (G1150)\"}, {\"name\": \"World Geodetic System 1984 (G1674)\"}, {\"name\": \"World Geodetic System 1984 (G1762)\"}, {\"name\": \"World Geodetic System 1984 (G2139)\"}], \"ellipsoid\": {\"name\": \"WGS 84\", \"semi_major_axis\": 6378137, \"inverse_flattening\": 298.257223563}, \"accuracy\": \"2.0\", \"id\": {\"authority\": \"EPSG\", \"code\": 6326}}, \"coordinate_system\": {\"subtype\": \"ellipsoidal\", \"axis\": [{\"name\": \"Geodetic latitude\", \"abbreviation\": \"Lat\", \"direction\": \"north\", \"unit\": \"degree\"}, {\"name\": \"Geodetic longitude\", \"abbreviation\": \"Lon\", \"direction\": \"east\", \"unit\": \"degree\"}]}, \"scope\": \"Horizontal component of 3D system.\", \"area\": \"World.\", \"bbox\": {\"south_latitude\": -90, \"west_longitude\": -180, \"north_latitude\": 90, \"east_longitude\": 180}, \"id\": {\"authority\": \"EPSG\", \"code\": 4326}}, \"geometry_type\": \"Point\", \"bbox\": [-175.22056447761656, -41.29997393927641, 179.21664709402887, 64.15002361973922]}}, \"version\": \"0.4.0\", \"creator\": {\"library\": 
\"geopandas\", \"version\": \"0.11.1\"}}`;\n\nconst EXPECTED_META_GEOARROW = `\\\n{\"primary_column\": \"geometry\", \"columns\": {\"geometry\": {\"encoding\": \"geoarrow\", \"crs\": {\"$schema\": \"https://proj.org/schemas/v0.4/projjson.schema.json\", \"type\": \"GeographicCRS\", \"name\": \"WGS 84\", \"datum_ensemble\": {\"name\": \"World Geodetic System 1984 ensemble\", \"members\": [{\"name\": \"World Geodetic System 1984 (Transit)\"}, {\"name\": \"World Geodetic System 1984 (G730)\"}, {\"name\": \"World Geodetic System 1984 (G873)\"}, {\"name\": \"World Geodetic System 1984 (G1150)\"}, {\"name\": \"World Geodetic System 1984 (G1674)\"}, {\"name\": \"World Geodetic System 1984 (G1762)\"}, {\"name\": \"World Geodetic System 1984 (G2139)\"}], \"ellipsoid\": {\"name\": \"WGS 84\", \"semi_major_axis\": 6378137, \"inverse_flattening\": 298.257223563}, \"accuracy\": \"2.0\", \"id\": {\"authority\": \"EPSG\", \"code\": 6326}}, \"coordinate_system\": {\"subtype\": \"ellipsoidal\", \"axis\": [{\"name\": \"Geodetic latitude\", \"abbreviation\": \"Lat\", \"direction\": \"north\", \"unit\": \"degree\"}, {\"name\": \"Geodetic longitude\", \"abbreviation\": \"Lon\", \"direction\": \"east\", \"unit\": \"degree\"}]}, \"scope\": \"Horizontal component of 3D system.\", \"area\": \"World.\", \"bbox\": {\"south_latitude\": -90, \"west_longitude\": -180, \"north_latitude\": 90, \"east_longitude\": 180}, \"id\": {\"authority\": \"EPSG\", \"code\": 4326}}, \"geometry_type\": \"Point\", \"bbox\": [-175.22056447761656, -41.29997393927641, 179.21664709402887, 64.15002361973922]}}, \"version\": \"0.4.0\", \"creator\": {\"library\": \"geopandas\", \"version\": \"0.11.1\"}}`;\n\n// We skip these test for now because it's not clear whether Parquet metadata\n// should be assigned onto the Arrow table metadata.\nit.skip(\"test geo-arrow-spec (wkb) metadata passed through\", (t) => {\n  const dataPath = `${dataDir}/${NATURALEARTH_CITIES_WKB}`;\n  const arr = new 
Uint8Array(readFileSync(dataPath));\n  const table = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n  expect(\n    table.schema.metadata.get(\"geo\"),\n    \"arrow table metadata should match expected\"\n  ).toStrictEqual(EXPECTED_META_WKB);\n});\n\nit.skip(\"test geo-arrow-spec (geoarrow encoding) metadata passed through\", (t) => {\n  const dataPath = `${dataDir}/${NATURALEARTH_CITIES_GEOARROW}`;\n  const arr = new Uint8Array(readFileSync(dataPath));\n  const table = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n\n  expect(\n    table.schema.metadata.get(\"geo\"),\n    \"arrow table metadata should match expected\"\n  ).toStrictEqual(EXPECTED_META_GEOARROW);\n\n  const firstCoord = table.getChild(\"geometry\").get(0).toArray();\n  expect(\n    isCloseEqual(firstCoord[0], 12.453386544971766),\n    \"Nested list should be read correctly\"\n  ).toBeTruthy();\n  expect(\n    isCloseEqual(firstCoord[1], 41.903282179960115),\n    \"Nested list should be read correctly\"\n  ).toBeTruthy();\n});\n\nfunction isCloseEqual(a: number, b: number, eps: number = 0.0001): boolean {\n  return Math.abs(a - b) < eps;\n}\n"
  },
  {
    "path": "tests/js/index.test.ts",
    "content": "import * as wasm from \"../../pkg/node/parquet_wasm\";\n\nwasm.setPanicHook();\n\nimport \"./read-write.test\";\nimport \"./ffi.test\";\nimport \"./geo-metadata.test\";\nimport \"./schema.test\";\n"
  },
  {
    "path": "tests/js/read-write.test.ts",
    "content": "import { DataType, tableFromIPC, tableToIPC } from \"apache-arrow\";\nimport { readFileSync } from \"fs\";\nimport { describe, expect, it } from \"vitest\";\nimport * as wasm from \"../../pkg/node/parquet_wasm\";\nimport {\n  readExpectedArrowData,\n  temporaryServer,\n  testArrowTablesEqual,\n} from \"./utils\";\n\n// Path from repo root\nconst dataDir = \"tests/data\";\nconst testFiles = [\n  \"1-partition-brotli.parquet\",\n  \"1-partition-gzip.parquet\",\n  // \"1-partition-lz4.parquet\",\n  \"1-partition-none.parquet\",\n  \"1-partition-snappy.parquet\",\n  \"1-partition-zstd.parquet\",\n  \"2-partition-brotli.parquet\",\n  \"2-partition-gzip.parquet\",\n  // \"2-partition-lz4.parquet\",\n  \"2-partition-none.parquet\",\n  \"2-partition-snappy.parquet\",\n  \"2-partition-zstd.parquet\",\n];\n\ndescribe(\"read file\", async (t) => {\n  const expectedTable = readExpectedArrowData();\n\n  for (const testFile of testFiles) {\n    it(testFile, () => {\n      const dataPath = `${dataDir}/${testFile}`;\n      const arr = new Uint8Array(readFileSync(dataPath));\n      const table = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n      testArrowTablesEqual(expectedTable, table);\n    });\n  }\n});\n\nit(\"read-write-read round trip (with writer properties)\", async (t) => {\n  const dataPath = `${dataDir}/1-partition-brotli.parquet`;\n  const buffer = readFileSync(dataPath);\n  const arr = new Uint8Array(buffer);\n  const initialTable = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n\n  const writerProperties = new wasm.WriterPropertiesBuilder().build();\n\n  const parquetBuffer = wasm.writeParquet(\n    wasm.Table.fromIPCStream(tableToIPC(initialTable, \"stream\")),\n    writerProperties\n  );\n  const table = tableFromIPC(wasm.readParquet(parquetBuffer).intoIPCStream());\n\n  testArrowTablesEqual(initialTable, table);\n});\n\nit(\"read-write-read round trip (no writer properties provided)\", async (t) => {\n  const dataPath = 
`${dataDir}/1-partition-brotli.parquet`;\n  const buffer = readFileSync(dataPath);\n  const arr = new Uint8Array(buffer);\n  const initialTable = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n\n  const parquetBuffer = wasm.writeParquet(\n    wasm.Table.fromIPCStream(tableToIPC(initialTable, \"stream\"))\n  );\n  const table = tableFromIPC(wasm.readParquet(parquetBuffer).intoIPCStream());\n\n  testArrowTablesEqual(initialTable, table);\n});\n\nit(\"error produced trying to read file with arrayBuffer\", (t) => {\n  const arrayBuffer = new ArrayBuffer(10);\n  try {\n    // @ts-expect-error input should be Uint8Array\n    wasm.readParquet(arrayBuffer);\n  } catch (err) {\n    expect(err instanceof Error, \"err expected to be an Error\").toBeTruthy();\n    expect(err.message, \"Expected error message\").toStrictEqual(\n      \"Empty input provided or not a Uint8Array.\"\n    );\n  }\n});\n\nit(\"reads empty file\", async (t) => {\n  const dataPath = `${dataDir}/empty.parquet`;\n  const buffer = readFileSync(dataPath);\n  const arr = new Uint8Array(buffer);\n  const table = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n\n  expect(table.schema.fields.length).toStrictEqual(0);\n  expect(table.numRows).toStrictEqual(0);\n  expect(table.numCols).toStrictEqual(0);\n  // console.log(\"empty table schema\", table.schema);\n});\n\nit(\"read stream-write stream-read stream round trip (no writer properties provided)\", async (t) => {\n  const server = await temporaryServer();\n  const listeningPort = server.addresses()[0].port;\n  const rootUrl = `http://localhost:${listeningPort}`;\n\n  const expectedTable = readExpectedArrowData();\n\n  const url = `${rootUrl}/1-partition-brotli.parquet`;\n  const originalStream = await wasm.readParquetStream(url);\n\n  const stream = await wasm.transformParquetStream(originalStream);\n  const accumulatedBuffer = new Uint8Array(\n    await new Response(stream).arrayBuffer()\n  );\n  const roundtripTable = tableFromIPC(\n    
wasm.readParquet(accumulatedBuffer).intoIPCStream()\n  );\n\n  testArrowTablesEqual(expectedTable, roundtripTable);\n  await server.close();\n});\n\ndescribe(\"read string view file\", async (t) => {\n  it(\"synchronous read\", async (t) => {\n    const dataPath = `${dataDir}/string_view.parquet`;\n    const arr = new Uint8Array(readFileSync(dataPath));\n    const table = tableFromIPC(wasm.readParquet(arr).intoIPCStream());\n\n    const stringCol = table.getChild(\"string_view\")!;\n    expect(DataType.isUtf8(stringCol.type)).toBeTruthy();\n\n    const binaryCol = table.getChild(\"binary_view\")!;\n    expect(DataType.isBinary(binaryCol.type)).toBeTruthy();\n  });\n\n  it(\"asynchronous read\", async (t) => {\n    const server = await temporaryServer();\n    const listeningPort = server.addresses()[0].port;\n    const rootUrl = `http://localhost:${listeningPort}`;\n\n    const url = `${rootUrl}/string_view.parquet`;\n    let file = await wasm.ParquetFile.fromUrl(url);\n    let wasmTable = await file.read();\n    let jsTable = tableFromIPC(wasmTable.intoIPCStream());\n\n    const stringCol = jsTable.getChild(\"string_view\")!;\n    expect(DataType.isUtf8(stringCol.type)).toBeTruthy();\n\n    const binaryCol = jsTable.getChild(\"binary_view\")!;\n    expect(DataType.isBinary(binaryCol.type)).toBeTruthy();\n\n    await server.close();\n  });\n});\n"
  },
  {
    "path": "tests/js/schema.test.ts",
    "content": "import * as wasm from \"../../pkg/node/parquet_wasm\";\nimport { readFileSync } from \"fs\";\nimport * as arrow from \"apache-arrow\";\nimport { readExpectedArrowData } from \"./utils\";\nimport { parseSchema } from \"arrow-js-ffi\";\nimport { it, expect } from \"vitest\";\n\n// Path from repo root\nconst dataDir = \"tests/data\";\n\nconst WASM_MEMORY = wasm.wasmMemory();\n\nit(\"read schema via FFI\", async (t) => {\n  const expectedTable = readExpectedArrowData();\n\n  const dataPath = `${dataDir}/1-partition-brotli.parquet`;\n  const buffer = readFileSync(dataPath);\n  const arr = new Uint8Array(buffer);\n  const ffiSchema = wasm.readSchema(arr).intoFFI();\n\n  const schema = parseSchema(WASM_MEMORY.buffer, ffiSchema.addr());\n\n  expect(expectedTable.schema.fields.length).toStrictEqual(\n    schema.fields.length\n  );\n});\n\nit(\"read schema via IPC\", async (t) => {\n  const expectedTable = readExpectedArrowData();\n\n  const dataPath = `${dataDir}/1-partition-brotli.parquet`;\n  const buffer = readFileSync(dataPath);\n  const arr = new Uint8Array(buffer);\n  const ipcSchema = wasm.readSchema(arr).intoIPCStream();\n\n  const schema = arrow.tableFromIPC(ipcSchema).schema;\n\n  expect(expectedTable.schema.fields.length).toStrictEqual(\n    schema.fields.length\n  );\n});\n"
  },
  {
    "path": "tests/js/utils.ts",
    "content": "import { expect } from \"vitest\";\nimport { readFileSync } from \"fs\";\nimport { tableFromIPC, Table } from \"apache-arrow\";\nimport fastify, { FastifyInstance } from \"fastify\";\nimport fastifyStatic from \"@fastify/static\";\nimport { join } from \"path\";\nconst dataDir = \"tests/data\";\n\n/** Test that two Arrow tables are equal */\nexport function testArrowTablesEqual(table1: Table, table2: Table): void {\n  expect(table1.schema.metadata).toStrictEqual(table2.schema.metadata);\n  expect(table1.schema.fields.length).toStrictEqual(\n    table2.schema.fields.length\n  );\n\n  // Note that calling deepEquals on the schema object correctly can fail when in one schema the\n  // type is Int_ with bitWidth 32 and the other has Int32.\n  for (let i = 0; i < table1.schema.fields.length; i++) {\n    const field1 = table1.schema.fields[i];\n    const field2 = table2.schema.fields[i];\n    expect(field1.name).toStrictEqual(field2.name);\n    expect(field1.nullable).toStrictEqual(field2.nullable);\n    // Note that calling deepEquals on the type fails! Instead you have to check the typeId\n    // t.deepEquals(field1.type, field2.type);\n    expect(field1.typeId).toStrictEqual(field2.typeId);\n  }\n\n  // However deepEquals on the table itself can give false negatives because Arrow tables can have\n  // different underlying memory for the same data representation, i.e. if one table has one record\n  // batch and the other has two\n  const fieldNames = table1.schema.fields.map((f) => f.name);\n  for (const fieldName of fieldNames) {\n    const vector1 = table1.getChild(fieldName);\n    const vector2 = table2.getChild(fieldName);\n\n    // Ideally we'd be checking vector1.toArray() against vector2.toArray(), but there's apparently\n    //   a bug in arrow JS, so for now we use .toJSON() to check for comparison :shrug:\n    //   not ok 23 RangeError: offset is out of bounds\n    // ---\n    //   operator: error\n    //   stack: |-\n    //     RangeError: offset is out of bounds\n    //         at Uint8Array.set (<anonymous>)\n    //         at data.reduce.array (/Users/kyle/github/rust/parquet-wasm/node_modules/apache-arrow/src/vector.ts:256:36)\n    //         at Array.reduce (<anonymous>)\n    //         at Vector.toArray (/Users/kyle/github/rust/parquet-wasm/node_modules/apache-arrow/src/vector.ts:255:42)\n    //         at testArrowTablesEqual (/Users/kyle/github/rust/parquet-wasm/tests/js/utils.ts:25:15)\n    //         at /Users/kyle/github/rust/parquet-wasm/tests/js/arrow1.ts:46:25\n    //         at step (/Users/kyle/github/rust/parquet-wasm/tests/js/arrow1.ts:33:23)\n    //         at Object.next (/Users/kyle/github/rust/parquet-wasm/tests/js/arrow1.ts:14:53)\n    //         at /Users/kyle/github/rust/parquet-wasm/tests/js/arrow1.ts:8:71\n    //         at new Promise (<anonymous>)\n    // ...\n    expect(\n      vector1.toJSON(),\n      `data arrays should be equal for column ${fieldName}`\n    ).toStrictEqual(vector2.toJSON());\n  }\n}\n\n/** Load expected arrow data written from Python in Arrow IPC File format */\nexport function readExpectedArrowData(): Table {\n  const expectedArrowPath = `${dataDir}/data.arrow`;\n  const buffer = readFileSync(expectedArrowPath);\n  return tableFromIPC(buffer);\n}\n\nexport async function temporaryServer() {\n  const server = fastify().register(fastifyStatic, {\n    root: join(__dirname, \"../data\"),\n  });\n  await server.listen({\n    port: 0,\n    host: \"localhost\",\n  });\n  return server as FastifyInstance;\n}\n"
  },
  {
    "path": "tests/web.rs",
    "content": "//! Test suite for the Web and headless browsers.\n\n#![cfg(target_arch = \"wasm32\")]\n// Necessary for the assert_eq! which now fails clippy\n#![allow(clippy::eq_op)]\n\nextern crate wasm_bindgen_test;\nuse wasm_bindgen_test::*;\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n#[wasm_bindgen_test]\nfn pass() {\n    assert_eq!(1 + 1, 2);\n}\n"
  },
  {
    "path": "tsconfig.docs.json",
    "content": "{\n  \"include\": [\"pkg/**/*.d.ts\"]\n}\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"module\": \"commonjs\",\n    \"moduleResolution\": \"node\"\n  },\n  \"include\": [\n    \"tests/**/*\"\n  ],\n  \"exclude\": [\n    \"node_modules\"\n  ]\n}\n"
  },
  {
    "path": "typedoc.json",
    "content": "{\n  \"name\": \"parquet-wasm\",\n  \"cleanOutputDir\": true,\n  \"darkHighlightTheme\": \"github-dark\",\n  \"entryPoints\": [\n    \"pkg/bundler/parquet_wasm.d.ts\",\n    \"pkg/esm/parquet_wasm.d.ts\",\n    \"pkg/node/parquet_wasm.d.ts\"\n  ],\n  \"lightHighlightTheme\": \"github-light\",\n  \"tsconfig\": \"tsconfig.docs.json\",\n  \"out\": \"docs_build\",\n  \"excludePrivate\": true,\n  \"excludeProtected\": true,\n  \"excludeExternals\": true,\n  \"includeVersion\": true\n}\n"
  }
]