main f635cad80ea8 cached
2133 files
19.8 MB
5.3M tokens
4143 symbols
1 requests
Copy disabled (too large) Download .txt
Showing preview only (21,371K chars total). Download the full file to get everything.
Repository: apache/arrow-datafusion-comet
Branch: main
Commit: f635cad80ea8
Files: 2133
Total size: 19.8 MB

Directory structure:
gitextract_2d7o17rp/

├── .asf.yaml
├── .claude/
│   └── skills/
│       ├── audit-comet-expression/
│       │   └── SKILL.md
│       └── review-comet-pr/
│           └── SKILL.md
├── .dockerignore
├── .github/
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.yml
│   │   └── feature_request.yml
│   ├── actions/
│   │   ├── java-test/
│   │   │   └── action.yaml
│   │   ├── rust-test/
│   │   │   └── action.yaml
│   │   ├── setup-builder/
│   │   │   └── action.yaml
│   │   ├── setup-iceberg-builder/
│   │   │   └── action.yaml
│   │   ├── setup-macos-builder/
│   │   │   └── action.yaml
│   │   └── setup-spark-builder/
│   │       └── action.yaml
│   ├── dependabot.yml
│   ├── pull_request_template.md
│   └── workflows/
│       ├── codeql.yml
│       ├── docker-publish.yml
│       ├── docs.yaml
│       ├── iceberg_spark_test.yml
│       ├── label_new_issues.yml
│       ├── miri.yml
│       ├── pr_benchmark_check.yml
│       ├── pr_build_linux.yml
│       ├── pr_build_macos.yml
│       ├── pr_markdown_format.yml
│       ├── pr_missing_suites.yml
│       ├── pr_rat_check.yml
│       ├── pr_title_check.yml
│       ├── spark_sql_test.yml
│       ├── spark_sql_test_native_iceberg_compat.yml
│       ├── stale.yml
│       ├── take.yml
│       └── validate_workflows.yml
├── .gitignore
├── .mvn/
│   └── wrapper/
│       ├── maven-wrapper.jar
│       └── maven-wrapper.properties
├── .scalafix.conf
├── CHANGELOG.md
├── LICENSE.txt
├── Makefile
├── NOTICE.txt
├── README.md
├── benchmarks/
│   ├── Dockerfile
│   ├── README.md
│   ├── pyspark/
│   │   ├── README.md
│   │   ├── benchmarks/
│   │   │   ├── __init__.py
│   │   │   ├── base.py
│   │   │   └── shuffle.py
│   │   ├── generate_data.py
│   │   ├── run_all_benchmarks.sh
│   │   └── run_benchmark.py
│   └── tpc/
│       ├── .gitignore
│       ├── README.md
│       ├── create-iceberg-tables.py
│       ├── drop-caches.sh
│       ├── engines/
│       │   ├── comet-hashjoin.toml
│       │   ├── comet-iceberg-hashjoin.toml
│       │   ├── comet-iceberg.toml
│       │   ├── comet.toml
│       │   ├── gluten.toml
│       │   └── spark.toml
│       ├── generate-comparison.py
│       ├── infra/
│       │   └── docker/
│       │       ├── Dockerfile
│       │       ├── Dockerfile.build-comet
│       │       ├── docker-compose-laptop.yml
│       │       └── docker-compose.yml
│       ├── queries/
│       │   ├── tpcds/
│       │   │   ├── q1.sql
│       │   │   ├── q10.sql
│       │   │   ├── q11.sql
│       │   │   ├── q12.sql
│       │   │   ├── q13.sql
│       │   │   ├── q14.sql
│       │   │   ├── q15.sql
│       │   │   ├── q16.sql
│       │   │   ├── q17.sql
│       │   │   ├── q18.sql
│       │   │   ├── q19.sql
│       │   │   ├── q2.sql
│       │   │   ├── q20.sql
│       │   │   ├── q21.sql
│       │   │   ├── q22.sql
│       │   │   ├── q23.sql
│       │   │   ├── q24.sql
│       │   │   ├── q25.sql
│       │   │   ├── q26.sql
│       │   │   ├── q27.sql
│       │   │   ├── q28.sql
│       │   │   ├── q29.sql
│       │   │   ├── q3.sql
│       │   │   ├── q30.sql
│       │   │   ├── q31.sql
│       │   │   ├── q32.sql
│       │   │   ├── q33.sql
│       │   │   ├── q34.sql
│       │   │   ├── q35.sql
│       │   │   ├── q36.sql
│       │   │   ├── q37.sql
│       │   │   ├── q38.sql
│       │   │   ├── q39.sql
│       │   │   ├── q4.sql
│       │   │   ├── q40.sql
│       │   │   ├── q41.sql
│       │   │   ├── q42.sql
│       │   │   ├── q43.sql
│       │   │   ├── q44.sql
│       │   │   ├── q45.sql
│       │   │   ├── q46.sql
│       │   │   ├── q47.sql
│       │   │   ├── q48.sql
│       │   │   ├── q49.sql
│       │   │   ├── q5.sql
│       │   │   ├── q50.sql
│       │   │   ├── q51.sql
│       │   │   ├── q52.sql
│       │   │   ├── q53.sql
│       │   │   ├── q54.sql
│       │   │   ├── q55.sql
│       │   │   ├── q56.sql
│       │   │   ├── q57.sql
│       │   │   ├── q58.sql
│       │   │   ├── q59.sql
│       │   │   ├── q6.sql
│       │   │   ├── q60.sql
│       │   │   ├── q61.sql
│       │   │   ├── q62.sql
│       │   │   ├── q63.sql
│       │   │   ├── q64.sql
│       │   │   ├── q65.sql
│       │   │   ├── q66.sql
│       │   │   ├── q67.sql
│       │   │   ├── q68.sql
│       │   │   ├── q69.sql
│       │   │   ├── q7.sql
│       │   │   ├── q70.sql
│       │   │   ├── q71.sql
│       │   │   ├── q72.sql
│       │   │   ├── q73.sql
│       │   │   ├── q74.sql
│       │   │   ├── q75.sql
│       │   │   ├── q76.sql
│       │   │   ├── q77.sql
│       │   │   ├── q78.sql
│       │   │   ├── q79.sql
│       │   │   ├── q8.sql
│       │   │   ├── q80.sql
│       │   │   ├── q81.sql
│       │   │   ├── q82.sql
│       │   │   ├── q83.sql
│       │   │   ├── q84.sql
│       │   │   ├── q85.sql
│       │   │   ├── q86.sql
│       │   │   ├── q87.sql
│       │   │   ├── q88.sql
│       │   │   ├── q89.sql
│       │   │   ├── q9.sql
│       │   │   ├── q90.sql
│       │   │   ├── q91.sql
│       │   │   ├── q92.sql
│       │   │   ├── q93.sql
│       │   │   ├── q94.sql
│       │   │   ├── q95.sql
│       │   │   ├── q96.sql
│       │   │   ├── q97.sql
│       │   │   ├── q98.sql
│       │   │   └── q99.sql
│       │   └── tpch/
│       │       ├── q1.sql
│       │       ├── q10.sql
│       │       ├── q11.sql
│       │       ├── q12.sql
│       │       ├── q13.sql
│       │       ├── q14.sql
│       │       ├── q15.sql
│       │       ├── q16.sql
│       │       ├── q17.sql
│       │       ├── q18.sql
│       │       ├── q19.sql
│       │       ├── q2.sql
│       │       ├── q20.sql
│       │       ├── q21.sql
│       │       ├── q22.sql
│       │       ├── q3.sql
│       │       ├── q4.sql
│       │       ├── q5.sql
│       │       ├── q6.sql
│       │       ├── q7.sql
│       │       ├── q8.sql
│       │       └── q9.sql
│       ├── run.py
│       └── tpcbench.py
├── common/
│   ├── pom.xml
│   └── src/
│       ├── main/
│       │   ├── java/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── arrow/
│       │   │           │   └── c/
│       │   │           │       ├── AbstractCometSchemaImporter.java
│       │   │           │       └── ArrowImporter.java
│       │   │           └── comet/
│       │   │               ├── CometNativeException.java
│       │   │               ├── CometOutOfMemoryError.java
│       │   │               ├── CometRuntimeException.java
│       │   │               ├── CometSchemaImporter.java
│       │   │               ├── IcebergApi.java
│       │   │               ├── NativeBase.java
│       │   │               ├── ParquetRuntimeException.java
│       │   │               ├── exceptions/
│       │   │               │   └── CometQueryExecutionException.java
│       │   │               ├── parquet/
│       │   │               │   ├── AbstractColumnReader.java
│       │   │               │   ├── ArrowConstantColumnReader.java
│       │   │               │   ├── ArrowRowIndexColumnReader.java
│       │   │               │   ├── BloomFilterReader.java
│       │   │               │   ├── ColumnIndexReader.java
│       │   │               │   ├── ColumnPageReader.java
│       │   │               │   ├── ColumnReader.java
│       │   │               │   ├── CometFileKeyUnwrapper.java
│       │   │               │   ├── CometInputFile.java
│       │   │               │   ├── DictionaryPageReader.java
│       │   │               │   ├── FileReader.java
│       │   │               │   ├── FooterReader.java
│       │   │               │   ├── IcebergCometNativeBatchReader.java
│       │   │               │   ├── IndexFilter.java
│       │   │               │   ├── LazyColumnReader.java
│       │   │               │   ├── Native.java
│       │   │               │   ├── NativeBatchReader.java
│       │   │               │   ├── NativeColumnReader.java
│       │   │               │   ├── ParquetColumnSpec.java
│       │   │               │   ├── ParquetMetadataSerializer.java
│       │   │               │   ├── ReadOptions.java
│       │   │               │   ├── RowGroupFilter.java
│       │   │               │   ├── RowGroupReader.java
│       │   │               │   ├── TypeUtil.java
│       │   │               │   ├── Utils.java
│       │   │               │   ├── WrappedInputFile.java
│       │   │               │   └── WrappedSeekableInputStream.java
│       │   │               └── vector/
│       │   │                   ├── CometDecodedVector.java
│       │   │                   ├── CometDelegateVector.java
│       │   │                   ├── CometDictionary.java
│       │   │                   ├── CometDictionaryVector.java
│       │   │                   ├── CometLazyVector.java
│       │   │                   ├── CometListVector.java
│       │   │                   ├── CometMapVector.java
│       │   │                   ├── CometPlainVector.java
│       │   │                   ├── CometSelectionVector.java
│       │   │                   ├── CometStructVector.java
│       │   │                   └── CometVector.java
│       │   ├── resources/
│       │   │   └── log4j2.properties
│       │   ├── scala/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   ├── CometConf.scala
│       │   │           │   ├── Constants.scala
│       │   │           │   ├── objectstore/
│       │   │           │   │   └── NativeConfig.scala
│       │   │           │   ├── package.scala
│       │   │           │   ├── parquet/
│       │   │           │   │   ├── CometParquetUtils.scala
│       │   │           │   │   └── CometReaderThreadPool.scala
│       │   │           │   └── vector/
│       │   │           │       ├── NativeUtil.scala
│       │   │           │       └── StreamReader.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       ├── CastOverflowException.scala
│       │   │                       ├── execution/
│       │   │                       │   └── arrow/
│       │   │                       │       ├── ArrowReaderIterator.scala
│       │   │                       │       ├── ArrowWriters.scala
│       │   │                       │       └── CometArrowConverters.scala
│       │   │                       ├── parquet/
│       │   │                       │   ├── CometParquetReadSupport.scala
│       │   │                       │   └── CometSparkToParquetSchemaConverter.scala
│       │   │                       └── util/
│       │   │                           └── Utils.scala
│       │   ├── spark-3.4/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── ShimBatchReader.scala
│       │   │           │       └── ShimFileFormat.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           └── ShimTaskMetrics.scala
│       │   ├── spark-3.5/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── ShimBatchReader.scala
│       │   │           │       └── ShimFileFormat.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           └── ShimTaskMetrics.scala
│       │   ├── spark-3.x/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           └── comet/
│       │   │               └── shims/
│       │   │                   ├── CometTypeShim.scala
│       │   │                   └── ShimCometConf.scala
│       │   └── spark-4.0/
│       │       └── org/
│       │           └── apache/
│       │               ├── comet/
│       │               │   └── shims/
│       │               │       ├── CometTypeShim.scala
│       │               │       ├── ShimBatchReader.scala
│       │               │       ├── ShimCometConf.scala
│       │               │       └── ShimFileFormat.scala
│       │               └── spark/
│       │                   └── sql/
│       │                       └── comet/
│       │                           └── shims/
│       │                               └── ShimTaskMetrics.scala
│       └── test/
│           ├── java/
│           │   └── org/
│           │       └── apache/
│           │           └── comet/
│           │               └── parquet/
│           │                   ├── TestColumnReader.java
│           │                   ├── TestCometInputFile.java
│           │                   ├── TestFileReader.java
│           │                   └── TestUtils.java
│           └── resources/
│               ├── log4j.properties
│               └── log4j2.properties
├── conf/
│   └── log4rs.yaml
├── dev/
│   ├── cargo.config
│   ├── changelog/
│   │   ├── 0.1.0.md
│   │   ├── 0.10.0.md
│   │   ├── 0.11.0.md
│   │   ├── 0.12.0.md
│   │   ├── 0.13.0.md
│   │   ├── 0.14.0.md
│   │   ├── 0.14.1.md
│   │   ├── 0.2.0.md
│   │   ├── 0.3.0.md
│   │   ├── 0.4.0.md
│   │   ├── 0.5.0.md
│   │   ├── 0.6.0.md
│   │   ├── 0.7.0.md
│   │   ├── 0.8.0.md
│   │   ├── 0.9.0.md
│   │   └── 0.9.1.md
│   ├── checkstyle-suppressions.xml
│   ├── ci/
│   │   ├── check-suites.py
│   │   └── check-working-tree-clean.sh
│   ├── copyright/
│   │   └── java-header.txt
│   ├── diffs/
│   │   ├── 3.4.3.diff
│   │   ├── 3.5.8.diff
│   │   ├── 4.0.1.diff
│   │   └── iceberg/
│   │       ├── 1.10.0.diff
│   │       ├── 1.8.1.diff
│   │       └── 1.9.1.diff
│   ├── ensure-jars-have-correct-contents.sh
│   ├── generate-release-docs.sh
│   ├── regenerate-golden-files.sh
│   ├── release/
│   │   ├── build-release-comet.sh
│   │   ├── check-rat-report.py
│   │   ├── comet-rm/
│   │   │   ├── Dockerfile
│   │   │   └── build-comet-native-libs.sh
│   │   ├── create-tarball.sh
│   │   ├── generate-changelog.py
│   │   ├── publish-to-maven.sh
│   │   ├── rat_exclude_files.txt
│   │   ├── release-tarball.sh
│   │   ├── requirements.txt
│   │   ├── run-rat.sh
│   │   ├── verify-release-candidate.sh
│   │   └── verifying-release-candidates.md
│   └── scalastyle-config.xml
├── docs/
│   ├── .gitignore
│   ├── Makefile
│   ├── README.md
│   ├── build.sh
│   ├── generate-versions.py
│   ├── make.bat
│   ├── requirements.txt
│   ├── source/
│   │   ├── _static/
│   │   │   └── theme_overrides.css
│   │   ├── _templates/
│   │   │   ├── docs-sidebar.html
│   │   │   └── layout.html
│   │   ├── about/
│   │   │   ├── gluten_comparison.md
│   │   │   └── index.md
│   │   ├── asf/
│   │   │   └── index.md
│   │   ├── conf.py
│   │   ├── contributor-guide/
│   │   │   ├── adding_a_new_expression.md
│   │   │   ├── adding_a_new_operator.md
│   │   │   ├── benchmark-results/
│   │   │   │   ├── blaze-0.5.0-tpcds.json
│   │   │   │   ├── blaze-0.5.0-tpch.json
│   │   │   │   ├── gluten-1.4.0-tpcds.json
│   │   │   │   ├── gluten-1.4.0-tpch.json
│   │   │   │   ├── spark-3.5.3-tpcds.json
│   │   │   │   ├── spark-3.5.3-tpch.json
│   │   │   │   ├── tpc-ds.md
│   │   │   │   └── tpc-h.md
│   │   │   ├── benchmarking.md
│   │   │   ├── benchmarking_aws_ec2.md
│   │   │   ├── benchmarking_macos.md
│   │   │   ├── benchmarking_spark_sql_perf.md
│   │   │   ├── bug_triage.md
│   │   │   ├── contributing.md
│   │   │   ├── debugging.md
│   │   │   ├── development.md
│   │   │   ├── expression-audit-log.md
│   │   │   ├── ffi.md
│   │   │   ├── iceberg-spark-tests.md
│   │   │   ├── index.md
│   │   │   ├── jvm_shuffle.md
│   │   │   ├── native_shuffle.md
│   │   │   ├── parquet_scans.md
│   │   │   ├── plugin_overview.md
│   │   │   ├── profiling.md
│   │   │   ├── release_process.md
│   │   │   ├── roadmap.md
│   │   │   ├── spark-sql-tests.md
│   │   │   ├── sql-file-tests.md
│   │   │   ├── sql_error_propagation.md
│   │   │   └── tracing.md
│   │   ├── index.md
│   │   └── user-guide/
│   │       ├── index.md
│   │       └── latest/
│   │           ├── compatibility.md
│   │           ├── configs.md
│   │           ├── datasources.md
│   │           ├── datatypes.md
│   │           ├── expressions.md
│   │           ├── iceberg.md
│   │           ├── index.rst
│   │           ├── installation.md
│   │           ├── kubernetes.md
│   │           ├── metrics.md
│   │           ├── operators.md
│   │           ├── source.md
│   │           └── tuning.md
│   └── spark_expressions_support.md
├── fuzz-testing/
│   ├── .gitignore
│   ├── README.md
│   ├── pom.xml
│   ├── run.sh
│   └── src/
│       └── main/
│           └── scala/
│               └── org/
│                   └── apache/
│                       └── comet/
│                           └── fuzz/
│                               ├── ComparisonTool.scala
│                               ├── Main.scala
│                               ├── Meta.scala
│                               ├── QueryGen.scala
│                               ├── QueryRunner.scala
│                               └── Utils.scala
├── kube/
│   ├── Dockerfile
│   └── local/
│       ├── hadoop.env
│       └── hdfs-docker-compose.yml
├── mvnw
├── mvnw.cmd
├── native/
│   ├── Cargo.toml
│   ├── README.md
│   ├── common/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       ├── bin/
│   │       │   └── analyze_trace.rs
│   │       ├── error.rs
│   │       ├── lib.rs
│   │       ├── query_context.rs
│   │       ├── tracing.rs
│   │       └── utils.rs
│   ├── core/
│   │   ├── Cargo.toml
│   │   ├── benches/
│   │   │   ├── array_element_append.rs
│   │   │   ├── bit_util.rs
│   │   │   ├── common.rs
│   │   │   ├── parquet_decode.rs
│   │   │   ├── parquet_read.rs
│   │   │   └── perf.rs
│   │   └── src/
│   │       ├── common/
│   │       │   ├── bit.rs
│   │       │   ├── buffer.rs
│   │       │   └── mod.rs
│   │       ├── execution/
│   │       │   ├── columnar_to_row.rs
│   │       │   ├── expressions/
│   │       │   │   ├── arithmetic.rs
│   │       │   │   ├── bitwise.rs
│   │       │   │   ├── comparison.rs
│   │       │   │   ├── logical.rs
│   │       │   │   ├── mod.rs
│   │       │   │   ├── nullcheck.rs
│   │       │   │   ├── partition.rs
│   │       │   │   ├── random.rs
│   │       │   │   ├── strings.rs
│   │       │   │   ├── subquery.rs
│   │       │   │   └── temporal.rs
│   │       │   ├── jni_api.rs
│   │       │   ├── memory_pools/
│   │       │   │   ├── config.rs
│   │       │   │   ├── fair_pool.rs
│   │       │   │   ├── logging_pool.rs
│   │       │   │   ├── mod.rs
│   │       │   │   ├── task_shared.rs
│   │       │   │   └── unified_pool.rs
│   │       │   ├── metrics/
│   │       │   │   ├── mod.rs
│   │       │   │   └── utils.rs
│   │       │   ├── mod.rs
│   │       │   ├── operators/
│   │       │   │   ├── copy.rs
│   │       │   │   ├── csv_scan.rs
│   │       │   │   ├── expand.rs
│   │       │   │   ├── iceberg_scan.rs
│   │       │   │   ├── mod.rs
│   │       │   │   ├── parquet_writer.rs
│   │       │   │   ├── projection.rs
│   │       │   │   ├── scan.rs
│   │       │   │   └── shuffle_scan.rs
│   │       │   ├── planner/
│   │       │   │   ├── expression_registry.rs
│   │       │   │   ├── macros.rs
│   │       │   │   └── operator_registry.rs
│   │       │   ├── planner.rs
│   │       │   ├── serde.rs
│   │       │   ├── sort.rs
│   │       │   ├── spark_config.rs
│   │       │   ├── spark_plan.rs
│   │       │   ├── tracing.rs
│   │       │   └── utils.rs
│   │       ├── lib.rs
│   │       └── parquet/
│   │           ├── cast_column.rs
│   │           ├── data_type.rs
│   │           ├── encryption_support.rs
│   │           ├── mod.rs
│   │           ├── mutable_vector.rs
│   │           ├── objectstore/
│   │           │   ├── mod.rs
│   │           │   └── s3.rs
│   │           ├── parquet_exec.rs
│   │           ├── parquet_read_cached_factory.rs
│   │           ├── parquet_support.rs
│   │           ├── read/
│   │           │   ├── column.rs
│   │           │   ├── levels.rs
│   │           │   ├── mod.rs
│   │           │   └── values.rs
│   │           ├── schema_adapter.rs
│   │           └── util/
│   │               ├── bit_packing.rs
│   │               ├── buffer.rs
│   │               ├── jni.rs
│   │               ├── memory.rs
│   │               ├── mod.rs
│   │               └── test_common/
│   │                   ├── mod.rs
│   │                   ├── page_util.rs
│   │                   └── rand_gen.rs
│   ├── fs-hdfs/
│   │   ├── Cargo.toml
│   │   ├── LICENSE.txt
│   │   ├── README.md
│   │   ├── build.rs
│   │   ├── c_src/
│   │   │   ├── libhdfs/
│   │   │   │   ├── config.h
│   │   │   │   ├── exception.c
│   │   │   │   ├── exception.h
│   │   │   │   ├── hdfs.c
│   │   │   │   ├── hdfs.h
│   │   │   │   ├── htable.c
│   │   │   │   ├── htable.h
│   │   │   │   ├── jni_helper.c
│   │   │   │   ├── jni_helper.h
│   │   │   │   └── os/
│   │   │   │       ├── mutexes.h
│   │   │   │       ├── posix/
│   │   │   │       │   ├── mutexes.c
│   │   │   │       │   ├── platform.h
│   │   │   │       │   ├── thread.c
│   │   │   │       │   └── thread_local_storage.c
│   │   │   │       ├── thread.h
│   │   │   │       └── thread_local_storage.h
│   │   │   ├── libminidfs/
│   │   │   │   ├── native_mini_dfs.c
│   │   │   │   └── native_mini_dfs.h
│   │   │   └── wrapper.h
│   │   ├── rustfmt.toml
│   │   └── src/
│   │       ├── err.rs
│   │       ├── hdfs.rs
│   │       ├── lib.rs
│   │       ├── minidfs.rs
│   │       ├── native.rs
│   │       ├── util.rs
│   │       └── walkdir/
│   │           ├── mod.rs
│   │           └── tree_iter.rs
│   ├── hdfs/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       ├── lib.rs
│   │       └── object_store/
│   │           ├── hdfs.rs
│   │           └── mod.rs
│   ├── jni-bridge/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   ├── batch_iterator.rs
│   │   │   ├── comet_exec.rs
│   │   │   ├── comet_metric_node.rs
│   │   │   ├── comet_task_memory_manager.rs
│   │   │   ├── errors.rs
│   │   │   ├── lib.rs
│   │   │   └── shuffle_block_iterator.rs
│   │   └── testdata/
│   │       ├── backtrace.txt
│   │       └── stacktrace.txt
│   ├── proto/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── build.rs
│   │   └── src/
│   │       ├── lib.rs
│   │       └── proto/
│   │           ├── config.proto
│   │           ├── expr.proto
│   │           ├── literal.proto
│   │           ├── metric.proto
│   │           ├── operator.proto
│   │           ├── partitioning.proto
│   │           └── types.proto
│   ├── rustfmt.toml
│   ├── shuffle/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── benches/
│   │   │   ├── row_columnar.rs
│   │   │   └── shuffle_writer.rs
│   │   └── src/
│   │       ├── bin/
│   │       │   └── shuffle_bench.rs
│   │       ├── comet_partitioning.rs
│   │       ├── ipc.rs
│   │       ├── lib.rs
│   │       ├── metrics.rs
│   │       ├── partitioners/
│   │       │   ├── empty_schema.rs
│   │       │   ├── mod.rs
│   │       │   ├── multi_partition.rs
│   │       │   ├── partitioned_batch_iterator.rs
│   │       │   ├── single_partition.rs
│   │       │   └── traits.rs
│   │       ├── shuffle_writer.rs
│   │       ├── spark_crc32c_hasher.rs
│   │       ├── spark_unsafe/
│   │       │   ├── list.rs
│   │       │   ├── map.rs
│   │       │   ├── mod.rs
│   │       │   ├── row.rs
│   │       │   └── unsafe_object.rs
│   │       └── writers/
│   │           ├── buf_batch_writer.rs
│   │           ├── checksum.rs
│   │           ├── mod.rs
│   │           ├── shuffle_block_writer.rs
│   │           └── spill.rs
│   └── spark-expr/
│       ├── Cargo.toml
│       ├── README.md
│       ├── benches/
│       │   ├── aggregate.rs
│       │   ├── bloom_filter_agg.rs
│       │   ├── cast_from_boolean.rs
│       │   ├── cast_from_string.rs
│       │   ├── cast_int_to_timestamp.rs
│       │   ├── cast_non_int_numeric_timestamp.rs
│       │   ├── cast_numeric.rs
│       │   ├── conditional.rs
│       │   ├── date_trunc.rs
│       │   ├── decimal_div.rs
│       │   ├── normalize_nan.rs
│       │   ├── padding.rs
│       │   ├── to_csv.rs
│       │   └── wide_decimal.rs
│       ├── src/
│       │   ├── agg_funcs/
│       │   │   ├── avg.rs
│       │   │   ├── avg_decimal.rs
│       │   │   ├── correlation.rs
│       │   │   ├── covariance.rs
│       │   │   ├── mod.rs
│       │   │   ├── stddev.rs
│       │   │   ├── sum_decimal.rs
│       │   │   ├── sum_int.rs
│       │   │   └── variance.rs
│       │   ├── array_funcs/
│       │   │   ├── array_compact.rs
│       │   │   ├── array_insert.rs
│       │   │   ├── get_array_struct_fields.rs
│       │   │   ├── list_extract.rs
│       │   │   ├── mod.rs
│       │   │   └── size.rs
│       │   ├── bloom_filter/
│       │   │   ├── bit.rs
│       │   │   ├── bloom_filter_agg.rs
│       │   │   ├── bloom_filter_might_contain.rs
│       │   │   ├── mod.rs
│       │   │   ├── spark_bit_array.rs
│       │   │   └── spark_bloom_filter.rs
│       │   ├── comet_scalar_funcs.rs
│       │   ├── conditional_funcs/
│       │   │   ├── if_expr.rs
│       │   │   └── mod.rs
│       │   ├── conversion_funcs/
│       │   │   ├── boolean.rs
│       │   │   ├── cast.rs
│       │   │   ├── mod.rs
│       │   │   ├── numeric.rs
│       │   │   ├── string.rs
│       │   │   ├── temporal.rs
│       │   │   └── utils.rs
│       │   ├── csv_funcs/
│       │   │   ├── csv_write_options.rs
│       │   │   ├── mod.rs
│       │   │   └── to_csv.rs
│       │   ├── datetime_funcs/
│       │   │   ├── date_diff.rs
│       │   │   ├── date_from_unix_date.rs
│       │   │   ├── date_trunc.rs
│       │   │   ├── extract_date_part.rs
│       │   │   ├── hours.rs
│       │   │   ├── make_date.rs
│       │   │   ├── mod.rs
│       │   │   ├── timestamp_trunc.rs
│       │   │   └── unix_timestamp.rs
│       │   ├── error.rs
│       │   ├── hash_funcs/
│       │   │   ├── mod.rs
│       │   │   ├── murmur3.rs
│       │   │   ├── utils.rs
│       │   │   └── xxhash64.rs
│       │   ├── json_funcs/
│       │   │   ├── from_json.rs
│       │   │   ├── mod.rs
│       │   │   └── to_json.rs
│       │   ├── kernels/
│       │   │   ├── mod.rs
│       │   │   ├── strings.rs
│       │   │   └── temporal.rs
│       │   ├── lib.rs
│       │   ├── math_funcs/
│       │   │   ├── abs.rs
│       │   │   ├── ceil.rs
│       │   │   ├── checked_arithmetic.rs
│       │   │   ├── div.rs
│       │   │   ├── floor.rs
│       │   │   ├── internal/
│       │   │   │   ├── checkoverflow.rs
│       │   │   │   ├── decimal_rescale_check.rs
│       │   │   │   ├── make_decimal.rs
│       │   │   │   ├── mod.rs
│       │   │   │   ├── normalize_nan.rs
│       │   │   │   └── unscaled_value.rs
│       │   │   ├── log.rs
│       │   │   ├── mod.rs
│       │   │   ├── modulo_expr.rs
│       │   │   ├── negative.rs
│       │   │   ├── round.rs
│       │   │   ├── unhex.rs
│       │   │   ├── utils.rs
│       │   │   └── wide_decimal_binary_expr.rs
│       │   ├── nondetermenistic_funcs/
│       │   │   ├── internal/
│       │   │   │   ├── mod.rs
│       │   │   │   └── rand_utils.rs
│       │   │   ├── mod.rs
│       │   │   ├── monotonically_increasing_id.rs
│       │   │   ├── rand.rs
│       │   │   └── randn.rs
│       │   ├── predicate_funcs/
│       │   │   ├── is_nan.rs
│       │   │   ├── mod.rs
│       │   │   └── rlike.rs
│       │   ├── query_context.rs
│       │   ├── static_invoke/
│       │   │   ├── char_varchar_utils/
│       │   │   │   ├── mod.rs
│       │   │   │   └── read_side_padding.rs
│       │   │   └── mod.rs
│       │   ├── string_funcs/
│       │   │   ├── contains.rs
│       │   │   ├── get_json_object.rs
│       │   │   ├── mod.rs
│       │   │   ├── split.rs
│       │   │   └── substring.rs
│       │   ├── struct_funcs/
│       │   │   ├── create_named_struct.rs
│       │   │   ├── get_struct_field.rs
│       │   │   └── mod.rs
│       │   ├── test_common/
│       │   │   ├── file_util.rs
│       │   │   └── mod.rs
│       │   ├── timezone.rs
│       │   ├── unbound.rs
│       │   └── utils.rs
│       └── tests/
│           └── spark_expr_reg.rs
├── pom.xml
├── rust-toolchain.toml
├── scalafmt.conf
├── spark/
│   ├── README.md
│   ├── inspections/
│   │   ├── CometTPCDSQueriesList-results.txt
│   │   └── CometTPCHQueriesList-results.txt
│   ├── pom.xml
│   └── src/
│       ├── main/
│       │   ├── java/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   ├── CometBatchIterator.java
│       │   │           │   ├── CometShuffleBlockIterator.java
│       │   │           │   └── NativeColumnarToRowInfo.java
│       │   │           ├── parquet/
│       │   │           │   └── filter2/
│       │   │           │       └── predicate/
│       │   │           │           └── SparkFilterApi.java
│       │   │           └── spark/
│       │   │               ├── CometTaskMemoryManager.java
│       │   │               ├── shuffle/
│       │   │               │   ├── comet/
│       │   │               │   │   ├── CometBoundedShuffleMemoryAllocator.java
│       │   │               │   │   ├── CometShuffleChecksumSupport.java
│       │   │               │   │   ├── CometShuffleMemoryAllocator.java
│       │   │               │   │   ├── CometShuffleMemoryAllocatorTrait.java
│       │   │               │   │   ├── CometUnifiedShuffleMemoryAllocator.java
│       │   │               │   │   └── TooLargePageException.java
│       │   │               │   └── sort/
│       │   │               │       ├── CometShuffleExternalSorter.java
│       │   │               │       ├── CometShuffleExternalSorterAsync.java
│       │   │               │       ├── CometShuffleExternalSorterSync.java
│       │   │               │       └── SpillSorter.java
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       ├── CometScalarSubquery.java
│       │   │                       └── execution/
│       │   │                           └── shuffle/
│       │   │                               ├── CometBypassMergeSortShuffleWriter.java
│       │   │                               ├── CometDiskBlockWriter.java
│       │   │                               ├── CometUnsafeShuffleWriter.java
│       │   │                               ├── ExposedByteArrayOutputStream.java
│       │   │                               ├── ShuffleThreadPool.java
│       │   │                               ├── SpillInfo.java
│       │   │                               └── SpillWriter.java
│       │   ├── resources/
│       │   │   └── log4j2.properties
│       │   ├── scala/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   ├── CometExecIterator.scala
│       │   │           │   ├── CometFallback.scala
│       │   │           │   ├── CometMetricsListener.scala
│       │   │           │   ├── CometSparkSessionExtensions.scala
│       │   │           │   ├── DataTypeSupport.scala
│       │   │           │   ├── ExtendedExplainInfo.scala
│       │   │           │   ├── GenerateDocs.scala
│       │   │           │   ├── MetricsSupport.scala
│       │   │           │   ├── Native.scala
│       │   │           │   ├── NativeColumnarToRowConverter.scala
│       │   │           │   ├── SparkErrorConverter.scala
│       │   │           │   ├── Tracing.scala
│       │   │           │   ├── expressions/
│       │   │           │   │   ├── CometCast.scala
│       │   │           │   │   ├── CometEvalMode.scala
│       │   │           │   │   └── RegExp.scala
│       │   │           │   ├── iceberg/
│       │   │           │   │   └── IcebergReflection.scala
│       │   │           │   ├── parquet/
│       │   │           │   │   ├── CometParquetFileFormat.scala
│       │   │           │   │   ├── ParquetFilters.scala
│       │   │           │   │   └── SourceFilterSerde.scala
│       │   │           │   ├── rules/
│       │   │           │   │   ├── CometExecRule.scala
│       │   │           │   │   ├── CometScanRule.scala
│       │   │           │   │   ├── EliminateRedundantTransitions.scala
│       │   │           │   │   └── RewriteJoin.scala
│       │   │           │   ├── serde/
│       │   │           │   │   ├── CometAggregateExpressionSerde.scala
│       │   │           │   │   ├── CometBloomFilterMightContain.scala
│       │   │           │   │   ├── CometExpressionSerde.scala
│       │   │           │   │   ├── CometOperatorSerde.scala
│       │   │           │   │   ├── CometScalarFunction.scala
│       │   │           │   │   ├── CometScalarSubquery.scala
│       │   │           │   │   ├── CometSortOrder.scala
│       │   │           │   │   ├── QueryPlanSerde.scala
│       │   │           │   │   ├── SupportLevel.scala
│       │   │           │   │   ├── aggregates.scala
│       │   │           │   │   ├── arithmetic.scala
│       │   │           │   │   ├── arrays.scala
│       │   │           │   │   ├── bitwise.scala
│       │   │           │   │   ├── collectionOperations.scala
│       │   │           │   │   ├── conditional.scala
│       │   │           │   │   ├── contraintExpressions.scala
│       │   │           │   │   ├── datetime.scala
│       │   │           │   │   ├── decimalExpressions.scala
│       │   │           │   │   ├── hash.scala
│       │   │           │   │   ├── literals.scala
│       │   │           │   │   ├── maps.scala
│       │   │           │   │   ├── math.scala
│       │   │           │   │   ├── namedExpressions.scala
│       │   │           │   │   ├── nondetermenistic.scala
│       │   │           │   │   ├── operator/
│       │   │           │   │   │   ├── CometDataWritingCommand.scala
│       │   │           │   │   │   ├── CometIcebergNativeScan.scala
│       │   │           │   │   │   ├── CometNativeScan.scala
│       │   │           │   │   │   ├── CometSink.scala
│       │   │           │   │   │   └── package.scala
│       │   │           │   │   ├── predicates.scala
│       │   │           │   │   ├── statics.scala
│       │   │           │   │   ├── strings.scala
│       │   │           │   │   ├── structs.scala
│       │   │           │   │   └── unixtime.scala
│       │   │           │   └── testing/
│       │   │           │       ├── FuzzDataGenerator.scala
│       │   │           │       └── ParquetGenerator.scala
│       │   │           └── spark/
│       │   │               ├── CometSource.scala
│       │   │               ├── Plugins.scala
│       │   │               ├── shuffle/
│       │   │               │   └── sort/
│       │   │               │       └── RowPartition.scala
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       ├── CometBatchScanExec.scala
│       │   │                       ├── CometBroadcastExchangeExec.scala
│       │   │                       ├── CometCoalesceExec.scala
│       │   │                       ├── CometCollectLimitExec.scala
│       │   │                       ├── CometColumnarToRowExec.scala
│       │   │                       ├── CometCsvNativeScanExec.scala
│       │   │                       ├── CometExecRDD.scala
│       │   │                       ├── CometExecUtils.scala
│       │   │                       ├── CometIcebergNativeScanExec.scala
│       │   │                       ├── CometLocalTableScanExec.scala
│       │   │                       ├── CometMetricNode.scala
│       │   │                       ├── CometNativeColumnarToRowExec.scala
│       │   │                       ├── CometNativeScanExec.scala
│       │   │                       ├── CometNativeWriteExec.scala
│       │   │                       ├── CometPlan.scala
│       │   │                       ├── CometScanExec.scala
│       │   │                       ├── CometScanUtils.scala
│       │   │                       ├── CometSparkToColumnarExec.scala
│       │   │                       ├── CometTakeOrderedAndProjectExec.scala
│       │   │                       ├── CometWindowExec.scala
│       │   │                       ├── DecimalPrecision.scala
│       │   │                       ├── execution/
│       │   │                       │   └── shuffle/
│       │   │                       │       ├── CometBlockStoreShuffleReader.scala
│       │   │                       │       ├── CometNativeShuffleWriter.scala
│       │   │                       │       ├── CometShuffleDependency.scala
│       │   │                       │       ├── CometShuffleExchangeExec.scala
│       │   │                       │       ├── CometShuffleManager.scala
│       │   │                       │       ├── CometShuffledRowRDD.scala
│       │   │                       │       └── NativeBatchDecoderIterator.scala
│       │   │                       ├── operators.scala
│       │   │                       └── plans/
│       │   │                           └── AliasAwareOutputExpression.scala
│       │   ├── spark-3.4/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── CometExprShim.scala
│       │   │           │       ├── ShimCometBroadcastExchangeExec.scala
│       │   │           │       ├── ShimSQLConf.scala
│       │   │           │       └── ShimSubqueryBroadcast.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           ├── ShimCometScanExec.scala
│       │   │                           └── ShimSparkErrorConverter.scala
│       │   ├── spark-3.5/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── CometExprShim.scala
│       │   │           │       ├── ShimCometBroadcastExchangeExec.scala
│       │   │           │       ├── ShimSQLConf.scala
│       │   │           │       └── ShimSubqueryBroadcast.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           ├── ShimCometScanExec.scala
│       │   │                           └── ShimSparkErrorConverter.scala
│       │   ├── spark-3.x/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── ShimCometShuffleExchangeExec.scala
│       │   │           │       └── ShimCometSparkSessionExtensions.scala
│       │   │           └── spark/
│       │   │               ├── comet/
│       │   │               │   └── shims/
│       │   │               │       └── ShimCometDriverPlugin.scala
│       │   │               └── sql/
│       │   │                   ├── ExtendedExplainGenerator.scala
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           ├── ShimCometShuffleWriteProcessor.scala
│       │   │                           └── ShimStreamSourceAwareSparkPlan.scala
│       │   └── spark-4.0/
│       │       └── org/
│       │           └── apache/
│       │               ├── comet/
│       │               │   └── shims/
│       │               │       ├── CometExprShim.scala
│       │               │       ├── ShimCometBroadcastExchangeExec.scala
│       │               │       ├── ShimCometShuffleExchangeExec.scala
│       │               │       ├── ShimCometSparkSessionExtensions.scala
│       │               │       ├── ShimSQLConf.scala
│       │               │       └── ShimSubqueryBroadcast.scala
│       │               └── spark/
│       │                   ├── comet/
│       │                   │   └── shims/
│       │                   │       └── ShimCometDriverPlugin.scala
│       │                   └── sql/
│       │                       └── comet/
│       │                           └── shims/
│       │                               ├── ShimCometScanExec.scala
│       │                               ├── ShimCometShuffleWriteProcessor.scala
│       │                               ├── ShimSparkErrorConverter.scala
│       │                               └── ShimStreamSourceAwareSparkPlan.scala
│       └── test/
│           ├── java/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   ├── IntegrationTestSuite.java
│           │           │   └── hadoop/
│           │           │       └── fs/
│           │           │           └── FakeHDFSFileSystem.java
│           │           └── iceberg/
│           │               └── rest/
│           │                   └── RESTCatalogAdapter.java
│           ├── resources/
│           │   ├── log4j.properties
│           │   ├── log4j2.properties
│           │   ├── sql-tests/
│           │   │   └── expressions/
│           │   │       ├── aggregate/
│           │   │       │   ├── aggregate_filter.sql
│           │   │       │   ├── avg.sql
│           │   │       │   ├── bit_agg.sql
│           │   │       │   ├── corr.sql
│           │   │       │   ├── count.sql
│           │   │       │   ├── covariance.sql
│           │   │       │   ├── first_last.sql
│           │   │       │   ├── min_max.sql
│           │   │       │   ├── stddev.sql
│           │   │       │   ├── sum.sql
│           │   │       │   └── variance.sql
│           │   │       ├── array/
│           │   │       │   ├── array_append.sql
│           │   │       │   ├── array_compact.sql
│           │   │       │   ├── array_concat.sql
│           │   │       │   ├── array_contains.sql
│           │   │       │   ├── array_distinct.sql
│           │   │       │   ├── array_except.sql
│           │   │       │   ├── array_filter.sql
│           │   │       │   ├── array_insert.sql
│           │   │       │   ├── array_insert_legacy.sql
│           │   │       │   ├── array_intersect.sql
│           │   │       │   ├── array_join.sql
│           │   │       │   ├── array_max.sql
│           │   │       │   ├── array_min.sql
│           │   │       │   ├── array_remove.sql
│           │   │       │   ├── array_repeat.sql
│           │   │       │   ├── array_union.sql
│           │   │       │   ├── arrays_overlap.sql
│           │   │       │   ├── create_array.sql
│           │   │       │   ├── element_at.sql
│           │   │       │   ├── element_at_ansi.sql
│           │   │       │   ├── flatten.sql
│           │   │       │   ├── get_array_item.sql
│           │   │       │   ├── get_array_item_ansi.sql
│           │   │       │   ├── get_array_struct_fields.sql
│           │   │       │   ├── size.sql
│           │   │       │   └── sort_array.sql
│           │   │       ├── bitwise/
│           │   │       │   └── bitwise.sql
│           │   │       ├── cast/
│           │   │       │   ├── cast.sql
│           │   │       │   ├── cast_decimal_to_primitive.sql
│           │   │       │   └── cast_double_to_string.sql
│           │   │       ├── conditional/
│           │   │       │   ├── boolean.sql
│           │   │       │   ├── case_when.sql
│           │   │       │   ├── coalesce.sql
│           │   │       │   ├── if_expr.sql
│           │   │       │   ├── in_set.sql
│           │   │       │   ├── is_not_null.sql
│           │   │       │   ├── is_null.sql
│           │   │       │   └── predicates.sql
│           │   │       ├── datetime/
│           │   │       │   ├── date_add.sql
│           │   │       │   ├── date_diff.sql
│           │   │       │   ├── date_format.sql
│           │   │       │   ├── date_format_enabled.sql
│           │   │       │   ├── date_from_unix_date.sql
│           │   │       │   ├── date_sub.sql
│           │   │       │   ├── datetime.sql
│           │   │       │   ├── from_unix_time.sql
│           │   │       │   ├── from_unix_time_enabled.sql
│           │   │       │   ├── hour.sql
│           │   │       │   ├── last_day.sql
│           │   │       │   ├── make_date.sql
│           │   │       │   ├── minute.sql
│           │   │       │   ├── next_day.sql
│           │   │       │   ├── second.sql
│           │   │       │   ├── trunc_date.sql
│           │   │       │   ├── trunc_timestamp.sql
│           │   │       │   ├── unix_date.sql
│           │   │       │   └── unix_timestamp.sql
│           │   │       ├── decimal/
│           │   │       │   ├── decimal_div.sql
│           │   │       │   ├── decimal_div_ansi.sql
│           │   │       │   └── decimal_ops.sql
│           │   │       ├── hash/
│           │   │       │   ├── crc32.sql
│           │   │       │   └── hash.sql
│           │   │       ├── map/
│           │   │       │   ├── get_map_value.sql
│           │   │       │   ├── map_contains_key.sql
│           │   │       │   ├── map_entries.sql
│           │   │       │   ├── map_from_arrays.sql
│           │   │       │   ├── map_from_entries.sql
│           │   │       │   ├── map_keys.sql
│           │   │       │   └── map_values.sql
│           │   │       ├── math/
│           │   │       │   ├── abs.sql
│           │   │       │   ├── abs_ansi.sql
│           │   │       │   ├── acos.sql
│           │   │       │   ├── arithmetic.sql
│           │   │       │   ├── arithmetic_ansi.sql
│           │   │       │   ├── asin.sql
│           │   │       │   ├── atan.sql
│           │   │       │   ├── atan2.sql
│           │   │       │   ├── bin.sql
│           │   │       │   ├── ceil.sql
│           │   │       │   ├── cos.sql
│           │   │       │   ├── cosh.sql
│           │   │       │   ├── cot.sql
│           │   │       │   ├── exp.sql
│           │   │       │   ├── expm1.sql
│           │   │       │   ├── floor.sql
│           │   │       │   ├── isnan.sql
│           │   │       │   ├── log.sql
│           │   │       │   ├── log10.sql
│           │   │       │   ├── log2.sql
│           │   │       │   ├── pow.sql
│           │   │       │   ├── round.sql
│           │   │       │   ├── signum.sql
│           │   │       │   ├── sin.sql
│           │   │       │   ├── sinh.sql
│           │   │       │   ├── sqrt.sql
│           │   │       │   ├── tan.sql
│           │   │       │   └── tanh.sql
│           │   │       ├── misc/
│           │   │       │   ├── parquet_default_values.sql
│           │   │       │   ├── scalar_subquery.sql
│           │   │       │   └── width_bucket.sql
│           │   │       ├── string/
│           │   │       │   ├── ascii.sql
│           │   │       │   ├── bit_length.sql
│           │   │       │   ├── chr.sql
│           │   │       │   ├── concat.sql
│           │   │       │   ├── concat_ws.sql
│           │   │       │   ├── contains.sql
│           │   │       │   ├── ends_with.sql
│           │   │       │   ├── get_json_object.sql
│           │   │       │   ├── hex.sql
│           │   │       │   ├── init_cap.sql
│           │   │       │   ├── init_cap_enabled.sql
│           │   │       │   ├── left.sql
│           │   │       │   ├── length.sql
│           │   │       │   ├── like.sql
│           │   │       │   ├── lower.sql
│           │   │       │   ├── lower_enabled.sql
│           │   │       │   ├── luhn_check.sql
│           │   │       │   ├── octet_length.sql
│           │   │       │   ├── regexp_replace.sql
│           │   │       │   ├── regexp_replace_enabled.sql
│           │   │       │   ├── reverse.sql
│           │   │       │   ├── right.sql
│           │   │       │   ├── rlike.sql
│           │   │       │   ├── rlike_enabled.sql
│           │   │       │   ├── starts_with.sql
│           │   │       │   ├── string.sql
│           │   │       │   ├── string_instr.sql
│           │   │       │   ├── string_lpad.sql
│           │   │       │   ├── string_repeat.sql
│           │   │       │   ├── string_replace.sql
│           │   │       │   ├── string_rpad.sql
│           │   │       │   ├── string_space.sql
│           │   │       │   ├── string_translate.sql
│           │   │       │   ├── string_trim.sql
│           │   │       │   ├── substring.sql
│           │   │       │   ├── unhex.sql
│           │   │       │   ├── upper.sql
│           │   │       │   └── upper_enabled.sql
│           │   │       ├── struct/
│           │   │       │   ├── create_named_struct.sql
│           │   │       │   ├── get_struct_field.sql
│           │   │       │   ├── json_to_structs.sql
│           │   │       │   └── structs_to_json.sql
│           │   │       └── window/
│           │   │           └── lag_lead.sql
│           │   ├── test-data/
│           │   │   ├── before_1582_date_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_date_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_date_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_dict_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_dict_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_dict_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_plain_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_plain_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_plain_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_micros_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_micros_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_micros_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_millis_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_millis_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_millis_v3_2_0.snappy.parquet
│           │   │   ├── csv-test-1.csv
│           │   │   ├── csv-test-2.csv
│           │   │   ├── dec-in-fixed-len.parquet
│           │   │   ├── decimal32-written-as-64-bit-dict.snappy.parquet
│           │   │   ├── decimal32-written-as-64-bit.snappy.parquet
│           │   │   └── json-test-1.ndjson
│           │   ├── tpcds-extended/
│           │   │   └── q72.sql
│           │   ├── tpcds-micro-benchmarks/
│           │   │   ├── add_decimals.sql
│           │   │   ├── add_many_decimals.sql
│           │   │   ├── add_many_integers.sql
│           │   │   ├── agg_high_cardinality.sql
│           │   │   ├── agg_low_cardinality.sql
│           │   │   ├── agg_stddev.sql
│           │   │   ├── agg_sum_decimals_no_grouping.sql
│           │   │   ├── agg_sum_integers_no_grouping.sql
│           │   │   ├── agg_sum_integers_with_grouping.sql
│           │   │   ├── case_when_column_or_null.sql
│           │   │   ├── case_when_scalar.sql
│           │   │   ├── char_type.sql
│           │   │   ├── explode.sql
│           │   │   ├── filter_highly_selective.sql
│           │   │   ├── filter_less_selective.sql
│           │   │   ├── if_column_or_null.sql
│           │   │   ├── join_anti.sql
│           │   │   ├── join_condition.sql
│           │   │   ├── join_exploding_output.sql
│           │   │   ├── join_inner.sql
│           │   │   ├── join_left_outer.sql
│           │   │   ├── join_semi.sql
│           │   │   ├── rlike.sql
│           │   │   ├── scan_decimal.sql
│           │   │   └── to_json.sql
│           │   ├── tpcds-plan-stability/
│           │   │   ├── approved-plans-v1_4/
│           │   │   │   ├── q1.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q1.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q99.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q99.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v1_4-spark3_5/
│           │   │   │   ├── q1.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q1.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q99.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q99.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v1_4-spark4_0/
│           │   │   │   ├── q1.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q1.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.ansi.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.ansi.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q99.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q99.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v2_7/
│           │   │   │   ├── q10a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q98.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v2_7-spark3_5/
│           │   │   │   ├── q10a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q98.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   └── approved-plans-v2_7-spark4_0/
│           │   │       ├── q10a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q10a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q11.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q11.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q12.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q12.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q14.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q14.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q14a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q14a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q18a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q18a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q20.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q20.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q22.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q22.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q22a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q22a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q24.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q24.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q27a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q27a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q34.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q34.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q35.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q35.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q35a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q35a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q36a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q36a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q47.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q47.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q49.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q49.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q51a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q51a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q57.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q57.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q5a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q5a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q6.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q6.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q64.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q64.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q67a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q67a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q70a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q70a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q72.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q72.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q74.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q74.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q75.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q75.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q77a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q77a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q78.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q78.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q80a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q80a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q86a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q86a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q98.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       └── q98.native_iceberg_compat/
│           │   │           └── extended.txt
│           │   ├── tpcds-query-results/
│           │   │   ├── extended/
│           │   │   │   └── q72.sql.out
│           │   │   ├── v1_4/
│           │   │   │   ├── q1.sql.out
│           │   │   │   ├── q10.sql.out
│           │   │   │   ├── q11.sql.out
│           │   │   │   ├── q12.sql.out
│           │   │   │   ├── q13.sql.out
│           │   │   │   ├── q14a.sql.out
│           │   │   │   ├── q14b.sql.out
│           │   │   │   ├── q15.sql.out
│           │   │   │   ├── q16.sql.out
│           │   │   │   ├── q17.sql.out
│           │   │   │   ├── q18.sql.out
│           │   │   │   ├── q19.sql.out
│           │   │   │   ├── q2.sql.out
│           │   │   │   ├── q20.sql.out
│           │   │   │   ├── q21.sql.out
│           │   │   │   ├── q22.sql.out
│           │   │   │   ├── q23a.sql.out
│           │   │   │   ├── q23b.sql.out
│           │   │   │   ├── q24a.sql.out
│           │   │   │   ├── q24b.sql.out
│           │   │   │   ├── q25.sql.out
│           │   │   │   ├── q26.sql.out
│           │   │   │   ├── q27.sql.out
│           │   │   │   ├── q28.sql.out
│           │   │   │   ├── q29.sql.out
│           │   │   │   ├── q3.sql.out
│           │   │   │   ├── q30.sql.out
│           │   │   │   ├── q31.sql.out
│           │   │   │   ├── q32.sql.out
│           │   │   │   ├── q33.sql.out
│           │   │   │   ├── q34.sql.out
│           │   │   │   ├── q35.sql.out
│           │   │   │   ├── q36.sql.out
│           │   │   │   ├── q37.sql.out
│           │   │   │   ├── q38.sql.out
│           │   │   │   ├── q39a.sql.out
│           │   │   │   ├── q39b.sql.out
│           │   │   │   ├── q4.sql.out
│           │   │   │   ├── q40.sql.out
│           │   │   │   ├── q41.sql.out
│           │   │   │   ├── q42.sql.out
│           │   │   │   ├── q43.sql.out
│           │   │   │   ├── q44.sql.out
│           │   │   │   ├── q45.sql.out
│           │   │   │   ├── q46.sql.out
│           │   │   │   ├── q47.sql.out
│           │   │   │   ├── q48.sql.out
│           │   │   │   ├── q49.sql.out
│           │   │   │   ├── q5.sql.out
│           │   │   │   ├── q50.sql.out
│           │   │   │   ├── q51.sql.out
│           │   │   │   ├── q52.sql.out
│           │   │   │   ├── q53.sql.out
│           │   │   │   ├── q54.sql.out
│           │   │   │   ├── q55.sql.out
│           │   │   │   ├── q56.sql.out
│           │   │   │   ├── q57.sql.out
│           │   │   │   ├── q58.sql.out
│           │   │   │   ├── q59.sql.out
│           │   │   │   ├── q6.sql.out
│           │   │   │   ├── q60.sql.out
│           │   │   │   ├── q61.sql.out
│           │   │   │   ├── q62.sql.out
│           │   │   │   ├── q63.sql.out
│           │   │   │   ├── q64.sql.out
│           │   │   │   ├── q65.sql.out
│           │   │   │   ├── q66.sql.out
│           │   │   │   ├── q67.sql.out
│           │   │   │   ├── q68.sql.out
│           │   │   │   ├── q69.sql.out
│           │   │   │   ├── q7.sql.out
│           │   │   │   ├── q70.sql.out
│           │   │   │   ├── q71.sql.out
│           │   │   │   ├── q72.sql.out
│           │   │   │   ├── q73.sql.out
│           │   │   │   ├── q74.sql.out
│           │   │   │   ├── q75.sql.out
│           │   │   │   ├── q76.sql.out
│           │   │   │   ├── q77.sql.out
│           │   │   │   ├── q78.sql.out
│           │   │   │   ├── q79.sql.out
│           │   │   │   ├── q8.sql.out
│           │   │   │   ├── q80.sql.out
│           │   │   │   ├── q81.sql.out
│           │   │   │   ├── q82.sql.out
│           │   │   │   ├── q83.sql.out
│           │   │   │   ├── q84.sql.out
│           │   │   │   ├── q85.sql.out
│           │   │   │   ├── q86.sql.out
│           │   │   │   ├── q87.sql.out
│           │   │   │   ├── q88.sql.out
│           │   │   │   ├── q89.sql.out
│           │   │   │   ├── q9.sql.out
│           │   │   │   ├── q90.sql.out
│           │   │   │   ├── q91.sql.out
│           │   │   │   ├── q92.sql.out
│           │   │   │   ├── q93.sql.out
│           │   │   │   ├── q94.sql.out
│           │   │   │   ├── q95.sql.out
│           │   │   │   ├── q96.sql.out
│           │   │   │   ├── q97.sql.out
│           │   │   │   ├── q98.sql.out
│           │   │   │   └── q99.sql.out
│           │   │   ├── v2_7/
│           │   │   │   ├── q10a.sql.out
│           │   │   │   ├── q11.sql.out
│           │   │   │   ├── q12.sql.out
│           │   │   │   ├── q14.sql.out
│           │   │   │   ├── q14a.sql.out
│           │   │   │   ├── q18a.sql.out
│           │   │   │   ├── q20.sql.out
│           │   │   │   ├── q22.sql.out
│           │   │   │   ├── q22a.sql.out
│           │   │   │   ├── q24.sql.out
│           │   │   │   ├── q27a.sql.out
│           │   │   │   ├── q34.sql.out
│           │   │   │   ├── q35.sql.out
│           │   │   │   ├── q35a.sql.out
│           │   │   │   ├── q36a.sql.out
│           │   │   │   ├── q47.sql.out
│           │   │   │   ├── q49.sql.out
│           │   │   │   ├── q51a.sql.out
│           │   │   │   ├── q57.sql.out
│           │   │   │   ├── q5a.sql.out
│           │   │   │   ├── q6.sql.out
│           │   │   │   ├── q64.sql.out
│           │   │   │   ├── q67a.sql.out
│           │   │   │   ├── q70a.sql.out
│           │   │   │   ├── q72.sql.out
│           │   │   │   ├── q74.sql.out
│           │   │   │   ├── q75.sql.out
│           │   │   │   ├── q77a.sql.out
│           │   │   │   ├── q78.sql.out
│           │   │   │   ├── q80a.sql.out
│           │   │   │   ├── q86a.sql.out
│           │   │   │   └── q98.sql.out
│           │   │   └── v2_7-spark4_0/
│           │   │       └── q36a.sql.out
│           │   ├── tpch-extended/
│           │   │   └── q1.sql
│           │   └── tpch-query-results/
│           │       ├── q1.sql.out
│           │       ├── q10.sql.out
│           │       ├── q11.sql.out
│           │       ├── q12.sql.out
│           │       ├── q13.sql.out
│           │       ├── q14.sql.out
│           │       ├── q15.sql.out
│           │       ├── q16.sql.out
│           │       ├── q17.sql.out
│           │       ├── q18.sql.out
│           │       ├── q19.sql.out
│           │       ├── q2.sql.out
│           │       ├── q20.sql.out
│           │       ├── q21.sql.out
│           │       ├── q22.sql.out
│           │       ├── q3.sql.out
│           │       ├── q4.sql.out
│           │       ├── q5.sql.out
│           │       ├── q6.sql.out
│           │       ├── q7.sql.out
│           │       ├── q8.sql.out
│           │       └── q9.sql.out
│           ├── scala/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   ├── CometArrayExpressionSuite.scala
│           │           │   ├── CometBitwiseExpressionSuite.scala
│           │           │   ├── CometCastSuite.scala
│           │           │   ├── CometCsvExpressionSuite.scala
│           │           │   ├── CometDateTimeUtilsSuite.scala
│           │           │   ├── CometExpressionSuite.scala
│           │           │   ├── CometFuzzAggregateSuite.scala
│           │           │   ├── CometFuzzIcebergBase.scala
│           │           │   ├── CometFuzzIcebergSuite.scala
│           │           │   ├── CometFuzzMathSuite.scala
│           │           │   ├── CometFuzzTestBase.scala
│           │           │   ├── CometFuzzTestSuite.scala
│           │           │   ├── CometHashExpressionSuite.scala
│           │           │   ├── CometIcebergNativeSuite.scala
│           │           │   ├── CometJsonExpressionSuite.scala
│           │           │   ├── CometMapExpressionSuite.scala
│           │           │   ├── CometMathExpressionSuite.scala
│           │           │   ├── CometNativeSuite.scala
│           │           │   ├── CometS3TestBase.scala
│           │           │   ├── CometSparkSessionExtensionsSuite.scala
│           │           │   ├── CometSqlFileTestSuite.scala
│           │           │   ├── CometStringExpressionSuite.scala
│           │           │   ├── CometTemporalExpressionSuite.scala
│           │           │   ├── DataGenerator.scala
│           │           │   ├── DataGeneratorSuite.scala
│           │           │   ├── IcebergReadFromS3Suite.scala
│           │           │   ├── SparkErrorConverterSuite.scala
│           │           │   ├── SqlFileTestParser.scala
│           │           │   ├── WithHdfsCluster.scala
│           │           │   ├── csv/
│           │           │   │   └── CometCsvNativeReadSuite.scala
│           │           │   ├── exec/
│           │           │   │   ├── CometAggregateSuite.scala
│           │           │   │   ├── CometColumnarShuffleSuite.scala
│           │           │   │   ├── CometExec3_4PlusSuite.scala
│           │           │   │   ├── CometExecSuite.scala
│           │           │   │   ├── CometGenerateExecSuite.scala
│           │           │   │   ├── CometJoinSuite.scala
│           │           │   │   ├── CometNativeColumnarToRowSuite.scala
│           │           │   │   ├── CometNativeReaderSuite.scala
│           │           │   │   ├── CometNativeShuffleSuite.scala
│           │           │   │   └── CometWindowExecSuite.scala
│           │           │   ├── expressions/
│           │           │   │   └── conditional/
│           │           │   │       ├── CometCaseWhenSuite.scala
│           │           │   │       ├── CometCoalesceSuite.scala
│           │           │   │       └── CometIfSuite.scala
│           │           │   ├── objectstore/
│           │           │   │   └── NativeConfigSuite.scala
│           │           │   ├── parquet/
│           │           │   │   ├── CometParquetWriterSuite.scala
│           │           │   │   ├── ParquetReadFromFakeHadoopFsSuite.scala
│           │           │   │   ├── ParquetReadFromS3Suite.scala
│           │           │   │   └── ParquetReadSuite.scala
│           │           │   └── rules/
│           │           │       ├── CometExecRuleSuite.scala
│           │           │       └── CometScanRuleSuite.scala
│           │           └── spark/
│           │               ├── CometPluginsSuite.scala
│           │               ├── shuffle/
│           │               │   └── sort/
│           │               │       └── SpillSorterSuite.scala
│           │               └── sql/
│           │                   ├── CometSQLQueryTestHelper.scala
│           │                   ├── CometTPCDSQueriesList.scala
│           │                   ├── CometTPCDSQuerySuite.scala
│           │                   ├── CometTPCDSQueryTestSuite.scala
│           │                   ├── CometTPCHQueriesList.scala
│           │                   ├── CometTPCHQuerySuite.scala
│           │                   ├── CometTPCQueryBase.scala
│           │                   ├── CometTPCQueryListBase.scala
│           │                   ├── CometTestBase.scala
│           │                   ├── GenTPCHData.scala
│           │                   ├── TPCDSQueries.scala
│           │                   ├── TPCH.scala
│           │                   ├── Tables.scala
│           │                   ├── benchmark/
│           │                   │   ├── CometAggregateExpressionBenchmark.scala
│           │                   │   ├── CometArithmeticBenchmark.scala
│           │                   │   ├── CometArrayExpressionBenchmark.scala
│           │                   │   ├── CometBenchmarkBase.scala
│           │                   │   ├── CometCastBooleanBenchmark.scala
│           │                   │   ├── CometCastNumericToNumericBenchmark.scala
│           │                   │   ├── CometCastNumericToStringBenchmark.scala
│           │                   │   ├── CometCastNumericToTemporalBenchmark.scala
│           │                   │   ├── CometCastStringToNumericBenchmark.scala
│           │                   │   ├── CometCastStringToTemporalBenchmark.scala
│           │                   │   ├── CometCastTemporalToNumericBenchmark.scala
│           │                   │   ├── CometCastTemporalToStringBenchmark.scala
│           │                   │   ├── CometCastTemporalToTemporalBenchmark.scala
│           │                   │   ├── CometColumnarToRowBenchmark.scala
│           │                   │   ├── CometComparisonExpressionBenchmark.scala
│           │                   │   ├── CometConditionalExpressionBenchmark.scala
│           │                   │   ├── CometCsvExpressionBenchmark.scala
│           │                   │   ├── CometDatetimeExpressionBenchmark.scala
│           │                   │   ├── CometExecBenchmark.scala
│           │                   │   ├── CometGetJsonObjectBenchmark.scala
│           │                   │   ├── CometHashExpressionBenchmark.scala
│           │                   │   ├── CometIcebergReadBenchmark.scala
│           │                   │   ├── CometJsonExpressionBenchmark.scala
│           │                   │   ├── CometOperatorSerdeBenchmark.scala
│           │                   │   ├── CometPartitionColumnBenchmark.scala
│           │                   │   ├── CometPredicateExpressionBenchmark.scala
│           │                   │   ├── CometReadBenchmark.scala
│           │                   │   ├── CometShuffleBenchmark.scala
│           │                   │   ├── CometStringExpressionBenchmark.scala
│           │                   │   ├── CometTPCDSMicroBenchmark.scala
│           │                   │   ├── CometTPCDSQueryBenchmark.scala
│           │                   │   ├── CometTPCHQueryBenchmark.scala
│           │                   │   └── CometTPCQueryBenchmarkBase.scala
│           │                   └── comet/
│           │                       ├── CometDppFallbackRepro3949Suite.scala
│           │                       ├── CometPlanChecker.scala
│           │                       ├── CometPlanStabilitySuite.scala
│           │                       ├── CometShuffleFallbackStickinessSuite.scala
│           │                       ├── CometTaskMetricsSuite.scala
│           │                       └── ParquetEncryptionITCase.scala
│           ├── spark-3.4/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   └── shims/
│           │           │       └── ShimCometTPCHQuerySuite.scala
│           │           └── spark/
│           │               └── sql/
│           │                   └── ShimCometTestBase.scala
│           ├── spark-3.5/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   └── shims/
│           │           │       └── ShimCometTPCHQuerySuite.scala
│           │           └── spark/
│           │               └── sql/
│           │                   ├── CometToPrettyStringSuite.scala
│           │                   └── ShimCometTestBase.scala
│           ├── spark-3.x/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   └── iceberg/
│           │           │       └── RESTCatalogHelper.scala
│           │           ├── iceberg/
│           │           │   └── rest/
│           │           │       └── RESTCatalogServlet.java
│           │           └── spark/
│           │               └── sql/
│           │                   └── comet/
│           │                       └── shims/
│           │                           └── ShimCometTPCDSQuerySuite.scala
│           └── spark-4.0/
│               └── org/
│                   └── apache/
│                       ├── comet/
│                       │   ├── exec/
│                       │   │   └── CometShuffle4_0Suite.scala
│                       │   ├── iceberg/
│                       │   │   └── RESTCatalogHelper.scala
│                       │   └── shims/
│                       │       └── ShimCometTPCHQuerySuite.scala
│                       ├── iceberg/
│                       │   └── rest/
│                       │       └── RESTCatalogServlet.java
│                       └── spark/
│                           ├── comet/
│                           │   └── shims/
│                           │       └── ShimTestUtils.scala
│                           └── sql/
│                               ├── CometToPrettyStringSuite.scala
│                               ├── ShimCometTestBase.scala
│                               └── comet/
│                                   └── shims/
│                                       └── ShimCometTPCDSQuerySuite.scala
└── spark-integration/
    └── pom.xml

================================================
FILE CONTENTS
================================================

================================================
FILE: .asf.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# This file controls the settings of this repository
#
# See more details at
# https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features

notifications:
  commits: commits@datafusion.apache.org
  issues: github@datafusion.apache.org
  pullrequests: github@datafusion.apache.org
  discussions: github@datafusion.apache.org
  jira_options: link label worklog
github:
  description: "Apache DataFusion Comet Spark Accelerator"
  homepage: https://datafusion.apache.org/comet
  labels:
    - arrow
    - datafusion
    - rust
    - spark
  enabled_merge_buttons:
    squash: true
    merge: false
    rebase: false
  features:
    issues: true
    discussions: true
  protected_branches:
    main:
      required_pull_request_reviews:
        required_approving_review_count: 1
  pull_requests:
    allow_update_branch: true
# publishes the content of the `asf-site` branch to
# https://datafusion.apache.org/comet/
publish:
  whoami: asf-site
  subdir: comet


================================================
FILE: .claude/skills/audit-comet-expression/SKILL.md
================================================
---
name: audit-comet-expression
description: Audit an existing Comet expression for correctness and test coverage. Studies the Spark implementation across versions 3.4.3, 3.5.8, and 4.0.1, reviews the Comet and DataFusion implementations, identifies missing test coverage, and offers to implement additional tests.
argument-hint: <expression-name>
---

Audit the Comet implementation of the `$ARGUMENTS` expression for correctness and test coverage.

## Overview

This audit covers:

1. Spark implementation across versions 3.4.3, 3.5.8, and 4.0.1
2. Comet Scala serde implementation
3. Comet Rust / DataFusion implementation
4. Existing test coverage (SQL file tests and Scala tests)
5. Gap analysis and test recommendations

---

## Step 1: Locate the Spark Implementations

Clone specific Spark version tags into `/tmp` (shallow clones keep the downloads small, and cloning outside the repository avoids polluting the workspace). Only clone a version if it is not already present.

```bash
set -eu -o pipefail
for tag in v3.4.3 v3.5.8 v4.0.1; do
  dir="/tmp/spark-${tag}"
  if [ ! -d "$dir" ]; then
    git clone --depth 1 --branch "$tag" https://github.com/apache/spark.git "$dir"
  fi
done
```

### Find the expression class in each Spark version

Search the Catalyst SQL expressions source:

```bash
for tag in v3.4.3 v3.5.8 v4.0.1; do
  dir="/tmp/spark-${tag}"
  echo "=== $tag ==="
  find "$dir/sql/catalyst/src/main/scala" -name "*.scala" | \
    xargs grep -l "case class $ARGUMENTS\b\|object $ARGUMENTS\b" 2>/dev/null
done
```

If the expression is not found in catalyst, also check core:

```bash
for tag in v3.4.3 v3.5.8 v4.0.1; do
  dir="/tmp/spark-${tag}"
  echo "=== $tag ==="
  find "$dir/sql" -name "*.scala" | \
    xargs grep -l "case class $ARGUMENTS\b\|object $ARGUMENTS\b" 2>/dev/null
done
```

### Read the Spark source for each version

For each Spark version, read the expression file and note:

- The `eval`, `nullSafeEval`, and `doGenCode` / `nullSafeCodeGen` methods
- The `inputTypes` and `dataType` fields (accepted input types, return type)
- Null handling strategy (`nullable`, `nullSafeEval`)
- ANSI mode behavior (`ansiEnabled`, `failOnError`)
- Special cases, guards, `require` assertions, and runtime exceptions
- Any constants or configuration the expression reads

### Compare across Spark versions

Produce a concise diff summary of what changed between:

- 3.4.3 → 3.5.8
- 3.5.8 → 4.0.1

Pay attention to:

- New input types added or removed
- Behavior changes for edge cases (null, overflow, empty, boundary)
- New ANSI mode branches
- New parameters or configuration
- Breaking API changes that Comet must shim

---

## Step 2: Locate the Spark Tests

```bash
for tag in v3.4.3 v3.5.8 v4.0.1; do
  dir="/tmp/spark-${tag}"
  echo "=== $tag ==="
  find "$dir/sql" -name "*.scala" -path "*/test/*" | \
    xargs grep -l "$ARGUMENTS" 2>/dev/null
done
```

Read the relevant Spark test files and produce a list of:

- Input types covered
- Edge cases exercised (null, empty, overflow, negative, boundary values, special characters, etc.)
- ANSI mode tests
- Error cases

This list will be the reference for the coverage gap analysis in Step 5.

---

## Step 3: Locate the Comet Implementation

### Scala serde

```bash
# Find the serde object
grep -r "$ARGUMENTS" spark/src/main/scala/org/apache/comet/serde/ --include="*.scala" -l
grep -r "$ARGUMENTS" spark/src/main/scala/org/apache/comet/ --include="*.scala" -l
```

Read the serde implementation and check:

- Which Spark versions the serde handles
- Whether `getSupportLevel` is implemented and accurate
- Whether all input types are handled
- Whether any types are explicitly marked `Unsupported`

### Shims

```bash
find spark/src/main -name "CometExprShim.scala" | xargs grep -l "$ARGUMENTS" 2>/dev/null
```

If shims exist, read them and note any version-specific handling.

### Rust / DataFusion implementation

```bash
# Search for the function in native/spark-expr
grep -r "$ARGUMENTS" native/spark-expr/src/ --include="*.rs" -l
grep -r "$ARGUMENTS" native/core/src/ --include="*.rs" -l
```

If the expression delegates to DataFusion, find it there too. Set `$DATAFUSION_SRC` to a local DataFusion checkout, or fall back to searching the cargo registry:

```bash
if [ -n "${DATAFUSION_SRC:-}" ]; then
  grep -r "$ARGUMENTS" "$DATAFUSION_SRC" --include="*.rs" -l 2>/dev/null | head -10
else
  # Fall back to cargo registry (may include unrelated crates)
  grep -r "$ARGUMENTS" ~/.cargo/registry/src/*/datafusion* --include="*.rs" -l 2>/dev/null | head -10
fi
```

Read the Rust implementation and check:

- Null handling (does it propagate nulls correctly?)
- Overflow and underflow handling (returns `Err` vs panics)
- Type dispatch (does it handle all types that Spark supports?)
- ANSI / fail-on-error mode

---

## Step 4: Locate Existing Comet Tests

### SQL file tests

```bash
# Find SQL test files for this expression
find spark/src/test/resources/sql-tests/expressions/ -name "*.sql" | \
  xargs grep -l "$ARGUMENTS" 2>/dev/null

# Also check if there's a dedicated file
find spark/src/test/resources/sql-tests/expressions/ -name "*$(echo $ARGUMENTS | tr '[:upper:]' '[:lower:]')*"
```

Read every SQL test file found and list:

- Table schemas and data values used
- Queries exercised
- Query modes used (`query`, `spark_answer_only`, `tolerance`, `ignore`, `expect_error`)
- Any ConfigMatrix directives

### Scala tests

```bash
grep -r "$ARGUMENTS" spark/src/test/scala/ --include="*.scala" -l
```

Read the relevant Scala test files and list:

- Input types covered
- Edge cases exercised
- Whether constant folding is disabled for literal tests

---

## Step 5: Gap Analysis

Compare the Spark test coverage (Step 2) against the Comet test coverage (Step 4). Produce a structured gap report:

### Coverage matrix

For each of the following dimensions, note whether it is covered in Comet tests or missing:

| Dimension                                                                                              | Spark tests it | Comet SQL test | Comet Scala test | Gap? |
| ------------------------------------------------------------------------------------------------------ | -------------- | -------------- | ---------------- | ---- |
| Column reference argument(s)                                                                           |                |                |                  |      |
| Literal argument(s)                                                                                    |                |                |                  |      |
| NULL input                                                                                             |                |                |                  |      |
| Empty string / empty array / empty map                                                                 |                |                |                  |      |
| Array/map with NULL elements                                                                           |                |                |                  |      |
| Zero, negative zero, negative values (numeric)                                                         |                |                |                  |      |
| Underflow, overflow                                                                                    |                |                |                  |      |
| Boundary values (INT_MIN, INT_MAX, Long.MinValue, minimum positive, etc.)                              |                |                |                  |      |
| NaN, Infinity, -Infinity, subnormal (float/double)                                                     |                |                |                  |      |
| Multibyte / special UTF-8 (composed vs decomposed, e.g. `é` U+00E9 vs `e` + U+0301, non-Latin scripts) |                |                |                  |      |
| ANSI mode (failOnError=true)                                                                           |                |                |                  |      |
| Non-ANSI mode (failOnError=false)                                                                      |                |                |                  |      |
| All supported input types                                                                              |                |                |                  |      |
| Parquet dictionary encoding (ConfigMatrix)                                                             |                |                |                  |      |
| Cross-version behavior differences                                                                     |                |                |                  |      |

### Implementation gaps

Also review the Comet implementation (Step 3) against the Spark behavior (Step 1):

- Are there input types that Spark supports for which `getSupportLevel` returns `Unsupported` without an explanatory comment?
- Are there behavioral differences that are NOT marked `Incompatible` but should be?
- Are there behavioral differences between Spark versions that the Comet implementation does not account for (missing shim)?
- Does the Rust implementation match the Spark behavior for all edge cases?

---

## Step 6: Recommendations

Summarize findings as a prioritized list.

### High priority

Issues where Comet may silently produce wrong results compared to Spark.

### Medium priority

Missing test coverage for edge cases that could expose bugs.

### Low priority

Minor gaps, cosmetic improvements, or nice-to-have tests.

---

## Step 7: Offer to Implement Missing Tests

After presenting the gap analysis, ask the user:

> I found the following missing test cases. Would you like me to implement them?
>
> - [list each missing test case]
>
> I can add them as SQL file tests in `spark/src/test/resources/sql-tests/expressions/<category>/$ARGUMENTS.sql`
> (or as Scala tests in `CometExpressionSuite` for cases that require programmatic setup).

If the user says yes, implement the missing tests following the SQL file test format described in
`docs/source/contributor-guide/sql-file-tests.md`. Prefer SQL file tests over Scala tests.

### SQL file test template

```sql
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements.  See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership.  The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License.  You may obtain a copy of the License at
--
--   http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied.  See the License for the
-- specific language governing permissions and limitations
-- under the License.

-- ConfigMatrix: parquet.enable.dictionary=false,true

statement
CREATE TABLE test_$ARGUMENTS(...) USING parquet

statement
INSERT INTO test_$ARGUMENTS VALUES
  (...),
  (NULL)

-- column argument
query
SELECT $ARGUMENTS(col) FROM test_$ARGUMENTS

-- literal arguments
query
SELECT $ARGUMENTS('value'), $ARGUMENTS(''), $ARGUMENTS(NULL)
```

### Verify the tests pass

After implementing tests, tell the user how to run them:

```bash
./mvnw test -Dsuites="org.apache.comet.CometSqlFileTestSuite $ARGUMENTS" -Dtest=none
```

---

## Step 8: Update the Expression Audit Log

After completing the audit (whether or not tests were added), append a row to the audit log at
`docs/source/contributor-guide/expression-audit-log.md`.

The row should include:

- Expression name
- Spark versions checked (e.g. 3.4.3, 3.5.8, 4.0.1)
- Today's date
- A brief summary of findings (behavioral differences, bugs found/fixed, tests added, known incompatibilities)

---

## Output Format

Present the audit as:

1. **Expression Summary** - Brief description of what `$ARGUMENTS` does, its input/output types, and null behavior
2. **Spark Version Differences** - Summary of any behavioral or API differences across Spark 3.4.3, 3.5.8, and 4.0.1
3. **Comet Implementation Notes** - Summary of how Comet implements this expression and any concerns
4. **Coverage Gap Analysis** - The gap table from Step 5, plus implementation gaps
5. **Recommendations** - Prioritized list from Step 6
6. **Offer to add tests** - The prompt from Step 7

## Tone and Style

- Write in clear, concise prose
- Use backticks around code references (function names, file paths, class names, types, config keys)
- Avoid robotic or formulaic language
- Be constructive and acknowledge what is already well-covered before raising gaps
- Avoid em dashes and semicolons; use separate sentences instead


================================================
FILE: .claude/skills/review-comet-pr/SKILL.md
================================================
---
name: review-comet-pr
description: Review a DataFusion Comet pull request for Spark compatibility and implementation correctness. Provides guidance to a reviewer rather than posting comments directly.
argument-hint: <pr-number>
---

Review Comet PR #$ARGUMENTS

## Before You Start

### Gather PR Metadata

Fetch the PR details to understand the scope:

```bash
gh pr view $ARGUMENTS --repo apache/datafusion-comet --json title,body,author,isDraft,state,files
```

### Review Existing Comments First

Before forming your review:

1. **Read all existing review comments** on the PR
2. **Check the conversation tab** for any discussion
3. **Avoid duplicating feedback** that others have already provided
4. **Build on existing discussions** rather than starting new threads on the same topic
5. **If you have no additional concerns beyond what's already discussed, say so**
6. **Ignore Copilot reviews** - do not reference or build upon comments from GitHub Copilot

```bash
# View existing comments on a PR
gh pr view $ARGUMENTS --repo apache/datafusion-comet --comments
```

---

## Review Workflow

### 1. Gather Context

Read the changed files and understand the area of the codebase being modified:

```bash
# View the diff
gh pr diff $ARGUMENTS --repo apache/datafusion-comet
```

For expression PRs, check how similar expressions are implemented in the codebase. Look at the serde files in `spark/src/main/scala/org/apache/comet/serde/` and Rust implementations in `native/spark-expr/src/`.

### 2. Read Spark Source (Expression PRs)

**For any PR that adds or modifies an expression, you must read the Spark source code to understand the canonical behavior.** This is the authoritative reference for what Comet must match.

1. **Clone or update the Spark repo:**

   ```bash
   # Clone if not already present (use /tmp to avoid polluting the workspace)
   if [ ! -d /tmp/spark ]; then
     git clone --depth 1 https://github.com/apache/spark.git /tmp/spark
   fi
   ```

2. **Find the expression implementation in Spark:**

   ```bash
   # Search for the expression class (e.g., for "Conv", "Hex", "Substring")
   find /tmp/spark/sql/catalyst/src/main/scala -name "*.scala" | xargs grep -l "case class <ExpressionName>"
   ```

3. **Read the Spark implementation carefully.** Pay attention to:
   - The `eval` and `doGenCode`/`nullSafeEval` methods. These define the exact behavior.
   - The `inputTypes` and `dataType` fields. These define which types Spark accepts and what it returns.
   - Null handling. Does it use `nullable = true`? Does `nullSafeEval` handle nulls implicitly?
   - Special cases, guards, and `require` assertions.
   - ANSI mode branches (look for `SQLConf.get.ansiEnabled` or `failOnError`).

4. **Read the Spark tests for the expression:**

   ```bash
   # Find test files
   find /tmp/spark/sql -name "*.scala" -path "*/test/*" | xargs grep -l "<ExpressionName>"
   ```

5. **Compare the Spark behavior against the Comet implementation in the PR.** Identify:
   - Edge cases tested in Spark but not in the PR
   - Data types supported in Spark but not handled in the PR
   - Behavioral differences that should be marked `Incompatible`

6. **Suggest additional tests** for any edge cases or type combinations covered in Spark's tests that are missing from the PR's tests.

### 3. Spark Compatibility Check

**This is the most critical aspect of Comet reviews.** Comet must produce identical results to Spark.

For expression PRs, verify against the Spark source you read in step 2:

1. **Check edge cases**
   - Null handling
   - Overflow behavior
   - Empty input behavior
   - Type-specific behavior

2. **Verify all data types are handled**
   - Does Spark support this type? (Check `inputTypes` in Spark source)
   - Does the PR handle all Spark-supported types?

3. **Check for ANSI mode differences**
   - Spark behavior may differ between legacy and ANSI modes
   - PR should handle both or mark as `Incompatible`

### 4. Check Against Implementation Guidelines

**Always verify PRs follow the implementation guidelines.**

#### Scala Serde (`spark/src/main/scala/org/apache/comet/serde/`)

- [ ] Expression class correctly identified
- [ ] All child expressions converted via `exprToProtoInternal`
- [ ] Return type correctly serialized
- [ ] `getSupportLevel` reflects true compatibility:
  - `Compatible()` - matches Spark exactly
  - `Incompatible(Some("reason"))` - differs in documented ways
  - `Unsupported(Some("reason"))` - cannot be implemented
- [ ] Serde in appropriate file (`datetime.scala`, `strings.scala`, `arithmetic.scala`, etc.)

#### Registration (`QueryPlanSerde.scala`)

- [ ] Added to correct map (temporal, string, arithmetic, etc.)
- [ ] No duplicate registrations
- [ ] Import statement added

#### Rust Implementation (if applicable)

Location: `native/spark-expr/src/`

- [ ] Matches DataFusion and Arrow conventions
- [ ] Null handling is correct
- [ ] No panics. Use `Result` types.
- [ ] Efficient array operations (avoid row-by-row)

#### Tests - Prefer SQL File-Based Framework

**Expression tests should use the SQL file-based framework (`CometSqlFileTestSuite`) where possible.** This framework automatically runs each query through both Spark and Comet and compares results. No Scala code is needed. Only fall back to Scala tests in `CometExpressionSuite` when the SQL framework cannot express the test. Examples include complex `DataFrame` setup, programmatic data generation, or non-expression tests.

**SQL file test location:** `spark/src/test/resources/sql-tests/expressions/<category>/`

Categories include: `aggregate/`, `array/`, `string/`, `math/`, `struct/`, `map/`, `datetime/`, `hash/`, etc.

**SQL file structure:**

```sql
-- Create test data
statement
CREATE TABLE test_crc32(col string, a int, b float) USING parquet

statement
INSERT INTO test_crc32 VALUES ('Spark', 10, 1.5), (NULL, NULL, NULL), ('', 0, 0.0)

-- Default mode: verifies native Comet execution + result matches Spark
query
SELECT crc32(col) FROM test_crc32

-- spark_answer_only: compares results without requiring native execution
query spark_answer_only
SELECT crc32(cast(a as string)) FROM test_crc32

-- tolerance: allows numeric variance for floating-point results
query tolerance=0.0001
SELECT cos(v) FROM test_trig

-- expect_fallback: asserts fallback to Spark occurs
query expect_fallback(unsupported expression)
SELECT unsupported_func(v) FROM test_table

-- expect_error: verifies both engines throw matching exceptions
query expect_error(ARITHMETIC_OVERFLOW)
SELECT 2147483647 + 1

-- ignore: skip queries with known bugs (include GitHub issue link)
query ignore(https://github.com/apache/datafusion-comet/issues/NNNN)
SELECT known_buggy_expr(v) FROM test_table
```

**Running SQL file tests:**

```bash
# All SQL file tests
./mvnw test -Dsuites="org.apache.comet.CometSqlFileTestSuite" -Dtest=none

# Specific test file (substring match)
./mvnw test -Dsuites="org.apache.comet.CometSqlFileTestSuite crc32" -Dtest=none
```

**CRITICAL: Verify all test requirements (regardless of framework):**

- [ ] Basic functionality tested (column data, not just literals)
- [ ] Null handling tested (`SELECT expression(NULL)`)
- [ ] Edge cases tested (empty input, overflow, boundary values)
- [ ] Both literal values and column references tested (they use different code paths)
- [ ] For timestamp/datetime expressions, timezone handling is tested (e.g., UTC, non-UTC session timezone, timestamps with and without timezone)
- [ ] One expression per SQL file for easier debugging
- [ ] If using Scala tests instead, literal tests MUST disable constant folding:
  ```scala
  withSQLConf(SQLConf.OPTIMIZER_EXCLUDED_RULES.key ->
      "org.apache.spark.sql.catalyst.optimizer.ConstantFolding") {
    checkSparkAnswerAndOperator("SELECT func(literal)")
  }
  ```

### 5. Performance Review (Expression PRs)

**For PRs that add new expressions, performance is not optional.** The whole point of Comet is to be faster than Spark. If a new expression is not faster, it may not be worth adding.

1. **Check that the PR includes microbenchmark results.** The PR description should contain benchmark numbers comparing Comet vs Spark for the new expression. If benchmark results are missing, flag this as a required addition.

2. **Look for a microbenchmark implementation.** Expression benchmarks live in `spark/src/test/scala/org/apache/spark/sql/benchmark/`. Check whether the PR adds a benchmark for the new expression.

3. **Review the benchmark results if provided:**
   - Is Comet actually faster than Spark for this expression?
   - Are the benchmarks representative? They should test with realistic data sizes, not just trivial inputs.
   - Are different data types benchmarked if the expression supports multiple types?

4. **Review the Rust implementation for performance concerns:**
   - Unnecessary allocations or copies
   - Row-by-row processing where batch/array operations are possible
   - Redundant type conversions
   - Inefficient string handling (e.g., repeated UTF-8 validation)
   - Missing use of Arrow compute kernels where they exist

5. **If benchmark results show Comet is slower than Spark**, flag this clearly. The PR should explain why the regression is acceptable or include a plan to optimize.

### 6. Check CI Test Failures

**Always check the CI status and summarize any test failures in your review.**

```bash
# View CI check status
gh pr checks $ARGUMENTS --repo apache/datafusion-comet

# View failed check details
gh pr checks $ARGUMENTS --repo apache/datafusion-comet --failed
```

### 7. Documentation Check

Check whether the PR requires updates to user-facing documentation in `docs/`:

- **Compatibility guide** (`docs/source/user-guide/compatibility.md`): New expressions or operators should be listed. Incompatible behaviors should be documented.
- **Configuration guide** (`docs/source/user-guide/configs.md`): New config options should be documented.
- **Expressions list** (`docs/source/user-guide/expressions.md`): New expressions should be added.

If the PR adds a new expression or operator but does not update the relevant docs, flag this as something that needs to be addressed.

### 8. Common Comet Review Issues

1. **Incomplete type support**: Spark expression supports types not handled in PR
2. **Missing edge cases**: Null, overflow, empty string, negative values
3. **Wrong return type**: Return type must match Spark exactly
4. **Tests in wrong framework**: Expression tests should use the SQL file-based framework (`CometSqlFileTestSuite`) rather than adding to Scala test suites like `CometExpressionSuite`. Suggest migration if the PR adds Scala tests for expressions that could use SQL files instead.
5. **Stale native code**: PR might need `./mvnw install -pl common -DskipTests`
6. **Missing `getSupportLevel`**: Edge cases should be marked as `Incompatible`

---

## Output Format

Present your review as guidance for the reviewer. Structure your output as:

1. **PR Summary** - Brief description of what the PR does
2. **CI Status** - Summary of CI check results
3. **Findings** - Your analysis organized by area (Spark compatibility, implementation, tests, etc.)
4. **Suggested Review Comments** - Specific comments the reviewer could leave on the PR, with file and line references where applicable

## Review Tone and Style

Write reviews that sound human and conversational. Avoid:

- Robotic or formulaic language
- Em dashes. Use separate sentences instead.
- Semicolons. Use separate sentences instead.

Instead:

- Write in flowing paragraphs using simple grammar
- Keep sentences short and separate rather than joining them with punctuation
- Be kind and constructive, even when raising concerns
- Use backticks around any code references (function names, file paths, class names, types, config keys, etc.)
- **Suggest** adding tests rather than stating tests are missing (e.g., "It might be worth adding a test for X" not "Tests are missing for X")
- **Ask questions** about edge cases rather than asserting they aren't handled (e.g., "Does this handle the case where X is null?" not "This doesn't handle null")
- Frame concerns as questions or suggestions when possible
- Acknowledge what the PR does well before raising concerns

## Do Not Post Comments

**IMPORTANT: Never post comments or reviews on the PR directly.** This skill is for providing guidance to a human reviewer. Present all findings and suggested comments to the user. The user will decide what to post.


================================================
FILE: .dockerignore
================================================
.git
.github
.idea
bin
conf
docs/build
docs/temp
docs/venv
metastore_db
target
common/target
spark-integration/target
fuzz-testing/target
spark/target
native/target
core/target
spark-warehouse
venv


================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Bug report
description: Create a bug report
labels: bug
body:
  - type: textarea
    attributes:
      label: Describe the bug
      description: Describe the bug.
      placeholder: >
        A clear and concise description of what the bug is.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Steps to reproduce
      placeholder: >
        Describe steps to reproduce the bug:
  - type: textarea
    attributes:
      label: Expected behavior
      placeholder: >
        A clear and concise description of what you expected to happen.
  - type: textarea
    attributes:
      label: Additional context
      placeholder: >
        Add any other context about the problem here.


================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Feature request
description: Suggest an idea for this project
labels: enhancement
body:
  - type: textarea
    attributes:
      label: What is the problem the feature request solves?
      description: Please describe how the feature request improves Comet.
      placeholder: >
        A clear and concise description of what the improvement is. Ex. I'm always frustrated when [...]
        (This section helps Comet developers understand the context and *why* for this feature, in addition to the *what*)
  - type: textarea
    attributes:
      label: Describe the potential solution
      placeholder: >
        A clear and concise description of what you want to happen.
  - type: textarea
    attributes:
      label: Additional context
      placeholder: >
        Add any other context or screenshots about the feature request here.


================================================
FILE: .github/actions/java-test/action.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: "Java Test"
description: "Run Java tests"
inputs:
  artifact_name:
    description: "Unique name for uploaded artifacts for this run"
    required: true
  suites:
    description: 'Which Scalatest test suites to run'
    required: false
    default: ''
  maven_opts:
    description: 'Maven options passed to the mvn command'
    required: false
    default: ''
  scan_impl:
    description: 'The default Parquet scan implementation'
    required: false
    default: 'auto'
  upload-test-reports:
    description: 'Whether to upload test results including coverage to GitHub'
    required: false
    default: 'false'
  skip-native-build:
    description: 'Skip native build (when using pre-built artifact)'
    required: false
    default: 'false'

runs:
  using: "composite"
  steps:
    - name: Run Cargo release build
      if: ${{ inputs.skip-native-build != 'true' }}
      shell: bash
      # it is important that we run the Scala tests against a release build rather than a debug build
      # to make sure that no tests are relying on overflow checks that are present only in debug builds
      run: |
        cd native
        cargo build --release

    - name: Cache Maven dependencies
      # TODO: remove next line after working again
      #  temporarily work around https://github.com/actions/runner-images/issues/13341
      #  by disabling caching for macOS
      if: ${{ runner.os != 'macOS' }}
      uses: actions/cache@v5
      with:
        path: |
          ~/.m2/repository
          /root/.m2/repository
        key: ${{ runner.os }}-java-maven-${{ hashFiles('**/pom.xml') }}
        restore-keys: |
          ${{ runner.os }}-java-maven-

    - name: Run all tests
      shell: bash
      if: ${{ inputs.suites == '' }}
      env:
        COMET_PARQUET_SCAN_IMPL: ${{ inputs.scan_impl }}
        SPARK_LOCAL_HOSTNAME: "localhost"
        SPARK_LOCAL_IP: "127.0.0.1"
      run: |
        MAVEN_OPTS="-Xmx4G -Xms2G -XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B -Prelease install ${{ inputs.maven_opts }}
    - name: Run specified tests
      shell: bash
      if: ${{ inputs.suites != '' }}
      env:
        COMET_PARQUET_SCAN_IMPL: ${{ inputs.scan_impl }}
        SPARK_LOCAL_HOSTNAME: "localhost"
        SPARK_LOCAL_IP: "127.0.0.1"
      run: |
        MAVEN_SUITES="$(echo "${{ inputs.suites }}" | paste -sd, -)"
        echo "Running with MAVEN_SUITES=$MAVEN_SUITES"
        MAVEN_OPTS="-Xmx4G -Xms2G -DwildcardSuites=$MAVEN_SUITES -XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B -Prelease install ${{ inputs.maven_opts }}
    - name: Upload crash logs
      if: failure()
      uses: actions/upload-artifact@v6
      with:
        name: crash-logs-${{ inputs.artifact_name }}
        path: "**/hs_err_pid*.log"
    - name: Debug listing
      if: failure()
      shell: bash
      run: |  
        echo "CWD: $(pwd)"
        ls -lah .
        ls -lah target
        find . -name 'unit-tests.log'
    - name: Upload unit-tests.log
      if: failure()
      uses: actions/upload-artifact@v6
      with:
        name: unit-tests-${{ inputs.artifact_name }}
        path: "**/target/unit-tests.log"
    - name: Upload test results
      if: ${{ inputs.upload-test-reports == 'true' }}
      uses: actions/upload-artifact@v6
      with:
         name: java-test-reports-${{ inputs.artifact_name }}
         path: "**/target/surefire-reports/*.txt"
         retention-days: 7 # 1 week for test reports
         overwrite: true


================================================
FILE: .github/actions/rust-test/action.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: "Rust Test"
description: "Run Rust tests"

runs:
  using: "composite"
  steps:
    # Note: cargo fmt check is now handled by the lint job that gates this workflow

    - name: Check Cargo clippy
      shell: bash
      run: |
        cd native
        cargo clippy --color=never --all-targets --workspace -- -D warnings

    - name: Check compilation
      shell: bash
      run: |
        cd native
        cargo check --benches

    - name: Check unused dependencies
      shell: bash
      run: |
        cd native
        cargo install cargo-machete --version 0.7.0 && cargo machete

    - name: Cache Maven dependencies
      uses: actions/cache@v4
      with:
        path: |
          ~/.m2/repository
          /root/.m2/repository
        key: ${{ runner.os }}-rust-maven-${{ hashFiles('**/pom.xml') }}
        restore-keys: |
          ${{ runner.os }}-rust-maven-

    - name: Build common module (pre-requisite for Rust tests)
      shell: bash
      run: |
        cd common
        ../mvnw -B clean compile -DskipTests

    - name: Install nextest
      shell: bash
      run: |
        cargo install cargo-nextest --locked

    - name: Run Cargo test
      shell: bash
      run: |
        cd native
        # Set LD_LIBRARY_PATH to include JVM library path for tests that use JNI
        export LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${LD_LIBRARY_PATH}
        RUST_BACKTRACE=1 cargo nextest run



================================================
FILE: .github/actions/setup-builder/action.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Prepare Builder
description: 'Prepare Build Environment'
inputs:
  rust-version:
    description: 'version of rust to install (e.g. nightly)'
    required: true
    default: 'stable'
  jdk-version:
    description: 'jdk version to install (e.g., 17)'
    required: true
    default: '17'
runs:
  using: "composite"
  steps:
    - name: Install Build Dependencies
      shell: bash
      run: |
        apt-get update
        apt-get install -y protobuf-compiler
        apt-get install -y clang

    - name: Install JDK ${{inputs.jdk-version}}
      uses: actions/setup-java@v4
      with:
        # distribution is chosen to be zulu as it still offers JDK 8 with Silicon support, which
        # is not available in the adopt distribution
        distribution: 'zulu'
        java-version: ${{inputs.jdk-version}}

    - name: Set JAVA_HOME
      shell: bash
      run: echo "JAVA_HOME=$(echo ${JAVA_HOME})" >> $GITHUB_ENV

    - name: Setup Rust toolchain
      shell: bash
      # rustfmt is needed for the substrait build script
      run: |
        echo "Installing ${{inputs.rust-version}}"
        rustup toolchain install ${{inputs.rust-version}}
        rustup default ${{inputs.rust-version}}
        rustup component add rustfmt clippy


================================================
FILE: .github/actions/setup-iceberg-builder/action.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Setup Iceberg Builder
description: 'Setup Apache Iceberg to run Spark SQL tests'
inputs:
  iceberg-version:
    description: 'The Apache Iceberg version (e.g., 1.8.1) to build'
    required: true
runs:
  using: "composite"
  steps:
    - name: Clone Iceberg repo
      uses: actions/checkout@v6
      with:
        repository: apache/iceberg
        path: apache-iceberg
        ref: apache-iceberg-${{inputs.iceberg-version}}
        fetch-depth: 1

    - name: Setup Iceberg for Comet
      shell: bash
      run: |
        cd apache-iceberg
        git apply ../dev/diffs/iceberg/${{inputs.iceberg-version}}.diff


================================================
FILE: .github/actions/setup-macos-builder/action.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Prepare Builder for MacOS
description: 'Prepare Build Environment'
inputs:
  rust-version:
    description: 'version of rust to install (e.g. nightly)'
    required: true
    default: 'stable'
  jdk-version:
    description: 'jdk version to install (e.g., 17)'
    required: true
    default: '17'
  jdk-architecture:
    description: 'OS architecture for the JDK'
    required: true
    default: 'x64'
  protoc-architecture:
    description: 'OS architecture for protobuf compiler'
    required: true
    default: 'x86_64'

runs:
  using: "composite"
  steps:
    - name: Install Build Dependencies
      shell: bash
      run: |
        # Install protobuf compiler (pinned to 21.4)
        mkdir -p $HOME/d/protoc
        cd $HOME/d/protoc
        export PROTO_ZIP="protoc-21.4-osx-${{inputs.protoc-architecture}}.zip"
        curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v21.4/$PROTO_ZIP
        unzip $PROTO_ZIP
        # GITHUB_PATH makes protoc visible to subsequent steps; export covers the rest of this step
        echo "$HOME/d/protoc/bin" >> $GITHUB_PATH
        export PATH=$PATH:$HOME/d/protoc/bin
        # install openssl and setup DYLD_LIBRARY_PATH so later steps can load its libraries
        brew install openssl
        OPENSSL_LIB_PATH=$(brew --prefix openssl)/lib
        echo "openssl lib path is: ${OPENSSL_LIB_PATH}"
        echo "DYLD_LIBRARY_PATH=$OPENSSL_LIB_PATH:$DYLD_LIBRARY_PATH" >> $GITHUB_ENV
        # output the current status of SIP for later debugging; ignore failure if unavailable
        csrutil status || true

    # NOTE(review): other workflows in this repo use actions/setup-java@v5 — consider aligning
    - name: Install JDK ${{inputs.jdk-version}}
      uses: actions/setup-java@v4
      with:
        # distribution is chosen to be zulu as it still offers JDK 8 with Silicon support, which
        # is not available in the adopt distribution
        distribution: 'zulu'
        java-version: ${{inputs.jdk-version}}
        architecture: ${{inputs.jdk-architecture}}

    # Persist JAVA_HOME (set by setup-java for this job's steps) into the env of later steps
    - name: Set JAVA_HOME
      shell: bash
      run: echo "JAVA_HOME=${JAVA_HOME}" >> $GITHUB_ENV

    - name: Setup Rust toolchain
      shell: bash
      # rustfmt is needed for the substrait build script
      run: |
        echo "Installing ${{inputs.rust-version}}"
        rustup toolchain install ${{inputs.rust-version}}
        rustup default ${{inputs.rust-version}}
        rustup component add rustfmt clippy

================================================
FILE: .github/actions/setup-spark-builder/action.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Setup Spark Builder
description: 'Setup Apache Spark to run SQL tests'
inputs:
  spark-short-version:
    description: 'The Apache Spark short version (e.g., 3.5) to build'
    required: true
  spark-version:
    description: 'The Apache Spark version (e.g., 3.5.8) to build'
    required: true
  skip-native-build:
    description: 'Skip native build (when using pre-built artifact)'
    required: false
    default: 'false'
runs:
  using: "composite"
  steps:
    # Shallow-clone the Spark release tag that matches the requested version
    - name: Clone Spark repo
      uses: actions/checkout@v6
      with:
        repository: apache/spark
        path: apache-spark
        ref: v${{inputs.spark-version}}
        fetch-depth: 1

    # Apply the Comet patch maintained for this exact Spark version
    - name: Setup Spark for Comet
      shell: bash
      run: |
        cd apache-spark
        git apply ../dev/diffs/${{inputs.spark-version}}.diff

    # Cache both ~/.m2 and /root/.m2 since container jobs may run as root.
    # Pinned to @v5 for consistency with the other workflows in this repo.
    - name: Cache Maven dependencies
      uses: actions/cache@v5
      with:
        path: |
          ~/.m2/repository
          /root/.m2/repository
        key: ${{ runner.os }}-spark-sql-${{ hashFiles('spark/**/pom.xml', 'common/**/pom.xml') }}
        restore-keys: |
          ${{ runner.os }}-spark-sql-

    - name: Build Comet (with native)
      if: ${{ inputs.skip-native-build != 'true' }}
      shell: bash
      run: |
        PROFILES="-Pspark-${{inputs.spark-short-version}}" make release

    - name: Build Comet (Maven only, skip native)
      if: ${{ inputs.skip-native-build == 'true' }}
      shell: bash
      run: |
        # Native library should already be in native/target/release/
        ./mvnw install -Prelease -DskipTests -Pspark-${{inputs.spark-short-version}}


================================================
FILE: .github/dependabot.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

version: 2
updates:
  # Rust dependencies in the native/ workspace
  - package-ecosystem: cargo
    directory: "/native"
    schedule:
      interval: weekly
    target-branch: main
    labels: [dependencies]
    ignore:
      # major version bumps of datafusion*, arrow*, parquet are handled manually
      - dependency-name: "arrow*"
        update-types: ["version-update:semver-major"]
      - dependency-name: "parquet"
        update-types: ["version-update:semver-major"]
      - dependency-name: "datafusion*"
        update-types: ["version-update:semver-major"]
    groups:
      # group prost*/pbjson* updates into a single PR
      proto:
        applies-to: version-updates
        patterns:
          - "prost*"
          - "pbjson*"
      # Catch-all: group only minor/patch into a single PR,
      # excluding deps we want always separate (and excluding arrow/parquet which have their own group)
      all-other-cargo-deps:
        applies-to: version-updates
        patterns:
          - "*"
        exclude-patterns:
          - "arrow*"
          - "parquet"
          - "object_store"
          - "sqlparser"
          - "prost*"
          - "pbjson*"
        update-types:
          - "minor"
          - "patch"
  # Keep GitHub Actions used by workflows up to date
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    labels: [dependencies]


================================================
FILE: .github/pull_request_template.md
================================================
## Which issue does this PR close?

<!--
We generally require a GitHub issue to be filed for all bug fixes and enhancements and this helps us generate change logs for our releases. You can link an issue to this PR using the GitHub syntax. For example `Closes #123` indicates that this PR will close issue #123.
-->

Closes #.

## Rationale for this change

<!--
 Why are you proposing this change? If this is already explained clearly in the issue then this section is not needed.
 Explaining clearly why changes are proposed helps reviewers understand your changes and offer better suggestions for fixes.
-->

## What changes are included in this PR?

<!--
There is no need to duplicate the description in the issue here but it is sometimes worth providing a summary of the individual changes in this PR.
-->

## How are these changes tested?

<!--
We typically require tests for all PRs in order to:
1. Prevent the code from being accidentally broken by subsequent changes
2. Serve as another way to document the expected behavior of the code

If tests are not included in your PR, please explain why (for example, are the changes already covered by existing tests)?
-->


================================================
FILE: .github/workflows/codeql.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    # weekly scheduled scan (Mondays at 04:16 UTC)
    - cron: '16 4 * * 1'

permissions:
  contents: read

jobs:
  analyze:
    name: Analyze Actions
    runs-on: ubuntu-latest
    permissions:
      contents: read
      # required to upload CodeQL scan results
      security-events: write
      packages: read

    steps:
    # Actions below are pinned to commit SHAs for supply-chain safety
    - name: Checkout repository
      uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      with:
        persist-credentials: false

    - name: Initialize CodeQL
      uses: github/codeql-action/init@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4
      with:
        # scan the repository's GitHub Actions workflow files
        languages: actions

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4
      with:
        category: "/language:actions"


================================================
FILE: .github/workflows/docker-publish.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Publish Docker images

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

# Publish only on release-style tags (plus test-* tags for dry runs)
on:
  push:
    tags:
      - '*.*.*'
      - '*.*.*-rc*'
      - 'test-docker-publish-*'

jobs:
  docker:
    name: Docker
    # only publish from the apache/ repository, not from forks
    if: ${{ startsWith(github.repository, 'apache/') }}
    runs-on: ubuntu-22.04
    permissions:
      contents: read
      packages: write
    steps:
      # Free up disk space on the hosted runner before the multi-arch build
      - name: Remove unnecessary files
        run: |
          echo "Disk space before cleanup:"
          df -h
          docker system prune -af
          sudo rm -rf /tmp/*
          sudo rm -rf /opt/hostedtoolcache
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
          sudo apt-get clean
          echo "Disk space after cleanup:"
          df -h
      - name: Set up Java
        uses: actions/setup-java@v5
        with:
          java-version: '17'
          distribution: 'temurin'
          cache: 'maven'
      # The version is exported via GITHUB_ENV, so no step id/output is needed
      - name: Extract Comet version
        run: |
          # use the tag that triggered this workflow as the Comet version e.g. 0.4.0-rc1
          echo "COMET_VERSION=${GITHUB_REF##*/}" >> $GITHUB_ENV
      - name: Echo Comet version
        run: echo "The current Comet version is ${{ env.COMET_VERSION }}"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v4
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v4
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # No checkout step: build-push-action uses the Git context when none is given
      - name: Build and push
        uses: docker/build-push-action@v7
        with:
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ghcr.io/apache/datafusion-comet:spark-3.5-scala-2.12-${{ env.COMET_VERSION }}
          file: kube/Dockerfile
          # always build from scratch for release artifacts
          no-cache: true


================================================
FILE: .github/workflows/docs.yaml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

on:
  push:
    branches:
      - main
    paths:
      - .asf.yaml
      - .github/workflows/docs.yaml
      - docs/**

name: Deploy DataFusion Comet site

jobs:
  build-docs:
    name: Build docs
    if: ${{ startsWith(github.repository, 'apache/') }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout docs sources
        uses: actions/checkout@v6

      - name: Checkout asf-site branch
        uses: actions/checkout@v6
        with:
          ref: asf-site
          path: asf-site

      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.10"

      - name: Setup Java
        uses: actions/setup-java@v5
        with:
          distribution: 'temurin'
          java-version: '17'
          cache: 'maven'

      - name: Install dependencies
        run: |
          set -x
          python3 -m venv venv
          source venv/bin/activate
          pip install -r docs/requirements.txt

      - name: Build docs
        run: |
          set -x
          source venv/bin/activate
          cd docs
          ./build.sh

      - name: Copy & push the generated HTML
        run: |
          set -x
          cd asf-site/
          rsync \
            -a \
            --delete \
            --exclude '/.git/' \
            ../docs/build/html/ \
            ./
          cp ../.asf.yaml .
          touch .nojekyll
          git status --porcelain
          if [ "$(git status --porcelain)" != "" ]; then
            git config user.name "github-actions[bot]"
            git config user.email "github-actions[bot]@users.noreply.github.com"
            git add --all
            git commit -m 'Publish built docs triggered by ${{ github.sha }}'
            git push || git push --force
          fi

================================================
FILE: .github/workflows/iceberg_spark_test.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Iceberg Spark SQL Tests

# Cancel any in-progress run for the same ref and workflow
concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

# Skip docs/benchmark-only changes; those are covered by lighter workflows
on:
  push:
    branches:
      - main
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:

env:
  RUST_VERSION: stable
  RUST_BACKTRACE: 1

jobs:
  # Build native library once and share with all test jobs
  build-native:
    name: Build Native Library
    runs-on: ubuntu-24.04
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17

      # Key includes a hash of all Rust sources; restore-keys allows partial hits
      - name: Restore Cargo cache
        uses: actions/cache/restore@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}
          restore-keys: |
            ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-

      - name: Build native library
        # Use CI profile for faster builds (no LTO) and to share cache with pr_build_linux.yml.
        run: |
          cd native && cargo build --profile ci
        env:
          RUSTFLAGS: "-Ctarget-cpu=x86-64-v3"

      # Only refresh the cache from main to avoid churn from PR branches
      - name: Save Cargo cache
        uses: actions/cache/save@v5
        if: github.ref == 'refs/heads/main'
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}

      # Consumed by the three iceberg test jobs below; short retention since it is rebuilt per run
      - name: Upload native library
        uses: actions/upload-artifact@v7
        with:
          name: native-lib-iceberg
          path: native/target/ci/libcomet.so
          retention-days: 1

  iceberg-spark:
    needs: build-native
    strategy:
      # NOTE(review): this matrix is duplicated in the two jobs below — consider extracting
      matrix:
        os: [ubuntu-24.04]
        java-version: [11, 17]
        iceberg-version: [{short: '1.8', full: '1.8.1'}, {short: '1.9', full: '1.9.1'}, {short: '1.10', full: '1.10.0'}]
        spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.8'}]
        scala-version: ['2.13']
      fail-fast: false
    name: iceberg-spark/${{ matrix.os }}/iceberg-${{ matrix.iceberg-version.full }}/spark-${{ matrix.spark-version.full }}/scala-${{ matrix.scala-version }}/java-${{ matrix.java-version }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    env:
      SPARK_LOCAL_IP: localhost
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java-version }}
      # Place the CI-built library under native/target/release/ so the -Prelease Maven build picks it up
      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-iceberg
          path: native/target/release/
      - name: Build Comet
        run: |
          ./mvnw install -Prelease -DskipTests -Pspark-${{ matrix.spark-version.short }} -Pscala-${{ matrix.scala-version }}
      - name: Setup Iceberg
        uses: ./.github/actions/setup-iceberg-builder
        with:
          iceberg-version: ${{ matrix.iceberg-version.full }}
      - name: Run Iceberg Spark tests
        run: |
          cd apache-iceberg
          rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
          ENABLE_COMET=true ENABLE_COMET_ONHEAP=true ./gradlew -DsparkVersions=${{ matrix.spark-version.short }} -DscalaVersion=${{ matrix.scala-version }} -DflinkVersions= -DkafkaVersions= \
            :iceberg-spark:iceberg-spark-${{ matrix.spark-version.short }}_${{ matrix.scala-version }}:test \
            -Pquick=true -x javadoc

  iceberg-spark-extensions:
    needs: build-native
    strategy:
      # NOTE(review): same matrix as iceberg-spark above — consider extracting
      matrix:
        os: [ubuntu-24.04]
        java-version: [11, 17]
        iceberg-version: [{short: '1.8', full: '1.8.1'}, {short: '1.9', full: '1.9.1'}, {short: '1.10', full: '1.10.0'}]
        spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.8'}]
        scala-version: ['2.13']
      fail-fast: false
    name: iceberg-spark-extensions/${{ matrix.os }}/iceberg-${{ matrix.iceberg-version.full }}/spark-${{ matrix.spark-version.full }}/scala-${{ matrix.scala-version }}/java-${{ matrix.java-version }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    env:
      SPARK_LOCAL_IP: localhost
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java-version }}
      # Place the CI-built library under native/target/release/ so the -Prelease Maven build picks it up
      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-iceberg
          path: native/target/release/
      - name: Build Comet
        run: |
          ./mvnw install -Prelease -DskipTests -Pspark-${{ matrix.spark-version.short }} -Pscala-${{ matrix.scala-version }}
      - name: Setup Iceberg
        uses: ./.github/actions/setup-iceberg-builder
        with:
          iceberg-version: ${{ matrix.iceberg-version.full }}
      - name: Run Iceberg Spark extensions tests
        run: |
          cd apache-iceberg
          rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
          ENABLE_COMET=true ENABLE_COMET_ONHEAP=true ./gradlew -DsparkVersions=${{ matrix.spark-version.short }} -DscalaVersion=${{ matrix.scala-version }} -DflinkVersions= -DkafkaVersions= \
            :iceberg-spark:iceberg-spark-extensions-${{ matrix.spark-version.short }}_${{ matrix.scala-version }}:test \
            -Pquick=true -x javadoc

  iceberg-spark-runtime:
    needs: build-native
    strategy:
      # NOTE(review): same matrix as iceberg-spark above — consider extracting
      matrix:
        os: [ubuntu-24.04]
        java-version: [11, 17]
        iceberg-version: [{short: '1.8', full: '1.8.1'}, {short: '1.9', full: '1.9.1'}, {short: '1.10', full: '1.10.0'}]
        spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.8'}]
        scala-version: ['2.13']
      fail-fast: false
    name: iceberg-spark-runtime/${{ matrix.os }}/iceberg-${{ matrix.iceberg-version.full }}/spark-${{ matrix.spark-version.full }}/scala-${{ matrix.scala-version }}/java-${{ matrix.java-version }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    env:
      SPARK_LOCAL_IP: localhost
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java-version }}
      # Place the CI-built library under native/target/release/ so the -Prelease Maven build picks it up
      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-iceberg
          path: native/target/release/
      - name: Build Comet
        run: |
          ./mvnw install -Prelease -DskipTests -Pspark-${{ matrix.spark-version.short }} -Pscala-${{ matrix.scala-version }}
      - name: Setup Iceberg
        uses: ./.github/actions/setup-iceberg-builder
        with:
          iceberg-version: ${{ matrix.iceberg-version.full }}
      # runtime tests use the Gradle integrationTest target rather than test
      - name: Run Iceberg Spark runtime tests
        run: |
          cd apache-iceberg
          rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
          ENABLE_COMET=true ENABLE_COMET_ONHEAP=true ./gradlew -DsparkVersions=${{ matrix.spark-version.short }} -DscalaVersion=${{ matrix.scala-version }} -DflinkVersions= -DkafkaVersions= \
            :iceberg-spark:iceberg-spark-runtime-${{ matrix.spark-version.short }}_${{ matrix.scala-version }}:integrationTest \
            -Pquick=true -x javadoc


================================================
FILE: .github/workflows/label_new_issues.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Label new issues with requires-triage

on:
  issues:
    types: [opened]

permissions:
  # needed to add labels to issues
  issues: write

jobs:
  add-triage-label:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/github-script@v9
        with:
          # Tag every newly opened issue so the triage process can find it
          script: |
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['requires-triage']
            })


================================================
FILE: .github/workflows/miri.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Run Miri Safety Checks

on:
  push:
    branches:
      - main
    paths-ignore:
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths-ignore:
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:

jobs:
  miri:
    name: "Miri"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - name: Install Build Dependencies
        shell: bash
        run: |
          sudo apt-get update
          sudo apt-get install -y protobuf-compiler
          sudo apt-get install -y clang
      # Miri requires a nightly toolchain; the override applies to this checkout directory
      - name: Install Miri
        run: |
          rustup toolchain install nightly --component miri
          rustup override set nightly
          cargo miri setup
      - name: Test with Miri
        run: |
          cd native
          # disable isolation so tests can access the host filesystem/clock under Miri
          MIRIFLAGS="-Zmiri-disable-isolation" cargo miri test --lib --bins --tests --examples


================================================
FILE: .github/workflows/pr_benchmark_check.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# Lightweight CI for benchmark-only changes - verifies compilation and linting
# without running full test suites

name: PR Benchmark Check

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

# Runs only when benchmark sources change (the paths ignored by the main CI workflows)
on:
  push:
    branches:
      - main
    paths:
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths:
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  workflow_dispatch:

env:
  RUST_VERSION: stable
  RUST_BACKTRACE: 1

jobs:
  benchmark-check:
    name: Benchmark Compile & Lint Check
    runs-on: ubuntu-latest
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17

      - name: Check Cargo fmt
        run: |
          cd native
          cargo fmt --all -- --check --color=never

      - name: Check Cargo clippy
        run: |
          cd native
          cargo clippy --color=never --all-targets --workspace -- -D warnings

      # compile benchmarks without running them
      - name: Check benchmark compilation
        run: |
          cd native
          cargo check --benches

      # Cache both ~/.m2 and /root/.m2 since container jobs may run as root
      - name: Cache Maven dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.m2/repository
            /root/.m2/repository
          key: ${{ runner.os }}-benchmark-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-benchmark-maven-

      # scalafix in CHECK mode lints without rewriting sources
      - name: Check Scala compilation and linting
        run: |
          ./mvnw -B compile test-compile scalafix:scalafix -Dscalafix.mode=CHECK -Psemanticdb -DskipTests


================================================
FILE: .github/workflows/pr_build_linux.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: PR Build (Linux)

# Allow only one active run per branch/PR for this workflow; a newer push
# cancels the in-flight run.
concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  push:
    branches:
      - main
    # Skip CI entirely for docs- and benchmark-only changes.
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:

env:
  RUST_VERSION: stable
  # Print full Rust backtraces in CI logs when native code panics.
  RUST_BACKTRACE: 1

jobs:

  # Fast lint check - gates all other jobs
  lint:
    name: Lint
    runs-on: ubuntu-latest
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6

      # rustfmt only; compilation and clippy-level issues are caught by the
      # downstream build/test jobs that this job gates.
      - name: Check Rust formatting
        run: |
          rustup component add rustfmt
          cd native && cargo fmt --all -- --check

  # Scala/Java lint: scalafix check plus markdown formatting drift detection,
  # run once per supported Spark/JDK/Scala profile.
  lint-java:
    needs: lint
    name: Lint Java (${{ matrix.profile.name }})
    runs-on: ubuntu-latest
    container:
      image: amd64/rust
      env:
        # JDK 17 requires these module exports/opens for Spark's internal use of
        # sun.* and java.base internals; empty for older JDKs.
        JAVA_TOOL_OPTIONS: ${{ matrix.profile.java_version == '17' && '--add-exports=java.base/sun.nio.ch=ALL-UNNAMED --add-exports=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED' || '' }}
    strategy:
      matrix:
        profile:
          - name: "Spark 3.4, JDK 11, Scala 2.12"
            java_version: "11"
            maven_opts: "-Pspark-3.4 -Pscala-2.12"
          - name: "Spark 3.5, JDK 17, Scala 2.12"
            java_version: "17"
            maven_opts: "-Pspark-3.5 -Pscala-2.12"
          - name: "Spark 4.0, JDK 17"
            java_version: "17"
            maven_opts: "-Pspark-4.0"
      fail-fast: false
    steps:
      - uses: runs-on/action@742bf56072eb4845a0f94b3394673e4903c90ff0  # v2.1.0
      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: ${{ matrix.profile.java_version }}

      - name: Cache Maven dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.m2/repository
            /root/.m2/repository
          # "-lint" suffix keeps this job's cache entry distinct from the test
          # jobs'; restore-keys still allows falling back to any java-maven cache.
          key: ${{ runner.os }}-java-maven-${{ hashFiles('**/pom.xml') }}-lint
          restore-keys: |
            ${{ runner.os }}-java-maven-

      - name: Run scalafix check
        run: |
          ./mvnw -B package -DskipTests scalafix:scalafix -Dscalafix.mode=CHECK -Psemanticdb ${{ matrix.profile.maven_opts }}

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: '24'

      - name: Install prettier
        run: |
          npm install -g prettier

      # Rewrite all markdown in place; combined with the clean-working-tree
      # check below this fails the job if any .md file was not committed in
      # prettier-formatted form.
      - name: Run prettier
        run: |
          npx prettier "**/*.md" --write

      # Needed so git commands work inside the container where the checkout is
      # owned by a different user.
      - name: Mark workspace as safe for git
        run: |
          git config --global --add safe.directory "$GITHUB_WORKSPACE"

      - name: Check for any local git changes (such as generated docs)
        run: |
          ./dev/ci/check-working-tree-clean.sh

  # Build native library once and share with all test jobs
  build-native:
    needs: lint
    name: Build Native Library
    # Use runs-on.com self-hosted runners for the apache org; forks fall back
    # to standard GitHub-hosted ubuntu-latest.
    runs-on: ${{ github.repository_owner == 'apache' && format('runs-on={0},family=m8a+m7a+c8a,cpu=8,image=ubuntu24-full-x64,extras=s3-cache,disk=large,tag=datafusion-comet', github.run_id) || 'ubuntu-latest' }}
    container:
      image: amd64/rust
    steps:
      - uses: runs-on/action@742bf56072eb4845a0f94b3394673e4903c90ff0  # v2.1.0
      - uses: actions/checkout@v6
      - name: Setup Rust toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17  # JDK only needed for common module proto generation
      # Restore/save are split so the cache is only written back on main (see
      # the save step's condition below); PR builds are read-only consumers.
      - name: Restore Cargo cache
        uses: actions/cache/restore@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}
          restore-keys: |
            ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-

      - name: Build native library (CI profile)
        run: |
          cd native
          # CI profile: same overflow behavior as release, but faster compilation
          # (no LTO, parallel codegen)
          cargo build --profile ci
        env:
          RUSTFLAGS: "-Ctarget-cpu=x86-64-v3"

      # Shared with all downstream linux test jobs via download-artifact.
      - name: Upload native library
        uses: actions/upload-artifact@v7
        with:
          name: native-lib-linux
          path: native/target/ci/libcomet.so
          retention-days: 1

      - name: Save Cargo cache
        uses: actions/cache/save@v5
        if: github.ref == 'refs/heads/main'
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}

  # Run Rust tests (runs in parallel with build-native, uses debug builds)
  linux-test-rust:
    needs: lint
    name: ubuntu-latest/rust-test
    runs-on: ${{ github.repository_owner == 'apache' && format('runs-on={0},family=m8a+m7a+c8a,cpu=16,image=ubuntu24-full-x64,extras=s3-cache,disk=large,tag=datafusion-comet', github.run_id) || 'ubuntu-latest' }}
    container:
      image: amd64/rust
    steps:
      - uses: runs-on/action@742bf56072eb4845a0f94b3394673e4903c90ff0  # v2.1.0

      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17

      # "cargo-debug" key keeps this cache separate from the "cargo-ci" cache
      # used by build-native, since they build different profiles.
      - name: Restore Cargo cache
        uses: actions/cache/restore@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          # Note: Java version intentionally excluded - Rust target is JDK-independent
          key: ${{ runner.os }}-cargo-debug-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}
          restore-keys: |
            ${{ runner.os }}-cargo-debug-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-

      - name: Rust test steps
        uses: ./.github/actions/rust-test

      # Only persist the cache when building main; PR runs restore but never save.
      - name: Save Cargo cache
        uses: actions/cache/save@v5
        if: github.ref == 'refs/heads/main'
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-debug-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}

  # Main Scala/Java test job: a profile x suite matrix run against the
  # prebuilt native library from build-native.
  linux-test:
    needs: build-native
    strategy:
      matrix:
        # the goal with these profiles is to get coverage of all Java, Scala, and Spark
        # versions without testing all possible combinations, which would be overkill
        profile:
          - name: "Spark 3.4, JDK 11, Scala 2.12"
            java_version: "11"
            maven_opts: "-Pspark-3.4 -Pscala-2.12"
            scan_impl: "auto"

          - name: "Spark 3.5.5, JDK 17, Scala 2.13"
            java_version: "17"
            maven_opts: "-Pspark-3.5 -Dspark.version=3.5.5 -Pscala-2.13"
            scan_impl: "auto"

          - name: "Spark 3.5.6, JDK 17, Scala 2.13"
            java_version: "17"
            maven_opts: "-Pspark-3.5 -Dspark.version=3.5.6 -Pscala-2.13"
            scan_impl: "auto"

          - name: "Spark 3.5, JDK 17, Scala 2.12"
            java_version: "17"
            maven_opts: "-Pspark-3.5 -Pscala-2.12"
            scan_impl: "native_iceberg_compat"

          - name: "Spark 4.0, JDK 17"
            java_version: "17"
            maven_opts: "-Pspark-4.0"
            scan_impl: "auto"
        # Each suite value is a newline-separated list of ScalaTest suite class
        # names, passed to the java-test action's "suites" input.
        suite:
          - name: "fuzz"
            value: |
              org.apache.comet.CometFuzzTestSuite
              org.apache.comet.CometFuzzAggregateSuite
              org.apache.comet.CometFuzzIcebergSuite
              org.apache.comet.CometFuzzMathSuite
              org.apache.comet.DataGeneratorSuite
          - name: "shuffle"
            value: |
              org.apache.comet.exec.CometShuffleSuite
              org.apache.comet.exec.CometShuffle4_0Suite
              org.apache.comet.exec.CometNativeColumnarToRowSuite
              org.apache.comet.exec.CometNativeShuffleSuite
              org.apache.comet.exec.CometShuffleEncryptionSuite
              org.apache.comet.exec.CometShuffleManagerSuite
              org.apache.comet.exec.CometAsyncShuffleSuite
              org.apache.comet.exec.DisableAQECometShuffleSuite
              org.apache.comet.exec.DisableAQECometAsyncShuffleSuite
              org.apache.spark.shuffle.sort.SpillSorterSuite
          - name: "parquet"
            value: |
              org.apache.comet.parquet.CometParquetWriterSuite
              org.apache.comet.parquet.ParquetReadV1Suite
              org.apache.comet.parquet.ParquetReadV2Suite
              org.apache.comet.parquet.ParquetReadFromFakeHadoopFsSuite
              org.apache.spark.sql.comet.ParquetDatetimeRebaseV1Suite
              org.apache.spark.sql.comet.ParquetDatetimeRebaseV2Suite
              org.apache.spark.sql.comet.ParquetEncryptionITCase
              org.apache.comet.exec.CometNativeReaderSuite
              org.apache.comet.CometIcebergNativeSuite
          - name: "csv"
            value: |
              org.apache.comet.csv.CometCsvNativeReadSuite
          - name: "exec"
            value: |
              org.apache.comet.exec.CometAggregateSuite
              org.apache.comet.exec.CometExec3_4PlusSuite
              org.apache.comet.exec.CometExecSuite
              org.apache.comet.exec.CometGenerateExecSuite
              org.apache.comet.exec.CometWindowExecSuite
              org.apache.comet.exec.CometJoinSuite
              org.apache.comet.CometNativeSuite
              org.apache.comet.CometSparkSessionExtensionsSuite
              org.apache.spark.CometPluginsSuite
              org.apache.spark.CometPluginsDefaultSuite
              org.apache.spark.CometPluginsNonOverrideSuite
              org.apache.spark.CometPluginsUnifiedModeOverrideSuite
              org.apache.comet.rules.CometScanRuleSuite
              org.apache.comet.rules.CometExecRuleSuite
              org.apache.spark.sql.CometTPCDSQuerySuite
              org.apache.spark.sql.CometTPCDSQueryTestSuite
              org.apache.spark.sql.CometTPCHQuerySuite
              org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
              org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
              org.apache.spark.sql.comet.CometTaskMetricsSuite
              org.apache.spark.sql.comet.CometDppFallbackRepro3949Suite
              org.apache.spark.sql.comet.CometShuffleFallbackStickinessSuite
              org.apache.comet.objectstore.NativeConfigSuite
          # NOTE: keep this list in sync with the "expressions" suite in
          # pr_build_macos.yml (removed a duplicated CometDateTimeUtilsSuite
          # entry that was listed twice).
          - name: "expressions"
            value: |
              org.apache.comet.CometExpressionSuite
              org.apache.comet.CometSqlFileTestSuite
              org.apache.comet.CometExpressionCoverageSuite
              org.apache.comet.CometHashExpressionSuite
              org.apache.comet.CometTemporalExpressionSuite
              org.apache.comet.CometArrayExpressionSuite
              org.apache.comet.CometCastSuite
              org.apache.comet.CometDateTimeUtilsSuite
              org.apache.comet.CometMathExpressionSuite
              org.apache.comet.CometStringExpressionSuite
              org.apache.comet.CometBitwiseExpressionSuite
              org.apache.comet.CometMapExpressionSuite
              org.apache.comet.CometCsvExpressionSuite
              org.apache.comet.CometJsonExpressionSuite
              org.apache.comet.SparkErrorConverterSuite
              org.apache.comet.expressions.conditional.CometIfSuite
              org.apache.comet.expressions.conditional.CometCoalesceSuite
              org.apache.comet.expressions.conditional.CometCaseWhenSuite
          - name: "sql"
            value: |
              org.apache.spark.sql.CometToPrettyStringSuite
      fail-fast: false
    name: ${{ matrix.profile.name }}/${{ matrix.profile.scan_impl }} [${{ matrix.suite.name }}]
    runs-on: ${{ github.repository_owner == 'apache' && format('runs-on={0},family=m8a+m7a+c8a,cpu=16,image=ubuntu24-full-x64,extras=s3-cache,disk=large,tag=datafusion-comet', github.run_id) || 'ubuntu-latest' }}
    container:
      image: amd64/rust
      env:
        # JDK 17 requires these module exports/opens for Spark; empty otherwise.
        JAVA_TOOL_OPTIONS: ${{ matrix.profile.java_version == '17' && '--add-exports=java.base/sun.nio.ch=ALL-UNNAMED --add-exports=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED' || '' }}

    steps:
      - uses: runs-on/action@742bf56072eb4845a0f94b3394673e4903c90ff0  # v2.1.0
      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: ${{ matrix.profile.java_version }}

      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-linux
          # Download to release/ since Maven's -Prelease expects libcomet.so there
          path: native/target/release/

      # Restore cargo registry cache (for any cargo commands that might run)
      - name: Cache Cargo registry
        uses: actions/cache@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-registry-${{ hashFiles('native/**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-registry-

      - name: Java test steps
        uses: ./.github/actions/java-test
        with:
          artifact_name: ${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}-${{ matrix.profile.scan_impl }}
          # NOTE(review): the 'sql' suite passes an empty suites value on the
          # Spark 3.4 profile — presumably to skip it there; confirm how the
          # java-test action treats an empty "suites" input.
          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
          maven_opts: ${{ matrix.profile.maven_opts }}
          scan_impl: ${{ matrix.profile.scan_impl }}
          upload-test-reports: true
          skip-native-build: true

  # TPC-H correctness test - verifies benchmark queries produce correct results
  verify-benchmark-results-tpch:
    needs: build-native
    name: Verify TPC-H Results
    runs-on: ${{ github.repository_owner == 'apache' && format('runs-on={0},family=m8a+m7a+c8a,cpu=16,image=ubuntu24-full-x64,extras=s3-cache,disk=large,tag=datafusion-comet', github.run_id) || 'ubuntu-latest' }}
    container:
      image: amd64/rust
    steps:
      - uses: runs-on/action@742bf56072eb4845a0f94b3394673e4903c90ff0  # v2.1.0

      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 11

      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-linux
          path: native/target/release/

      - name: Cache Maven dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.m2/repository
            /root/.m2/repository
          key: ${{ runner.os }}-java-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-java-maven-

      # Keyed on this workflow file so edits to the generation parameters below
      # invalidate the cached data set.
      - name: Cache TPC-H data
        id: cache-tpch
        uses: actions/cache@v5
        with:
          path: ./tpch
          key: tpch-${{ hashFiles('.github/workflows/pr_build_linux.yml') }}

      - name: Build project
        run: |
          ./mvnw -B -Prelease install -DskipTests

      # Expensive; only runs when the cached SF=1 data set is missing.
      - name: Generate TPC-H data (SF=1)
        if: steps.cache-tpch.outputs.cache-hit != 'true'
        run: |
          cd spark && MAVEN_OPTS='-Xmx20g' ../mvnw -B -Prelease exec:java -Dexec.mainClass="org.apache.spark.sql.GenTPCHData" -Dexec.classpathScope="test" -Dexec.cleanupDaemonThreads="false" -Dexec.args="--location `pwd`/.. --scaleFactor 1 --numPartitions 1 --overwrite"

      - name: Run TPC-H queries
        run: |
          SPARK_HOME=`pwd` SPARK_TPCH_DATA=`pwd`/tpch/sf1_parquet ./mvnw -B -Prelease -Dsuites=org.apache.spark.sql.CometTPCHQuerySuite test

  # TPC-DS correctness tests - verifies benchmark queries produce correct results
  verify-benchmark-results-tpcds:
    needs: build-native
    name: Verify TPC-DS Results (${{ matrix.join }})
    runs-on: ${{ github.repository_owner == 'apache' && format('runs-on={0},family=m8a+m7a+c8a,cpu=16,image=ubuntu24-full-x64,extras=s3-cache,disk=large,tag=datafusion-comet', github.run_id) || 'ubuntu-latest' }}
    container:
      image: amd64/rust
    strategy:
      matrix:
        # One job per join strategy; the matching step below selects the Spark
        # join configuration via SPARK_TPCDS_JOIN_CONF.
        join: [sort_merge, broadcast, hash]
      fail-fast: false
    steps:
      - uses: runs-on/action@742bf56072eb4845a0f94b3394673e4903c90ff0  # v2.1.0

      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 11

      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-linux
          path: native/target/release/

      - name: Cache Maven dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.m2/repository
            /root/.m2/repository
          key: ${{ runner.os }}-java-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-java-maven-

      # Keyed on this workflow file so edits to the generation parameters below
      # invalidate the cached data set.
      - name: Cache TPC-DS data
        id: cache-tpcds
        uses: actions/cache@v5
        with:
          path: ./tpcds-sf-1
          key: tpcds-${{ hashFiles('.github/workflows/pr_build_linux.yml') }}

      - name: Build project
        run: |
          ./mvnw -B -Prelease install -DskipTests

      # The tpcds-kit checkout/build/generate steps only run on a cache miss.
      - name: Checkout tpcds-kit
        if: steps.cache-tpcds.outputs.cache-hit != 'true'
        uses: actions/checkout@v6
        with:
          repository: databricks/tpcds-kit
          path: ./tpcds-kit

      - name: Build tpcds-kit
        if: steps.cache-tpcds.outputs.cache-hit != 'true'
        run: |
          apt-get update && apt-get install -y yacc bison flex gcc-12 g++-12
          update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 120 --slave /usr/bin/g++ g++ /usr/bin/g++-12
          cd tpcds-kit/tools && make OS=LINUX

      - name: Generate TPC-DS data (SF=1)
        if: steps.cache-tpcds.outputs.cache-hit != 'true'
        run: |
          cd spark && MAVEN_OPTS='-Xmx20g' ../mvnw -B -Prelease exec:java -Dexec.mainClass="org.apache.spark.sql.GenTPCDSData" -Dexec.classpathScope="test" -Dexec.cleanupDaemonThreads="false" -Dexec.args="--dsdgenDir `pwd`/../tpcds-kit/tools --location `pwd`/../tpcds-sf-1 --scaleFactor 1 --numPartitions 1"

      - name: Run TPC-DS queries (Sort merge join)
        if: matrix.join == 'sort_merge'
        run: |
          SPARK_HOME=`pwd` SPARK_TPCDS_DATA=`pwd`/tpcds-sf-1 ./mvnw -B -Prelease -Dsuites=org.apache.spark.sql.CometTPCDSQuerySuite test
        env:
          SPARK_TPCDS_JOIN_CONF: |
            spark.sql.autoBroadcastJoinThreshold=-1
            spark.sql.join.preferSortMergeJoin=true

      - name: Run TPC-DS queries (Broadcast hash join)
        if: matrix.join == 'broadcast'
        run: |
          SPARK_HOME=`pwd` SPARK_TPCDS_DATA=`pwd`/tpcds-sf-1 ./mvnw -B -Prelease -Dsuites=org.apache.spark.sql.CometTPCDSQuerySuite test
        env:
          SPARK_TPCDS_JOIN_CONF: |
            spark.sql.autoBroadcastJoinThreshold=10485760

      - name: Run TPC-DS queries (Shuffled hash join)
        if: matrix.join == 'hash'
        run: |
          SPARK_HOME=`pwd` SPARK_TPCDS_DATA=`pwd`/tpcds-sf-1 ./mvnw -B -Prelease -Dsuites=org.apache.spark.sql.CometTPCDSQuerySuite test
        env:
          SPARK_TPCDS_JOIN_CONF: |
            spark.sql.autoBroadcastJoinThreshold=-1
            spark.sql.join.forceApplyShuffledHashJoin=true


================================================
FILE: .github/workflows/pr_build_macos.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: PR Build (macOS)

# Allow only one active run per branch/PR for this workflow; a newer push
# cancels the in-flight run.
concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  push:
    branches:
      - main
    # Skip CI entirely for docs- and benchmark-only changes.
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:

env:
  RUST_VERSION: stable
  # Print full Rust backtraces in CI logs when native code panics.
  RUST_BACKTRACE: 1

jobs:

  # Fast lint check - gates all other jobs (runs on Linux for cost efficiency)
  lint:
    name: Lint
    runs-on: ubuntu-latest
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6

      # rustfmt only; compilation issues are caught by the downstream jobs.
      - name: Check Rust formatting
        run: |
          rustup component add rustfmt
          cd native && cargo fmt --all -- --check

  # Build native library once and share with all test jobs
  build-native:
    needs: lint
    name: Build Native Library (macOS)
    # macos-14 runners are Apple Silicon (aarch64).
    runs-on: macos-14
    steps:
      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-macos-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17
          jdk-architecture: aarch64
          protoc-architecture: aarch_64

      # Restore/save are split so the cache is only written back on main (see
      # the save step's condition below); PR builds are read-only consumers.
      - name: Restore Cargo cache
        uses: actions/cache/restore@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}
          restore-keys: |
            ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-

      - name: Build native library (CI profile)
        run: |
          cd native
          # CI profile: same overflow behavior as release, but faster compilation
          # (no LTO, parallel codegen)
          cargo build --profile ci
        env:
          RUSTFLAGS: "-Ctarget-cpu=apple-m1"

      # Shared with the macOS test matrix via download-artifact.
      - name: Upload native library
        uses: actions/upload-artifact@v7
        with:
          name: native-lib-macos
          path: native/target/ci/libcomet.dylib
          retention-days: 1

      - name: Save Cargo cache
        uses: actions/cache/save@v5
        if: github.ref == 'refs/heads/main'
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}

  # Main Scala/Java test job on Apple Silicon: a profile x suite matrix run
  # against the prebuilt native library from build-native.
  macos-aarch64-test:
    needs: build-native
    strategy:
      matrix:
        os: [macos-14]

        # the goal with these profiles is to get coverage of all Java, Scala, and Spark
        # versions without testing all possible combinations, which would be overkill
        profile:
          - name: "Spark 3.4, JDK 11, Scala 2.12"
            java_version: "11"
            maven_opts: "-Pspark-3.4 -Pscala-2.12"

          - name: "Spark 3.5, JDK 17, Scala 2.13"
            java_version: "17"
            maven_opts: "-Pspark-3.5 -Pscala-2.13"

          - name: "Spark 4.0, JDK 17, Scala 2.13"
            java_version: "17"
            maven_opts: "-Pspark-4.0 -Pscala-2.13"

        # Each suite value is a newline-separated list of ScalaTest suite class
        # names, passed to the java-test action's "suites" input. Keep these
        # lists in sync with pr_build_linux.yml.
        suite:
          - name: "fuzz"
            value: |
              org.apache.comet.CometFuzzTestSuite
              org.apache.comet.CometFuzzAggregateSuite
              org.apache.comet.CometFuzzIcebergSuite
              org.apache.comet.CometFuzzMathSuite
              org.apache.comet.DataGeneratorSuite
          - name: "shuffle"
            value: |
              org.apache.comet.exec.CometShuffleSuite
              org.apache.comet.exec.CometShuffle4_0Suite
              org.apache.comet.exec.CometNativeColumnarToRowSuite
              org.apache.comet.exec.CometNativeShuffleSuite
              org.apache.comet.exec.CometShuffleEncryptionSuite
              org.apache.comet.exec.CometShuffleManagerSuite
              org.apache.comet.exec.CometAsyncShuffleSuite
              org.apache.comet.exec.DisableAQECometShuffleSuite
              org.apache.comet.exec.DisableAQECometAsyncShuffleSuite
              org.apache.spark.shuffle.sort.SpillSorterSuite
          - name: "parquet"
            value: |
              org.apache.comet.parquet.CometParquetWriterSuite
              org.apache.comet.parquet.ParquetReadV1Suite
              org.apache.comet.parquet.ParquetReadV2Suite
              org.apache.comet.parquet.ParquetReadFromFakeHadoopFsSuite
              org.apache.spark.sql.comet.ParquetDatetimeRebaseV1Suite
              org.apache.spark.sql.comet.ParquetDatetimeRebaseV2Suite
              org.apache.spark.sql.comet.ParquetEncryptionITCase
              org.apache.comet.exec.CometNativeReaderSuite
              org.apache.comet.CometIcebergNativeSuite
          - name: "csv"
            value: |
              org.apache.comet.csv.CometCsvNativeReadSuite
          - name: "exec"
            value: |
              org.apache.comet.exec.CometAggregateSuite
              org.apache.comet.exec.CometExec3_4PlusSuite
              org.apache.comet.exec.CometExecSuite
              org.apache.comet.exec.CometGenerateExecSuite
              org.apache.comet.exec.CometWindowExecSuite
              org.apache.comet.exec.CometJoinSuite
              org.apache.comet.CometNativeSuite
              org.apache.comet.CometSparkSessionExtensionsSuite
              org.apache.spark.CometPluginsSuite
              org.apache.spark.CometPluginsDefaultSuite
              org.apache.spark.CometPluginsNonOverrideSuite
              org.apache.spark.CometPluginsUnifiedModeOverrideSuite
              org.apache.comet.rules.CometScanRuleSuite
              org.apache.comet.rules.CometExecRuleSuite
              org.apache.spark.sql.CometTPCDSQuerySuite
              org.apache.spark.sql.CometTPCDSQueryTestSuite
              org.apache.spark.sql.CometTPCHQuerySuite
              org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
              org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
              org.apache.spark.sql.comet.CometTaskMetricsSuite
              org.apache.spark.sql.comet.CometDppFallbackRepro3949Suite
              org.apache.spark.sql.comet.CometShuffleFallbackStickinessSuite
              org.apache.comet.objectstore.NativeConfigSuite
          - name: "expressions"
            value: |
              org.apache.comet.CometExpressionSuite
              org.apache.comet.CometSqlFileTestSuite
              org.apache.comet.CometExpressionCoverageSuite
              org.apache.comet.CometHashExpressionSuite
              org.apache.comet.CometTemporalExpressionSuite
              org.apache.comet.CometArrayExpressionSuite
              org.apache.comet.CometCastSuite
              org.apache.comet.CometMathExpressionSuite
              org.apache.comet.CometStringExpressionSuite
              org.apache.comet.CometBitwiseExpressionSuite
              org.apache.comet.CometMapExpressionSuite
              org.apache.comet.CometJsonExpressionSuite
              org.apache.comet.CometCsvExpressionSuite
              org.apache.comet.CometDateTimeUtilsSuite
              org.apache.comet.SparkErrorConverterSuite
              org.apache.comet.expressions.conditional.CometIfSuite
              org.apache.comet.expressions.conditional.CometCoalesceSuite
              org.apache.comet.expressions.conditional.CometCaseWhenSuite
          - name: "sql"
            value: |
              org.apache.spark.sql.CometToPrettyStringSuite

      fail-fast: false
    name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v6

      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-macos-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: ${{ matrix.profile.java_version }}
          jdk-architecture: aarch64
          protoc-architecture: aarch_64

      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-macos
          # Download to release/ since Maven's -Prelease expects libcomet.dylib there
          path: native/target/release/

      # Restore cargo registry cache (for any cargo commands that might run)
      - name: Cache Cargo registry
        uses: actions/cache@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-registry-${{ hashFiles('native/**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-registry-

      - name: Set thread thresholds envs for spark test on macOS
        # see: https://github.com/apache/datafusion-comet/issues/2965
        shell: bash
        run: |
          echo "SPARK_TEST_SQL_SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD=256" >> $GITHUB_ENV
          echo "SPARK_TEST_SQL_RESULT_QUERY_STAGE_MAX_THREAD_THRESHOLD=256" >> $GITHUB_ENV
          echo "SPARK_TEST_HIVE_SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD=48" >> $GITHUB_ENV
          echo "SPARK_TEST_HIVE_RESULT_QUERY_STAGE_MAX_THREAD_THRESHOLD=48" >> $GITHUB_ENV

      - name: Java test steps
        uses: ./.github/actions/java-test
        with:
          artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
          # NOTE(review): the 'sql' suite passes an empty suites value on the
          # Spark 3.4 profile — presumably to skip it there; confirm how the
          # java-test action treats an empty "suites" input.
          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
          maven_opts: ${{ matrix.profile.maven_opts }}
          skip-native-build: true


================================================
FILE: .github/workflows/pr_markdown_format.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Check Markdown Formatting

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  pull_request:
    paths:
      - '**.md'

jobs:
  prettier-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: '24'

      - name: Install prettier
        run: npm install -g prettier

      - name: Check markdown formatting
        run: |
          # if you encounter error, run prettier locally and commit changes using instructions at:
          #
          # https://datafusion.apache.org/comet/contributor-guide/development.html#how-to-format-md-document
          #
          prettier --check "**/*.md"


================================================
FILE: .github/workflows/pr_missing_suites.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Check that all test suites are added to PR workflows

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  check-missing-suites:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - name: Check Missing Suites
        run: python3 dev/ci/check-suites.py


================================================
FILE: .github/workflows/pr_rat_check.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: RAT License Check

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

permissions:
  contents: read

# No paths-ignore: this workflow must run for ALL changes including docs
on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  rat-check:
    name: RAT License Check
    runs-on: ubuntu-slim
    steps:
      - uses: actions/checkout@v6
      - name: Set up Java
        uses: actions/setup-java@v5
        with:
          distribution: temurin
          java-version: 11
      - name: Run RAT check
        run: ./mvnw -B -N apache-rat:check


================================================
FILE: .github/workflows/pr_title_check.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Check PR Title

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  pull_request:
    types: [opened, edited, reopened]

jobs:
  check-pr-title:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - name: Check PR title
        env:
          PR_TITLE: ${{ github.event.pull_request.title }}
        run: |
          if ! echo $PR_TITLE | grep -Eq '^(\w+)(\(.+\))?: .+$'; then
            echo "PR title does not follow conventional commit style."
            echo "Please use a title in the format: type: message, or type(scope): message"
            echo "Example: feat: Add support for sort-merge join"
            exit 1
          fi



================================================
FILE: .github/workflows/spark_sql_test.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Spark SQL Tests

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  push:
    branches:
      - main
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths-ignore:
      - "benchmarks/**"
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:
    inputs:
      collect-fallback-logs:
        description: 'Whether to collect Comet fallback reasons from Spark SQL unit test logs'
        required: false
        default: 'false'
        type: boolean

env:
  RUST_VERSION: stable
  RUST_BACKTRACE: 1

jobs:

  # Build native library once and share with all test jobs
  build-native:
    name: Build Native Library
    runs-on: ubuntu-24.04
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6

      - name: Setup Rust toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17

      - name: Restore Cargo cache
        uses: actions/cache/restore@v5
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}
          restore-keys: |
            ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-

      - name: Build native library (CI profile)
        run: |
          cd native
          cargo build --profile ci
        env:
          RUSTFLAGS: "-Ctarget-cpu=x86-64-v3"

      - name: Upload native library
        uses: actions/upload-artifact@v7
        with:
          name: native-lib-linux
          path: native/target/ci/libcomet.so
          retention-days: 1

      - name: Save Cargo cache
        uses: actions/cache/save@v5
        if: github.ref == 'refs/heads/main'
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}-${{ hashFiles('native/**/*.rs') }}

  spark-sql-test:
    needs: build-native
    strategy:
      matrix:
        os: [ubuntu-24.04]
        module:
          - {name: "catalyst", args1: "catalyst/test", args2: ""}
          - {name: "sql_core-1", args1: "", args2: sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest}
          - {name: "sql_core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
          - {name: "sql_core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
          - {name: "sql_hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
          - {name: "sql_hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
          - {name: "sql_hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
        # Since 4f5eaf0, auto mode uses native_datafusion for V1 scans,
        # so we only need to test with auto.
        config:
          - {spark-short: '3.4', spark-full: '3.4.3', java: 11, scan-impl: 'auto'}
          - {spark-short: '3.5', spark-full: '3.5.8', java: 11, scan-impl: 'auto'}
          - {spark-short: '4.0', spark-full: '4.0.1', java: 17, scan-impl: 'auto'}
        # Skip sql_hive-1 for Spark 4.0 due to https://github.com/apache/datafusion-comet/issues/2946
        exclude:
          - config: {spark-short: '4.0', spark-full: '4.0.1', java: 17, scan-impl: 'auto'}
            module: {name: "sql_hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
      fail-fast: false
    name: spark-sql-${{ matrix.config.scan-impl }}-${{ matrix.module.name }}/spark-${{ matrix.config.spark-full }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.config.java }}
      - name: Download native library
        uses: actions/download-artifact@v8
        with:
          name: native-lib-linux
          path: native/target/release/
      - name: Setup Spark
        uses: ./.github/actions/setup-spark-builder
        with:
          spark-version: ${{ matrix.config.spark-full }}
          spark-short-version: ${{ matrix.config.spark-short }}
          skip-native-build: true
      - name: Run Spark tests
        run: |
          cd apache-spark
          rm -rf /root/.m2/repository/org/apache/parquet # stale cached Parquet artifacts must be removed before the build, or it can fail
          NOLINT_ON_COMPILE=true ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_PARQUET_SCAN_IMPL=${{ matrix.config.scan-impl }} ENABLE_COMET_LOG_FALLBACK_REASONS=${{ github.event.inputs.collect-fallback-logs || 'false' }} \
            build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
          if [ "${{ github.event.inputs.collect-fallback-logs }}" = "true" ]; then
            find . -type f -name "unit-tests.log" -print0 | xargs -0 grep -h "Comet cannot accelerate" | sed 's/.*Comet cannot accelerate/Comet cannot accelerate/' | sort -u > fallback.log
          fi
        env:
          LC_ALL: "C.UTF-8"
      - name: Upload fallback log
        if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
        uses: actions/upload-artifact@v7
        with:
          name: fallback-log-spark-sql-${{ matrix.config.scan-impl }}-${{ matrix.module.name }}-spark-${{ matrix.config.spark-full }}
          path: "**/fallback.log"

  merge-fallback-logs:
    if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
    name: merge-fallback-logs
    needs: [spark-sql-test]
    runs-on: ubuntu-24.04
    steps:
      - name: Download fallback log artifacts
        uses: actions/download-artifact@v8
        with:
          path: fallback-logs/
      - name: Merge fallback logs
        run: |
          find ./fallback-logs/ -type f -name "fallback.log" -print0 | xargs -0 cat | sort -u > all_fallback.log
      - name: Upload merged fallback log
        uses: actions/upload-artifact@v7
        with:
          name: all-fallback-log
          path: all_fallback.log


================================================
FILE: .github/workflows/spark_sql_test_native_iceberg_compat.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Spark SQL Tests (native_iceberg_compat)

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:

env:
  RUST_VERSION: stable
  RUST_BACKTRACE: 1

jobs:
  spark-sql-catalyst-native-iceberg-compat:
    strategy:
      matrix:
        os: [ubuntu-24.04]
        java-version: [11]
        spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.8'}]
        module:
          - {name: "catalyst", args1: "catalyst/test", args2: ""}
          - {name: "sql/core-1", args1: "", args2: sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest}
          - {name: "sql/core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
          - {name: "sql/core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
          - {name: "sql/hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
          - {name: "sql/hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
          - {name: "sql/hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
      fail-fast: false
    name: spark-sql-native-iceberg-compat-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java-version }}
      - name: Setup Spark
        uses: ./.github/actions/setup-spark-builder
        with:
          spark-version: ${{ matrix.spark-version.full }}
          spark-short-version: ${{ matrix.spark-version.short }}
      - name: Run Spark tests
        run: |
          cd apache-spark
          rm -rf /root/.m2/repository/org/apache/parquet # stale cached Parquet artifacts must be removed before the build, or it can fail
          ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_PARQUET_SCAN_IMPL=native_iceberg_compat build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
        env:
          LC_ALL: "C.UTF-8"



================================================
FILE: .github/workflows/stale.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: "Close stale PRs"
on:
  schedule:
    - cron: "30 1 * * *"

jobs:
  close-stale-prs:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f  # v10.2.0
        with:
          stale-pr-message: "Thank you for your contribution. Unfortunately, this pull request is stale because it has been open 60 days with no activity. Please remove the stale label or add a comment, or this pull request will be closed in 7 days."
          days-before-pr-stale: 60
          days-before-pr-close: 7
          # do not close stale issues
          days-before-issue-stale: -1
          days-before-issue-close: -1
          repo-token: ${{ secrets.GITHUB_TOKEN }}

================================================
FILE: .github/workflows/take.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Assign/unassign the issue via `take` or `untake` comment
on:
  issue_comment:
    types: created

permissions:
  issues: write

jobs:
  issue_assign:
    runs-on: ubuntu-latest
    if: (!github.event.issue.pull_request) && (github.event.comment.body == 'take' || github.event.comment.body == 'untake')
    concurrency:
      group: ${{ github.actor }}-issue-assign
    steps:
      - name: Take or untake issue
        env:
          COMMENT_BODY: ${{ github.event.comment.body }}
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          USER_LOGIN: ${{ github.event.comment.user.login }}
          REPO: ${{ github.repository }}
          TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          if [ "$COMMENT_BODY" == "take" ]
          then
            CODE=$(curl -H "Authorization: token $TOKEN" -LI https://api.github.com/repos/$REPO/issues/$ISSUE_NUMBER/assignees/$USER_LOGIN -o /dev/null -w '%{http_code}\n' -s)
            if [ "$CODE" -eq "204" ]
            then
              echo "Assigning issue $ISSUE_NUMBER to $USER_LOGIN"
              curl -X POST -H "Authorization: token $TOKEN" -H "Content-Type: application/json" -d "{\"assignees\": [\"$USER_LOGIN\"]}" https://api.github.com/repos/$REPO/issues/$ISSUE_NUMBER/assignees
            else
              echo "Cannot assign issue $ISSUE_NUMBER to $USER_LOGIN"
            fi
          elif [ "$COMMENT_BODY" == "untake" ]
          then
            echo "Unassigning issue $ISSUE_NUMBER from $USER_LOGIN"
            curl -X DELETE -H "Authorization: token $TOKEN" -H "Content-Type: application/json" -d "{\"assignees\": [\"$USER_LOGIN\"]}" https://api.github.com/repos/$REPO/issues/$ISSUE_NUMBER/assignees
          fi


================================================
FILE: .github/workflows/validate_workflows.yml
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: Validate Github Workflows

on:
  pull_request:
    paths:
      - ".github/workflows/*.yml"
      - ".github/workflows/*.yaml"
  push:
    branches:
      - main

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - name: Install actionlint
        run: |
          curl -sSfL https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash | bash
          echo "$PWD" >> $GITHUB_PATH

      - name: Lint GitHub Actions workflows
        run: actionlint -color --shellcheck=off


================================================
FILE: .gitignore
================================================
CLAUDE.md
target
.idea
*.iml
.vscode/
.bloop/
.metals/
derby.log
metastore_db/
spark-warehouse/
dependency-reduced-pom.xml
native/proto/src/generated
prebuild
.flattened-pom.xml
rat.txt
filtered_rat.txt
dev/dist
apache-rat-*.jar
venv
.venv
dev/release/comet-rm/workdir
spark/benchmarks
.DS_Store
comet-event-trace.json
__pycache__
output


================================================
FILE: .mvn/wrapper/maven-wrapper.properties
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.6/apache-maven-3.9.6-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar


================================================
FILE: .scalafix.conf
================================================
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
rules = [
  ExplicitResultTypes,
  NoAutoTupling,
  RemoveUnused,

  DisableSyntax,
  LeakingImplicitClassVal,
  NoValInForComprehension,
  ProcedureSyntax,
  RedundantSyntax
]


================================================
FILE: CHANGELOG.md
================================================
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

# Apache DataFusion Comet Changelog

Comprehensive changelogs for each release are available [here](dev/changelog).


================================================
FILE: LICENSE.txt
================================================

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

--------------------------------------------------------------------------------

This project includes code from Apache Aurora.

* dev/release/{release,changelog,release-candidate} are based on the scripts from
  Apache Aurora

Copyright: 2016 The Apache Software Foundation.
Home page: https://aurora.apache.org/
License: http://www.apache.org/licenses/LICENSE-2.0

================================================
FILE: Makefile
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

.PHONY: all core jvm test clean release-linux release bench

# Extra JVM arguments required to run tests on modern JDKs, resolved
# from the Maven property `extraJavaTestArgs`.
define spark_jvm_17_extra_args
$(shell ./mvnw help:evaluate -q -DforceStdout -Dexpression=extraJavaTestArgs)
endef

# Build optional Comet native features (e.g. hdfs).
# Expands to '--features=$(COMET_FEATURES)' only when COMET_FEATURES is set.
FEATURES_ARG := $(shell ! [ -z $(COMET_FEATURES) ] && echo '--features=$(COMET_FEATURES)')

# Default target: build the Rust native core and the JVM modules.
all: core jvm

# Compile the Rust native library (debug profile).
core:
	cd native && cargo build $(FEATURES_ARG)
# Run the Rust test suite (needs CometException compiled on the JVM side first).
test-rust:
	# We need to compile CometException so that the cargo test can pass
	./mvnw compile -pl common -DskipTests $(PROFILES)
	cd native && cargo build $(FEATURES_ARG) && \
	RUST_BACKTRACE=1 cargo test $(FEATURES_ARG)
# Build and package the JVM modules, skipping tests.
jvm:
	./mvnw clean package -DskipTests $(PROFILES)
# Run the JVM test suite against a freshly built native core.
test-jvm: core
	SPARK_HOME=`pwd` COMET_CONF_DIR=$(shell pwd)/conf RUST_BACKTRACE=1 ./mvnw verify $(PROFILES)
# Run both the Rust and the JVM test suites.
test: test-rust test-jvm
# Remove all build artifacts (cargo, Maven, and the .dist staging directory).
clean:
	cd native && cargo clean
	./mvnw clean $(PROFILES)
	rm -rf .dist
# Run Rust benchmarks, optimized for the host CPU; extra goals are passed through.
bench:
	cd native && RUSTFLAGS="-Ctarget-cpu=native" cargo bench $(FEATURES_ARG) $(filter-out $@,$(MAKECMDGOALS))
# Format Rust (cargo fmt) and JVM (scalafix, spotless) sources.
format:
	cd native && cargo fmt
	./mvnw compile test-compile scalafix:scalafix -Psemanticdb $(PROFILES)
	./mvnw spotless:apply $(PROFILES)

# build native libs for amd64 architecture Linux/MacOS on a Linux/amd64 machine/container
core-amd64-libs:
	cd native && RUSTFLAGS="-Ctarget-cpu=x86-64-v3" cargo build -j 2 --release $(FEATURES_ARG)
ifdef HAS_OSXCROSS
	rustup target add x86_64-apple-darwin
	cd native && cargo build -j 2 --target x86_64-apple-darwin --release $(FEATURES_ARG)
endif

# build native libs for arm64 architecture Linux/MacOS on a Linux/arm64 machine/container
core-arm64-libs:
	cd native && RUSTFLAGS="-Ctarget-cpu=neoverse-n1" cargo build -j 2 --release $(FEATURES_ARG)
ifdef HAS_OSXCROSS
	rustup target add aarch64-apple-darwin
	cd native && cargo build -j 2 --target aarch64-apple-darwin --release $(FEATURES_ARG)
endif

# Cross-compile release native libs for macOS/x86_64 and Linux/amd64,
# bundle them into a jar, and deploy it.
core-amd64:
	rustup target add x86_64-apple-darwin
	cd native && RUSTFLAGS="-Ctarget-cpu=skylake" CC=o64-clang CXX=o64-clang++ cargo build --target x86_64-apple-darwin --release $(FEATURES_ARG)
	mkdir -p common/target/classes/org/apache/comet/darwin/x86_64
	cp native/target/x86_64-apple-darwin/release/libcomet.dylib common/target/classes/org/apache/comet/darwin/x86_64
	cd native && RUSTFLAGS="-Ctarget-cpu=x86-64-v3" cargo build --release $(FEATURES_ARG)
	mkdir -p common/target/classes/org/apache/comet/linux/amd64
	cp native/target/release/libcomet.so common/target/classes/org/apache/comet/linux/amd64
	jar -cf common/target/comet-native-x86_64.jar \
		-C common/target/classes/org/apache/comet darwin \
		-C common/target/classes/org/apache/comet linux
	./dev/deploy-file common/target/comet-native-x86_64.jar comet-native-x86_64${COMET_CLASSIFIER} jar

# Cross-compile release native libs for macOS/aarch64 and Linux/aarch64,
# bundle them into a jar, and deploy it.
core-arm64:
	rustup target add aarch64-apple-darwin
	cd native && RUSTFLAGS="-Ctarget-cpu=apple-m1" CC=arm64-apple-darwin21.4-clang CXX=arm64-apple-darwin21.4-clang++ CARGO_FEATURE_NEON=1 cargo build --target aarch64-apple-darwin --release $(FEATURES_ARG)
	mkdir -p common/target/classes/org/apache/comet/darwin/aarch64
	cp native/target/aarch64-apple-darwin/release/libcomet.dylib common/target/classes/org/apache/comet/darwin/aarch64
	cd native && RUSTFLAGS="-Ctarget-cpu=neoverse-n1" cargo build --release $(FEATURES_ARG)
	mkdir -p common/target/classes/org/apache/comet/linux/aarch64
	cp native/target/release/libcomet.so common/target/classes/org/apache/comet/linux/aarch64
	jar -cf common/target/comet-native-aarch64.jar \
		-C common/target/classes/org/apache/comet darwin \
		-C common/target/classes/org/apache/comet linux
	./dev/deploy-file common/target/comet-native-aarch64.jar comet-native-aarch64${COMET_CLASSIFIER} jar

# Full release build on a Linux host, cross-compiling for both macOS targets
# (requires osxcross toolchains) plus the host target, then installing via Maven.
release-linux: clean
	rustup target add aarch64-apple-darwin x86_64-apple-darwin
	cd native && RUSTFLAGS="-Ctarget-cpu=apple-m1" CC=arm64-apple-darwin21.4-clang CXX=arm64-apple-darwin21.4-clang++ CARGO_FEATURE_NEON=1 cargo build --target aarch64-apple-darwin --release $(FEATURES_ARG)
	cd native && RUSTFLAGS="-Ctarget-cpu=skylake" CC=o64-clang CXX=o64-clang++ cargo build --target x86_64-apple-darwin --release $(FEATURES_ARG)
	cd native && RUSTFLAGS="-Ctarget-cpu=native" cargo build --release $(FEATURES_ARG)
	./mvnw install -Prelease -DskipTests $(PROFILES)
# Release build for the host target only.
release:
	cd native && RUSTFLAGS="$(RUSTFLAGS) -Ctarget-cpu=native" cargo build --release $(FEATURES_ARG)
	./mvnw install -Prelease -DskipTests $(PROFILES)
# Release build that skips the git-commit-id Maven plugin (for builds outside a git checkout).
release-nogit:
	cd native && RUSTFLAGS="-Ctarget-cpu=native" cargo build --release
	./mvnw install -Prelease -DskipTests $(PROFILES) -Dmaven.gitcommitid.skip=true
# Run a JVM benchmark main class, e.g. `make benchmark-org.example.Foo -- --args`.
benchmark-%: release
	cd spark && COMET_CONF_DIR=$(shell pwd)/conf MAVEN_OPTS='-Xmx20g ${call spark_jvm_17_extra_args}' ../mvnw exec:java -Dexec.mainClass="$*" -Dexec.classpathScope="test" -Dexec.cleanupDaemonThreads="false" -Dexec.args="$(filter-out $@,$(MAKECMDGOALS))" $(PROFILES)
.DEFAULT:
	@: # ignore arguments provided to benchmarks e.g. "make benchmark-foo -- --bar", we do not want to treat "--bar" as target


================================================
FILE: NOTICE.txt
================================================
Apache DataFusion Comet
Copyright 2024 The Apache Software Foundation

This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

This product includes software developed at
Apache Gluten (https://github.com/apache/incubator-gluten/)
Specifically:
- Optimizer rule to replace SortMergeJoin with ShuffleHashJoin

This product includes software developed at
DataFusion HDFS ObjectStore Contrib Package (https://github.com/datafusion-contrib/datafusion-objectstore-hdfs)

This product includes software developed at
DataFusion fs-hdfs3 Contrib Package (https://github.com/datafusion-contrib/fs-hdfs)


================================================
FILE: README.md
================================================
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

# Apache DataFusion Comet

[![Apache licensed][license-badge]][license-url]
[![Discord chat][discord-badge]][discord-url]
[![Pending PRs][pending-pr-badge]][pending-pr-url]
[![Maven Central][maven-badge]][maven-url]

[license-badge]: https://img.shields.io/badge/license-Apache%20v2-blue.svg
[license-url]: https://github.com/apache/datafusion-comet/blob/main/LICENSE.txt
[discord-badge]: https://img.shields.io/discord/885562378132000778.svg?logo=discord&style=flat-square
[discord-url]: https://discord.gg/3EAr4ZX6JK
[pending-pr-badge]: https://img.shields.io/github/issues-search/apache/datafusion-comet?query=is%3Apr+is%3Aopen+draft%3Afalse+review%3Arequired+status%3Asuccess&label=Pending%20PRs&logo=github
[pending-pr-url]: https://github.com/apache/datafusion-comet/pulls?q=is%3Apr+is%3Aopen+draft%3Afalse+review%3Arequired+status%3Asuccess+sort%3Aupdated-desc
[maven-badge]: https://img.shields.io/maven-central/v/org.apache.datafusion/comet-spark-spark4.0_2.13
[maven-url]: https://search.maven.org/search?q=g:org.apache.datafusion%20AND%20comet-spark

<img src="docs/source/_static/images/DataFusionComet-Logo-Light.png" width="512" alt="logo"/>

Apache DataFusion Comet is a high-performance accelerator for Apache Spark, built on top of the powerful
[Apache DataFusion] query engine. Comet is designed to significantly enhance the
performance of Apache Spark workloads while leveraging commodity hardware and seamlessly integrating with the
Spark ecosystem without requiring any code changes.

Comet also accelerates Apache Iceberg, when performing Parquet scans from Spark.

[Apache DataFusion]: https://datafusion.apache.org

# Benefits of Using Comet

## Run Spark Queries at DataFusion Speeds

Comet delivers a performance speedup for many queries, enabling faster data processing and shorter time-to-insights.

The following chart shows the time it takes to run the 22 TPC-H queries against 100 GB of data in Parquet format
using a single executor with 8 cores. See the [Comet Benchmarking Guide](https://datafusion.apache.org/comet/contributor-guide/benchmarking.html)
for details of the environment used for these benchmarks.

When using Comet, the overall run time is reduced from 687 seconds to 302 seconds, a 2.2x speedup.

![](docs/source/_static/images/benchmark-results/0.11.0/tpch_allqueries.png)

Here is a breakdown showing relative performance of Spark and Comet for each TPC-H query.

![](docs/source/_static/images/benchmark-results/0.11.0/tpch_queries_compare.png)

The following charts show how much Comet currently accelerates each query from the benchmark.

### Relative speedup

![](docs/source/_static/images/benchmark-results/0.11.0/tpch_queries_speedup_rel.png)

### Absolute speedup

![](docs/source/_static/images/benchmark-results/0.11.0/tpch_queries_speedup_abs.png)

These benchmarks can be reproduced in any environment using the documentation in the
[Comet Benchmarking Guide](https://datafusion.apache.org/comet/contributor-guide/benchmarking.html). We encourage
you to run your own benchmarks.

Results for our benchmark derived from TPC-DS are available in the [benchmarking guide](https://datafusion.apache.org/comet/contributor-guide/benchmark-results/tpc-ds.html).

## Use Commodity Hardware

Comet leverages commodity hardware, eliminating the need for costly hardware upgrades or
specialized hardware accelerators, such as GPUs or FPGA. By maximizing the utilization of commodity hardware, Comet
ensures cost-effectiveness and scalability for your Spark deployments.

## Spark Compatibility

Comet aims for 100% compatibility with all supported versions of Apache Spark, allowing you to integrate Comet into
your existing Spark deployments and workflows seamlessly. With no code changes required, you can immediately harness
the benefits of Comet's acceleration capabilities without disrupting your Spark applications.

## Tight Integration with Apache DataFusion

Comet tightly integrates with the core Apache DataFusion project, leveraging its powerful execution engine. With
seamless interoperability between Comet and DataFusion, you can achieve optimal performance and efficiency in your
Spark workloads.

## Active Community

Comet boasts a vibrant and active community of developers, contributors, and users dedicated to advancing the
capabilities of Apache DataFusion and accelerating the performance of Apache Spark.

## Getting Started

To get started with Apache DataFusion Comet, follow the
[installation instructions](https://datafusion.apache.org/comet/user-guide/installation.html). Join the
[DataFusion Slack and Discord channels](https://datafusion.apache.org/contributor-guide/communication.html) to connect
with other users, ask questions, and share your experiences with Comet.

Follow the [Apache DataFusion Comet Overview](https://datafusion.apache.org/comet/about/index.html#comet-overview) for more detailed information.

## Contributing

We welcome contributions from the community to help improve and enhance Apache DataFusion Comet. Whether it's fixing
bugs, adding new features, writing documentation, or optimizing performance, your contributions are invaluable in
shaping the future of Comet. Check out our
[contributor guide](https://datafusion.apache.org/comet/contributor-guide/contributing.html) to get started.

## License

Apache DataFusion Comet is licensed under the Apache License 2.0. See the [LICENSE.txt](LICENSE.txt) file for details.

## Acknowledgments

We would like to express our gratitude to the Apache DataFusion community for their support and contributions to
Comet. Together, we're building a faster, more efficient future for big data processing with Apache Spark.


================================================
FILE: benchmarks/Dockerfile
================================================
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Base image: published Comet image for Spark 3.5.5 / Scala 2.12 / Java 11.
FROM apache/datafusion-comet:0.7.0-spark3.5.5-scala2.12-java11

# Install git and Python tooling needed to fetch and run the benchmark scripts.
RUN apt update \
    && apt install -y git python3 python3-pip \
    && apt clean

# Fetch the DataFusion benchmark queries and runner scripts into /opt.
RUN cd /opt \
    && git clone https://github.com/apache/datafusion-benchmarks.git


================================================
FILE: benchmarks/README.md
================================================
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

# Running Comet Benchmarks in Microk8s

This guide explains how to run benchmarks derived from TPC-H and TPC-DS in Apache DataFusion Comet deployed in a
local Microk8s cluster.

## Use Microk8s locally

Install Microk8s following the instructions at https://microk8s.io/docs/getting-started and then perform these
additional steps, ensuring that any existing kube config is backed up first.

```shell
mkdir -p ~/.kube
microk8s config > ~/.kube/config

microk8s enable dns
microk8s enable registry

microk8s kubectl create serviceaccount spark
```

## Build Comet Docker Image

Run the following command from the root of this repository to build the Comet Docker image, or use a published
Docker image from https://github.com/orgs/apache/packages?repo_name=datafusion-comet

```shell
docker build -t apache/datafusion-comet -f kube/Dockerfile .
```

## Build Comet Benchmark Docker Image

Build the benchmark Docker image and push to the Microk8s Docker registry.

```shell
docker build -t apache/datafusion-comet-tpcbench  .
docker tag apache/datafusion-comet-tpcbench localhost:32000/apache/datafusion-comet-tpcbench:latest
docker push localhost:32000/apache/datafusion-comet-tpcbench:latest
```

## Run benchmarks

```shell
export SPARK_MASTER=k8s://https://127.0.0.1:16443
export COMET_DOCKER_IMAGE=localhost:32000/apache/datafusion-comet-tpcbench:latest
# Location of Comet JAR within the Docker image
export COMET_JAR=/opt/spark/jars/comet-spark-spark3.4_2.12-0.5.0-SNAPSHOT.jar

$SPARK_HOME/bin/spark-submit \
    --master $SPARK_MASTER \
    --deploy-mode cluster  \
    --name comet-tpcbench \
    --driver-memory 8G \
    --conf spark.driver.memory=8G \
    --conf spark.executor.instances=1 \
    --conf spark.executor.memory=32G \
    --conf spark.executor.cores=8 \
    --conf spark.cores.max=8 \
    --conf spark.task.cpus=1 \
    --conf spark.executor.memoryOverhead=3G \
    --jars local://$COMET_JAR \
    --conf spark.executor.extraClassPath=$COMET_JAR \
    --conf spark.driver.extraClassPath=$COMET_JAR \
    --conf spark.plugins=org.apache.spark.CometPlugin \
    --conf spark.sql.extensions=org.apache.comet.CometSparkSessionExtensions \
    --conf spark.comet.enabled=true \
    --conf spark.comet.exec.enabled=true \
    --conf spark.comet.exec.all.enabled=true \
    --conf spark.comet.cast.allowIncompatible=true \
    --conf spark.comet.exec.shuffle.enabled=true \
    --conf spark.comet.exec.shuffle.mode=auto \
    --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
    --conf spark.kubernetes.namespace=default \
    --conf spark.kubernetes.driver.pod.name=tpcbench  \
    --conf spark.kubernetes.container.image=$COMET_DOCKER_IMAGE \
    --conf spark.kubernetes.driver.volumes.hostPath.tpcdata.mount.path=/mnt/bigdata/tpcds/sf100/ \
    --conf spark.kubernetes.driver.volumes.hostPath.tpcdata.options.path=/mnt/bigdata/tpcds/sf100/ \
    --conf spark.kubernetes.executor.volumes.hostPath.tpcdata.mount.path=/mnt/bigdata/tpcds/sf100/ \
    --conf spark.kubernetes.executor.volumes.hostPath.tpcdata.options.path=/mnt/bigdata/tpcds/sf100/ \
    --conf spark.kubernetes.authenticate.caCertFile=/var/snap/microk8s/current/certs/ca.crt \
    local:///opt/datafusion-benchmarks/runners/datafusion-comet/tpcbench.py \
    --benchmark tpcds \
    --data /mnt/bigdata/tpcds/sf100/ \
    --queries /opt/datafusion-benchmarks/tpcds/queries-spark \
    --iterations 1
```


================================================
FILE: benchmarks/pyspark/README.md
================================================
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

# PySpark Benchmarks

A suite of PySpark benchmarks for comparing performance between Spark, Comet JVM, and Comet Native implementations.

## Available Benchmarks

Run `python run_benchmark.py --list-benchmarks` to see all available benchmarks:

- **shuffle-hash** - Shuffle all columns using hash partitioning on group_key
- **shuffle-roundrobin** - Shuffle all columns using round-robin partitioning

## Prerequisites

- Apache Spark cluster (standalone, YARN, or Kubernetes)
- PySpark installed
- Comet JAR built

## Build Comet JAR

```bash
cd /path/to/datafusion-comet
make release
```

## Step 1: Generate Test Data

Generate test data with realistic 50-column schema (nested structs, arrays, maps):

```bash
spark-submit \
  --master spark://master:7077 \
  --executor-memory 16g \
  generate_data.py \
  --output /tmp/shuffle-benchmark-data \
  --rows 10000000 \
  --partitions 200
```

### Data Generation Options

| Option               | Default    | Description                  |
| -------------------- | ---------- | ---------------------------- |
| `--output`, `-o`     | (required) | Output path for parquet data |
| `--rows`, `-r`       | 10000000   | Number of rows               |
| `--partitions`, `-p` | 200        | Number of output partitions  |

## Step 2: Run Benchmarks

### List Available Benchmarks

```bash
python run_benchmark.py --list-benchmarks
```

### Run Individual Benchmarks

You can run specific benchmarks by name:

```bash
# Hash partitioning shuffle - Spark baseline
spark-submit --master spark://master:7077 \
  run_benchmark.py --data /tmp/shuffle-benchmark-data --mode spark --benchmark shuffle-hash

# Round-robin shuffle - Spark baseline
spark-submit --master spark://master:7077 \
  run_benchmark.py --data /tmp/shuffle-benchmark-data --mode spark --benchmark shuffle-roundrobin

# Hash partitioning - Comet JVM shuffle
spark-submit --master spark://master:7077 \
  --jars /path/to/comet.jar \
  --conf spark.comet.enabled=true \
  --conf spark.comet.exec.shuffle.enabled=true \
  --conf spark.comet.shuffle.mode=jvm \
  --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
  run_benchmark.py --data /tmp/shuffle-benchmark-data --mode jvm --benchmark shuffle-hash

# Round-robin - Comet Native shuffle
spark-submit --master spark://master:7077 \
  --jars /path/to/comet.jar \
  --conf spark.comet.enabled=true \
  --conf spark.comet.exec.shuffle.enabled=true \
  --conf spark.comet.exec.shuffle.mode=native \
  --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
  run_benchmark.py --data /tmp/shuffle-benchmark-data --mode native --benchmark shuffle-roundrobin
```

### Run All Benchmarks

Use the provided script to run all benchmarks across all modes:

```bash
SPARK_MASTER=spark://master:7077 \
EXECUTOR_MEMORY=16g \
./run_all_benchmarks.sh /tmp/shuffle-benchmark-data
```

## Checking Results

Open the Spark UI (default: http://localhost:4040) during each benchmark run to compare shuffle write sizes in the Stages tab.

## Adding New Benchmarks

The benchmark framework makes it easy to add new benchmarks:

1. **Create a benchmark class** in `benchmarks/` directory (or add to existing file):

```python
from typing import Any, Dict

from benchmarks.base import Benchmark

class MyBenchmark(Benchmark):
    @classmethod
    def name(cls) -> str:
        return "my-benchmark"

    @classmethod
    def description(cls) -> str:
        return "Description of what this benchmark does"

    def run(self) -> Dict[str, Any]:
        # Read data
        df = self.spark.read.parquet(self.data_path)

        # Run your benchmark operation
        def benchmark_operation():
            result = df.filter(...).groupBy(...).agg(...)
            result.write.mode("overwrite").parquet("/tmp/output")

        # Time it
        duration_ms = self._time_operation(benchmark_operation)

        return {
            'duration_ms': duration_ms,
            # Add any other metrics you want to track
        }
```

2. **Register the benchmark** in `benchmarks/__init__.py`:

```python
from .my_module import MyBenchmark

_BENCHMARK_REGISTRY = {
    # ... existing benchmarks
    MyBenchmark.name(): MyBenchmark,
}
```

3. **Run your new benchmark**:

```bash
python run_benchmark.py --data /path/to/data --mode spark --benchmark my-benchmark
```

The base `Benchmark` class provides:

- Automatic timing via `_time_operation()`
- Standard output formatting via `execute_timed()`
- Access to SparkSession, data path, and mode
- Spark configuration printing


================================================
FILE: benchmarks/pyspark/benchmarks/__init__.py
================================================
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Benchmark registry for PySpark benchmarks.

This module provides a central registry for discovering and running benchmarks.
"""

from typing import Dict, Type, List

from .base import Benchmark
from .shuffle import ShuffleHashBenchmark, ShuffleRoundRobinBenchmark


# Registry of all available benchmarks, keyed by each benchmark's public name
# (the string returned by the class's `name()` classmethod).
_BENCHMARK_REGISTRY: Dict[str, Type[Benchmark]] = {
    ShuffleHashBenchmark.name(): ShuffleHashBenchmark,
    ShuffleRoundRobinBenchmark.name(): ShuffleRoundRobinBenchmark,
}


def get_benchmark(name: str) -> Type[Benchmark]:
    """
    Look up a benchmark class by its registered name.

    Args:
        name: Benchmark name as registered in ``_BENCHMARK_REGISTRY``

    Returns:
        The matching Benchmark subclass

    Raises:
        KeyError: If no benchmark is registered under ``name``
    """
    benchmark_cls = _BENCHMARK_REGISTRY.get(name)
    if benchmark_cls is None:
        # Include the sorted list of valid names to make the error actionable.
        available = ", ".join(sorted(_BENCHMARK_REGISTRY.keys()))
        raise KeyError(
            f"Unknown benchmark: {name}. Available benchmarks: {available}"
        )
    return benchmark_cls


def list_benchmarks() -> List[tuple[str, str]]:
    """
    Enumerate every registered benchmark in alphabetical order by name.

    Returns:
        List of (name, description) tuples
    """
    return [
        (bench_name, _BENCHMARK_REGISTRY[bench_name].description())
        for bench_name in sorted(_BENCHMARK_REGISTRY)
    ]


# Public API of this package: the base class, the registry helpers,
# and the concrete benchmark implementations.
__all__ = [
    'Benchmark',
    'get_benchmark',
    'list_benchmarks',
    'ShuffleHashBenchmark',
    'ShuffleRoundRobinBenchmark',
]


================================================
FILE: benchmarks/pyspark/benchmarks/base.py
================================================
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Base benchmark class providing common functionality for all benchmarks.
"""

import time
from abc import ABC, abstractmethod
from typing import Dict, Any

from pyspark.sql import SparkSession


class Benchmark(ABC):
    """Base class for all PySpark benchmarks.

    Subclasses implement ``name``, ``description`` and ``run``; callers
    normally invoke ``execute_timed``, which wraps ``run`` with standard
    logging, cache clearing, and result reporting.
    """

    def __init__(self, spark: SparkSession, data_path: str, mode: str):
        """
        Initialize benchmark.

        Args:
            spark: SparkSession instance
            data_path: Path to input data
            mode: Execution mode (spark, jvm, native)
        """
        self.spark = spark
        self.data_path = data_path
        self.mode = mode

    @classmethod
    @abstractmethod
    def name(cls) -> str:
        """Return the benchmark name (used for CLI)."""
        pass

    @classmethod
    @abstractmethod
    def description(cls) -> str:
        """Return a short description of the benchmark."""
        pass

    @abstractmethod
    def run(self) -> Dict[str, Any]:
        """
        Run the benchmark and return results.

        Returns:
            Dictionary containing benchmark results (must include 'duration_ms')
        """
        pass

    def execute_timed(self) -> Dict[str, Any]:
        """
        Execute the benchmark with timing and standard output.

        Prints a banner with the benchmark name/mode/config, clears the
        Spark cache, runs the benchmark, then prints all returned metrics.

        Returns:
            Dictionary containing benchmark results
        """
        print(f"\n{'=' * 80}")
        print(f"Benchmark: {self.name()}")
        print(f"Mode: {self.mode.upper()}")
        print(f"{'=' * 80}")
        print(f"Data path: {self.data_path}")

        # Print relevant Spark configuration
        self._print_spark_config()

        # Clear cache before running so prior runs don't skew results
        self.spark.catalog.clearCache()

        # Run the benchmark
        print("\nRunning benchmark...")
        results = self.run()

        # Print the standard metrics first...
        print(f"\nDuration: {results['duration_ms']:,} ms")
        if 'row_count' in results:
            print(f"Rows processed: {results['row_count']:,}")

        # ...then any additional metrics the benchmark returned
        for key, value in results.items():
            if key not in ['duration_ms', 'row_count']:
                print(f"{key}: {value}")

        print(f"{'=' * 80}\n")

        return results

    def _print_spark_config(self):
        """Print the Spark/Comet configuration values relevant to benchmarks."""
        conf = self.spark.sparkContext.getConf()
        print(f"Shuffle manager: {conf.get('spark.shuffle.manager', 'default')}")
        print(f"Comet enabled: {conf.get('spark.comet.enabled', 'false')}")
        print(f"Comet shuffle enabled: {conf.get('spark.comet.exec.shuffle.enabled', 'false')}")
        print(f"Comet shuffle mode: {conf.get('spark.comet.shuffle.mode', 'not set')}")
        print(f"Spark UI: {self.spark.sparkContext.uiWebUrl}")

    def _time_operation(self, operation_fn):
        """
        Time an operation and return its duration in milliseconds.

        Uses time.perf_counter(), a monotonic high-resolution clock, so the
        measurement is immune to system clock adjustments (time.time() is
        wall-clock and can jump backwards/forwards mid-benchmark).

        Args:
            operation_fn: Function to time (takes no arguments)

        Returns:
            Duration in milliseconds (int)
        """
        start_time = time.perf_counter()
        operation_fn()
        duration_ms = int((time.perf_counter() - start_time) * 1000)
        return duration_ms


================================================
FILE: benchmarks/pyspark/benchmarks/shuffle.py
================================================
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Shuffle benchmarks for comparing shuffle file sizes and performance.

These benchmarks test different partitioning strategies (hash, round-robin)
across Spark, Comet JVM, and Comet Native shuffle implementations.
"""

from typing import Dict, Any
from pyspark.sql import DataFrame

from .base import Benchmark


class ShuffleBenchmark(Benchmark):
    """Base class for shuffle benchmarks with common repartitioning logic.

    Subclasses define the partitioning strategy via ``_repartition``; this
    class handles reading the input, timing the shuffle, and writing the
    result to parquet to force materialization.
    """

    def __init__(self, spark, data_path: str, mode: str, num_partitions: int = 200,
                 output_base: str = "/tmp"):
        """
        Initialize shuffle benchmark.

        Args:
            spark: SparkSession instance
            data_path: Path to input parquet data
            mode: Execution mode (spark, jvm, native)
            num_partitions: Number of partitions to shuffle to
            output_base: Directory under which benchmark output is written
                (defaults to /tmp, preserving previous behavior)
        """
        super().__init__(spark, data_path, mode)
        self.num_partitions = num_partitions
        self.output_base = output_base

    def _read_and_count(self) -> tuple[DataFrame, int]:
        """Read input parquet data and count its rows."""
        df = self.spark.read.parquet(self.data_path)
        row_count = df.count()
        return df, row_count

    def _repartition(self, df: DataFrame) -> DataFrame:
        """
        Repartition dataframe using the strategy defined by subclass.

        Args:
            df: Input dataframe

        Returns:
            Repartitioned dataframe
        """
        raise NotImplementedError("Subclasses must implement _repartition")

    def _write_output(self, df: DataFrame, output_path: str):
        """Write repartitioned data to parquet (overwriting any previous run)."""
        df.write.mode("overwrite").parquet(output_path)

    def run(self) -> Dict[str, Any]:
        """
        Run the shuffle benchmark.

        Returns:
            Dictionary with duration_ms, row_count, and num_partitions
        """
        # Read input data (the count also forces the read to happen outside
        # the timed section)
        df, row_count = self._read_and_count()
        print(f"Number of rows: {row_count:,}")

        # Define the benchmark operation
        def benchmark_operation():
            # Repartition using the specific strategy
            repartitioned = self._repartition(df)

            # Write to parquet to force materialization of the shuffle
            output_path = f"{self.output_base}/shuffle-benchmark-output-{self.mode}-{self.name()}"
            self._write_output(repartitioned, output_path)
            print(f"Wrote repartitioned data to: {output_path}")

        # Time the operation
        duration_ms = self._time_operation(benchmark_operation)

        return {
            'duration_ms': duration_ms,
            'row_count': row_count,
            'num_partitions': self.num_partitions,
        }


class ShuffleHashBenchmark(ShuffleBenchmark):
    """Shuffle benchmark that hash-partitions rows on the group_key column."""

    @classmethod
    def name(cls) -> str:
        # CLI identifier used by the benchmark registry
        return "shuffle-hash"

    @classmethod
    def description(cls) -> str:
        return "Shuffle all columns using hash partitioning on group_key"

    def _repartition(self, df: DataFrame) -> DataFrame:
        """Hash-partition the dataframe on group_key into num_partitions."""
        partitioned = df.repartition(self.num_partitions, "group_key")
        return partitioned


class ShuffleRoundRobinBenchmark(ShuffleBenchmark):
    """Shuffle benchmark that distributes rows via round-robin partitioning."""

    @classmethod
    def name(cls) -> str:
        # CLI identifier used by the benchmark registry
        return "shuffle-roundrobin"

    @classmethod
    def description(cls) -> str:
        return "Shuffle all columns using round-robin partitioning"

    def _repartition(self, df: DataFrame) -> DataFrame:
        """Round-robin repartition: no key columns, so Spark balances rows evenly."""
        partitioned = df.repartition(self.num_partitions)
        return partitioned


================================================
FILE: benchmarks/pyspark/generate_data.py
================================================
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Generate test data for shuffle size comparison benchmark.

This script generates a parquet dataset with a realistic schema (100 columns
including deeply nested structs, arrays, and maps) for benchmarking shuffle
operations across Spark, Comet JVM, and Comet Native shuffle modes.
"""

import argparse
from pyspark.sql import SparkSession
from pyspark.sql import functions as F
from pyspark.sql.types import (
    StructType, StructField, IntegerType, LongType, DoubleType,
    StringType, BooleanType, DateType, TimestampType, ArrayType,
    MapType, DecimalType
)


def generate_data(output_path: str, num_rows: int, num_partitions: int):
    """Generate test data with realistic schema and write to parquet."""

    spark = SparkSession.builder \
        .appName("ShuffleBenchmark-DataGen") \
        .getOrCreate()

    print(f"Generating {num_rows:,} rows with {num_partitions} partitions")
    print(f"Output path: {output_path}")
    print("Schema: 100 columns including deeply nested structs, arrays, and maps")

    # Start with a range and build up the columns
    df = spark.range(0, num_rows, numPartitions=num_partitions)

    # Add columns using selectExpr for better performance
    df = df.selectExpr(
        # Key columns for grouping/partitioning (1-3)
        "cast(id % 1000 as int) as partition_key",
        "cast(id % 100 as int) as group_key",
        "id as row_id",

        # Integer columns (4-15)
        "cast(id % 10000 as int) as category_id",
        "cast(id % 500 as int) as region_id",
        "cast(id % 50 as int) as department_id",
        "cast((id * 7) % 1000000 as int) as customer_id",
        "cast((id * 13) % 100000 as int) as product_id",
        "cast(id % 12 + 1 as int) as month",
        "cast(id % 28 + 1 as int) as day",
        "cast(2020 + (id % 5) as int) as year",
        "cast((id * 17) % 256 as int) as priority",
        "cast((id * 19) % 1000 as int) as rank",
        "cast((id * 23) % 10000 as int) as score_int",
        "cast((id * 29) % 500 as int) as level",

        # Long columns (16-22)
        "id * 1000 as transaction_id",
        "(id * 17) % 10000000000 as account_number",
        "(id * 31) % 1000000000 as reference_id",
        "(id * 37) % 10000000000 as external_id",
        "(id * 41) % 1000000000 as correlation_id",
        "(id * 43) % 10000000000 as trace_id",
        "(id * 47) % 1000000000 as span_id",

        # Double columns (23-35)
        "cast(id % 10000 as double) / 100.0 as amount",
        "cast((id * 3) % 10000 as double) / 100.0 as price",
        "cast(id % 100 as double) / 100.0 as discount",
        "cast((id * 7) % 500 as double) / 10.0 as weight",
        "cast((id * 11) % 1000 as double) / 10.0 as height",
        "cast(id % 360 as double) as latitude",
        "cast((id * 2) % 360 as double) as longitude",
        "cast((id * 13) % 10000 as double) / 1000.0 as rate",
        "cast((id * 17) % 100 as double) / 100.0 as percentage",
        "cast((id * 19) % 1000 as double) as velocity",
        "cast((id * 23) % 500 as double) / 10.0 as acceleration",
        "cast((id * 29) % 10000 as double) / 100.0 as temperature",
        "cast((id * 31) % 1000 as double) / 10.0 as pressure",

        # String columns (36-50)
        "concat('user_', cast(id % 100000 as string)) as user_name",
        "concat('email_', cast(id % 50000 as string), '@example.com') as email",
        "concat('SKU-', lpad(cast(id % 10000 as string), 6, '0')) as sku",
        "concat('ORD-', cast(id as string)) as order_id",
        "array('pending', 'processing', 'shipped', 'delivered', 'cancelled')[cast(id % 5 as int)] as status",
        "array('USD', 'EUR', 'GBP', 'JPY', 'CAD')[cast(id % 5 as int)] as currency",
        "concat('Description for item ', cast(id % 1000 as string), ' with additional details') as description",
        "concat('REF-', lpad(cast(id % 100000 as string), 8, '0')) as reference_code",
        "concat('TXN-', cast(id as string), '-', cast(id % 1000 as string)) as transaction_code",
        "array('A', 'B', 'C', 'D', 'E')[cast(id % 5 as int)] as grade",
        "concat('Note: Record ', cast(id as string), ' processed successfully') as notes",
        "concat('Session-', lpad(cast(id % 10000 as string), 6, '0')) as session_id",
        "concat('Device-', cast(id % 1000 as string)) as device_id",
        "array('chrome', 'firefox', 'safari', 'edge')[cast(id % 4 as int)] as browser",
        "array('windows', 'macos', 'linux', 'ios', 'android')[cast(id % 5 as int)] as os",

        # Boolean columns (51-56)
        "id % 2 = 0 as is_active",
        "id % 3 = 0 as is_verified",
        "id % 7 = 0 as is_premium",
        "id % 5 = 0 as is_deleted",
        "id % 11 = 0 as is_featured",
        "id % 13 = 0 as is_archived",

        # Date and timestamp columns (57-60)
        "date_add(to_date('2020-01-01'), cast(id % 1500 as int)) as created_date",
        "date_add(to_date('2020-01-01'), cast((id + 30) % 1500 as int)) as updated_date",
        "date_add(to_date('2020-01-01'), cast((id + 60) % 1500 as int)) as expires_date",
        "to_timestamp(concat('2020-01-01 ', lpad(cast(id % 24 as string), 2, '0'), ':00:00')) as created_at",

        # Simple arrays (61-65)
        "array(cast(id % 100 as int), cast((id + 1) % 100 as int), cast((id + 2) % 100 as int), cast((id + 3) % 100 as int), cast((id + 4) % 100 as int)) as tag_ids",
        "array(cast(id % 1000 as double) / 10.0, cast((id * 2) % 1000 as double) / 10.0, cast((id * 3) % 1000 as double) / 10.0) as scores",
        "array(concat('tag_', cast(id % 20 as string)), concat('tag_', cast((id + 5) % 20 as string)), concat('tag_', cast((id + 10) % 20 as string))) as tags",
        "array(id % 2 = 0, id % 3 = 0, id % 5 = 0, id % 7 = 0) as flag_array",
        "array(id * 1000, id * 2000, id * 3000) as long_array",

        # Simple maps (66-68)
        "map('key1', cast(id % 100 as string), 'key2', cast((id * 2) % 100 as string), 'key3', cast((id * 3) % 100 as string)) as str_attributes",
        "map('score1', cast(id % 100 as double), 'score2', cast((id * 2) % 100 as double)) as double_attributes",
        "map(cast(id % 10 as int), concat('val_', cast(id % 100 as string)), cast((id + 1) % 10 as int), concat('val_', cast((id + 1) % 100 as string))) as int_key_map",

        # Level 2 nested struct: address with nested geo (69)
        "named_struct("
        "  'street', concat(cast(id % 9999 as string), ' Main St'),"
        "  'city', array('New York', 'Los Angeles', 'Chicago', 'Houston', 'Phoenix')[cast(id % 5 as int)],"
        "  'state', array('NY', 'CA', 'IL', 'TX', 'AZ')[cast(id % 5 as int)],"
        "  'zip', lpad(cast(id % 99999 as string), 5, '0'),"
        "  'country', 'USA',"
        "  'geo', named_struct("
        "    'lat', cast(id % 180 as double) - 90.0,"
        "    'lng', cast(id % 360 as double) - 180.0,"
        "    'accuracy', cast(id % 100 as double)"
        "  )"
        ") as address",

        # Level 3 nested struct: organization hierarchy (70)
        "named_struct("
        "  'company', named_struct("
        "    'name', concat('Company_', cast(id % 1000 as string)),"
        "    'industry', array('tech', 'finance', 'healthcare', 'retail')[cast(id % 4 as int)],"
        "    'headquarters', named_struct("
        "      'city', array('NYC', 'SF', 'LA', 'CHI')[cast(id % 4 as int)],"
        "      'country', 'USA',"
        "      'timezone', array('EST', 'PST', 'PST', 'CST')[cast(id % 4 as int)]"
        "    )"
        "  ),"
        "  'department', named_struct("
        "    'name', array('Engineering', 'Sales', 'Marketing', 'HR')[cast(id % 4 as int)],"
        "    'code', concat('DEPT-', cast(id % 100 as string)),"
        "    'budget', cast(id % 1000000 as double)"
        "  )"
        ") as organization",

        # Level 4 nested struct: deep config (71)
        "named_struct("
        "  'level1', named_struct("
        "    'level2a', named_struct("
        "      'level3a', named_struct("
        "        'value_int', cast(id % 1000 as int),"
        "        'value_str', concat('deep_', cast(id % 100 as string)),"
        "        'value_bool', id % 2 = 0"
        "      ),"
        "      'level3b', named_struct("
        "        'metric1', cast(id % 100 as double),"
        "        'metric2', cast((id * 2) % 100 as double)"
        "      )"
        "    ),"
        "    'level2b', named_struct("
        "      'setting1', concat('setting_', cast(id % 50 as string)),"
        "      'setting2', id % 3 = 0,"
        "      'values', array(cast(id % 10 as int), cast((id + 1) % 10 as int), cast((id + 2) % 10 as int))"
        "    )"
        "  ),"
        "  'metadata', named_struct("
        "    'version', concat('v', cast(id % 10 as string)),"
        "    'timestamp', id * 1000"
        "  )"
        ") as deep_config",

        # Array of structs with nested structs (72)
        "array("
        "  named_struct("
        "    'item_id', cast(id % 1000 as int),"
        "    'details', named_struct("
        "      'name', concat('Item_', cast(id % 100 as string)),"
        "      'category', array('electronics', 'clothing', 'food', 'books')[cast(id % 4 as int)],"
        "      'pricing', named_struct("
        "        'base', cast(id % 100 as double) + 0.99,"
        "        'discount', cast(id % 20 as double) / 100.0,"
        "        'tax_rate', 0.08"
        "      )"
        "    ),"
        "    'quantity', cast(id % 10 + 1 as int)"
        "  ),"
        "  named_struct("
        "    'item_id', cast((id + 100) % 1000 as int),"
        "    'details', named_struct("
        "      'name', concat('Item_', cast((id + 100) % 100 as string)),"
        "      'category', array('electronics', 'clothing', 'food', 'books')[cast((id + 1) % 4 as int)],"
        "      'pricing', named_struct("
        "        'base', cast((id + 50) % 100 as double) + 0.99,"
        "        'discount', cast((id + 5) % 20 as double) / 100.0,"
        "        'tax_rate', 0.08"
        "      )"
        "    ),"
        "    'quantity', cast((id + 1) % 10 + 1 as int)"
        "  )"
        ") as line_items",

        # Map with struct values (73)
        "map("
        "  'primary', named_struct('name', concat('Primary_', cast(id % 100 as string)), 'score', cast(id % 100 as double), 'active', true),"
        "  'secondary', named_struct('name', concat('Secondary_', cast(id % 100 as string)), 'score', cast((id * 2) % 100 as double), 'active', id % 2 = 0)"
        ") as contact_map",

        # Struct with map containing arrays (74)
        "named_struct("
        "  'config_name', concat('Config_', cast(id % 100 as string)),"
        "  'settings', map("
        "    'integers', array(cast(id % 10 as int), cast((id + 1) % 10 as int), cast((id + 2) % 10 as int)),"
        "    'strings', array(concat('s1_', cast(id % 10 as string)), concat('s2_', cast(id % 10 as string)))"
        "  ),"
        "  'enabled', id % 2 = 0"
        ") as config_with_map",

        # Array of arrays (75)
        "array("
        "  array(cast(id % 10 as int), cast((id + 1) % 10 as int), cast((id + 2) % 10 as int)),"
        "  array(cast((id * 2) % 10 as int), cast((id * 2 + 1) % 10 as int)),"
        "  array(cast((id * 3) % 10 as int), cast((id * 3 + 1) % 10 as int), cast((id * 3 + 2) % 10 as int), cast((id * 3 + 3) % 10 as int))"
        ") as nested_int_arrays",

        # Array of maps (76)
        "array("
        "  map('a', cast(id % 100 as string), 'b', cast((id + 1) % 100 as string)),"
        "  map('x', cast((id * 2) % 100 as string), 'y', cast((id * 2 + 1) % 100 as string), 'z', cast((id * 2 + 2) % 100 as string))"
        ") as array_of_maps",

        # Map with array values (77)
        "map("
        "  'scores', array(cast(id % 100 as double), cast((id * 2) % 100 as double), cast((id * 3) % 100 as double)),"
        "  'ranks', array(cast(id % 10 as double), cast((id + 1) % 10 as double))"
        ") as map_with_arrays",

        # Complex event structure (78)
        "named_struct("
        "  'event_id', concat('EVT-', cast(id as string)),"
        "  'event_type', array('click', 'view', 'purchase', 'signup')[cast(id % 4 as int)],"
        "  'timestamp', id * 1000,"
        "  'properties', map("
        "    'source', array('web', 'mobile', 'api')[cast(id % 3 as int)],"
        "    'campaign', concat('camp_', cast(id % 50 as string))"
        "  ),"
        "  'user', named_struct("
        "    'id', cast(id % 100000 as int),"
        "    'segment', array('new', 'returning', 'premium')[cast(id % 3 as int)],"
        "    'attributes', named_struct("
        "      'age_group', array('18-24', '25-34', '35-44', '45+')[cast(id % 4 as int)],"
        "      'interests', array(concat('int_', cast(id % 10 as string)), concat('int_', cast((id + 1) % 10 as string)))"
        "    )"
        "  )"
        ") as event_data",

        # Financial transaction with deep nesting (79)
        "named_struct("
        "  'txn_id', concat('TXN-', cast(id as string)),"
        "  'amount', named_struct("
        "    'value', cast(id % 10000 as double) / 100.0,"
        "    'currency', array('USD', 'EUR', 'GBP')[cast(id % 3 as int)],"
        "    'exchange', named_struct("
        "      'rate', 1.0 + cast(id % 100 as double) / 1000.0,"
        "      'source', 'market',"
        "      'timestamp', id * 1000"
        "    )"
        "  ),"
        "  'parties', named_struct("
        "    'sender', named_struct("
        "      'account', concat('ACC-', lpad(cast(id % 100000 as string), 8, '0')),"
        "      'bank', named_struct("
        "        'code', concat('BNK-', cast(id % 100 as string)),"
        "        'country', array('US', 'UK', 'DE', 'JP')[cast(id % 4 as int)]"
        "      )"
        "    ),"
        "    'receiver', named_struct("
        "      'account', concat('ACC-', lpad(cast((id + 50000) % 100000 as string), 8, '0')),"
        "      'bank', named_struct("
        "        'code', concat('BNK-', cast((id + 50) % 100 as string)),"
        "        'country', array('US', 'UK', 'DE', 'JP')[cast((id + 1) % 4 as int)]"
        "      )"
        "    )"
        "  )"
        ") as financial_txn",

        # Product catalog entry (80)
        "named_struct("
        "  'product_id', concat('PROD-', lpad(cast(id % 10000 as string), 6, '0')),"
        "  'variants', array("
        "    named_struct("
        "      'sku', concat('VAR-', cast(id % 1000 as string), '-A'),"
        "      'attributes', map('color', 'red', 'size', 'S'),"
        "      'inventory', named_struct('quantity', cast(id % 100 as int), 'warehouse', 'WH-1')"
        "    ),"
        "    named_struct("
        "      'sku', concat('VAR-', cast(id % 1000 as string), '-B'),"
        "      'attributes', map('color', 'blue', 'size', 'M'),"
        "      'inventory', named_struct('quantity', cast((id + 10) % 100 as int), 'warehouse', 'WH-2')"
        "    )"
        "  ),"
        "  'pricing', named_struct("
        "    'list_price', cast(id % 1000 as double) + 0.99,"
        "    'tiers', array("
        "      named_struct('min_qty', 1, 'price', cast(id % 1000 as double) + 0.99),"
        "      named_struct('min_qty', 10, 'price', cast(id % 1000 as double) * 0.9 + 0.99),"
        "      named_struct('min_qty', 100, 'price', cast(id % 1000 as double) * 0.8 + 0.99)"
        "    )"
        "  )"
        ") as product_catalog",

        # Additional scalar columns (81-90)
        "cast((id * 53) % 10000 as int) as metric_1",
        "cast((id * 59) % 10000 as int) as metric_2",
        "cast((id * 61) % 10000 as int) as metric_3",
        "cast((id * 67) % 1000000 as long) as counter_1",
        "cast((id * 71) % 1000000 as long) as counter_2",
        "cast((id * 73) % 10000 as double) / 100.0 as measure_1",
        "cast((id * 79) % 10000 as double) / 100.0 as measure_2",
        "concat('label_', cast(id % 500 as string)) as label_1",
        "concat('category_', cast(id % 200 as string)) as label_2",
        "id % 17 = 0 as flag_1",

        # Additional complex columns (91-95)
        "array("
        "  named_struct('ts', id * 1000, 'value', cast(id % 100 as double)),"
        "  named_struct('ts', id * 1000 + 1000, 'value', cast((id + 1) % 100 as double)),"
        "  named_struct('ts', id * 1000 + 2000, 'value', cast((id + 2) % 100 as double))"
        ") as time_series",

        "map("
        "  'en', concat('English text ', cast(id % 100 as string)),"
        "  'es', concat('Spanish texto ', cast(id % 100 as string)),"
        "  'fr', concat('French texte ', cast(id % 100 as string))"
        ") as translations",

        "named_struct("
        "  'rules', array("
        "    named_struct('id', cast(id % 100 as int), 'condition', concat('cond_', cast(id % 10 as string)), 'action', concat('act_', cast(id % 5 as string))),"
        "    named_struct('id', cast((id + 1) % 100 as int), 'condition', concat('cond_', cast((id + 1) % 10 as string)), 'action', concat('act_', cast((id + 1) % 5 as string)))"
        "  ),"
        "  'default_action', 'none',"
        "  'priority', cast(id % 10 as int)"
        ") as rule_engine",

        "array("
        "  map('metric', 'cpu', 'value', cast(id % 100 as double), 'unit', 'percent'),"
        "  map('metric', 'memory', 'value', cast((id * 2) % 100 as double), 'unit', 'percent'),"
        "  map('metric', 'disk', 'value', cast((id * 3) % 100 as double), 'unit', 'percent')"
        ") as system_metrics",

        "named_struct("
        "  'permissions', map("
        "    'read', array('user', 'admin'),"
        "    'write', array('admin'),"
        "    'delete', array('admin')"
        "  ),"
        "  'roles', array("
        "    named_struct('name', 'viewer', 'level', 1),"
        "    named_struct('name', 'editor', 'level', 2),"
        "    named_struct('
Download .txt
gitextract_2d7o17rp/

├── .asf.yaml
├── .claude/
│   └── skills/
│       ├── audit-comet-expression/
│       │   └── SKILL.md
│       └── review-comet-pr/
│           └── SKILL.md
├── .dockerignore
├── .github/
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.yml
│   │   └── feature_request.yml
│   ├── actions/
│   │   ├── java-test/
│   │   │   └── action.yaml
│   │   ├── rust-test/
│   │   │   └── action.yaml
│   │   ├── setup-builder/
│   │   │   └── action.yaml
│   │   ├── setup-iceberg-builder/
│   │   │   └── action.yaml
│   │   ├── setup-macos-builder/
│   │   │   └── action.yaml
│   │   └── setup-spark-builder/
│   │       └── action.yaml
│   ├── dependabot.yml
│   ├── pull_request_template.md
│   └── workflows/
│       ├── codeql.yml
│       ├── docker-publish.yml
│       ├── docs.yaml
│       ├── iceberg_spark_test.yml
│       ├── label_new_issues.yml
│       ├── miri.yml
│       ├── pr_benchmark_check.yml
│       ├── pr_build_linux.yml
│       ├── pr_build_macos.yml
│       ├── pr_markdown_format.yml
│       ├── pr_missing_suites.yml
│       ├── pr_rat_check.yml
│       ├── pr_title_check.yml
│       ├── spark_sql_test.yml
│       ├── spark_sql_test_native_iceberg_compat.yml
│       ├── stale.yml
│       ├── take.yml
│       └── validate_workflows.yml
├── .gitignore
├── .mvn/
│   └── wrapper/
│       ├── maven-wrapper.jar
│       └── maven-wrapper.properties
├── .scalafix.conf
├── CHANGELOG.md
├── LICENSE.txt
├── Makefile
├── NOTICE.txt
├── README.md
├── benchmarks/
│   ├── Dockerfile
│   ├── README.md
│   ├── pyspark/
│   │   ├── README.md
│   │   ├── benchmarks/
│   │   │   ├── __init__.py
│   │   │   ├── base.py
│   │   │   └── shuffle.py
│   │   ├── generate_data.py
│   │   ├── run_all_benchmarks.sh
│   │   └── run_benchmark.py
│   └── tpc/
│       ├── .gitignore
│       ├── README.md
│       ├── create-iceberg-tables.py
│       ├── drop-caches.sh
│       ├── engines/
│       │   ├── comet-hashjoin.toml
│       │   ├── comet-iceberg-hashjoin.toml
│       │   ├── comet-iceberg.toml
│       │   ├── comet.toml
│       │   ├── gluten.toml
│       │   └── spark.toml
│       ├── generate-comparison.py
│       ├── infra/
│       │   └── docker/
│       │       ├── Dockerfile
│       │       ├── Dockerfile.build-comet
│       │       ├── docker-compose-laptop.yml
│       │       └── docker-compose.yml
│       ├── queries/
│       │   ├── tpcds/
│       │   │   ├── q1.sql
│       │   │   ├── q10.sql
│       │   │   ├── q11.sql
│       │   │   ├── q12.sql
│       │   │   ├── q13.sql
│       │   │   ├── q14.sql
│       │   │   ├── q15.sql
│       │   │   ├── q16.sql
│       │   │   ├── q17.sql
│       │   │   ├── q18.sql
│       │   │   ├── q19.sql
│       │   │   ├── q2.sql
│       │   │   ├── q20.sql
│       │   │   ├── q21.sql
│       │   │   ├── q22.sql
│       │   │   ├── q23.sql
│       │   │   ├── q24.sql
│       │   │   ├── q25.sql
│       │   │   ├── q26.sql
│       │   │   ├── q27.sql
│       │   │   ├── q28.sql
│       │   │   ├── q29.sql
│       │   │   ├── q3.sql
│       │   │   ├── q30.sql
│       │   │   ├── q31.sql
│       │   │   ├── q32.sql
│       │   │   ├── q33.sql
│       │   │   ├── q34.sql
│       │   │   ├── q35.sql
│       │   │   ├── q36.sql
│       │   │   ├── q37.sql
│       │   │   ├── q38.sql
│       │   │   ├── q39.sql
│       │   │   ├── q4.sql
│       │   │   ├── q40.sql
│       │   │   ├── q41.sql
│       │   │   ├── q42.sql
│       │   │   ├── q43.sql
│       │   │   ├── q44.sql
│       │   │   ├── q45.sql
│       │   │   ├── q46.sql
│       │   │   ├── q47.sql
│       │   │   ├── q48.sql
│       │   │   ├── q49.sql
│       │   │   ├── q5.sql
│       │   │   ├── q50.sql
│       │   │   ├── q51.sql
│       │   │   ├── q52.sql
│       │   │   ├── q53.sql
│       │   │   ├── q54.sql
│       │   │   ├── q55.sql
│       │   │   ├── q56.sql
│       │   │   ├── q57.sql
│       │   │   ├── q58.sql
│       │   │   ├── q59.sql
│       │   │   ├── q6.sql
│       │   │   ├── q60.sql
│       │   │   ├── q61.sql
│       │   │   ├── q62.sql
│       │   │   ├── q63.sql
│       │   │   ├── q64.sql
│       │   │   ├── q65.sql
│       │   │   ├── q66.sql
│       │   │   ├── q67.sql
│       │   │   ├── q68.sql
│       │   │   ├── q69.sql
│       │   │   ├── q7.sql
│       │   │   ├── q70.sql
│       │   │   ├── q71.sql
│       │   │   ├── q72.sql
│       │   │   ├── q73.sql
│       │   │   ├── q74.sql
│       │   │   ├── q75.sql
│       │   │   ├── q76.sql
│       │   │   ├── q77.sql
│       │   │   ├── q78.sql
│       │   │   ├── q79.sql
│       │   │   ├── q8.sql
│       │   │   ├── q80.sql
│       │   │   ├── q81.sql
│       │   │   ├── q82.sql
│       │   │   ├── q83.sql
│       │   │   ├── q84.sql
│       │   │   ├── q85.sql
│       │   │   ├── q86.sql
│       │   │   ├── q87.sql
│       │   │   ├── q88.sql
│       │   │   ├── q89.sql
│       │   │   ├── q9.sql
│       │   │   ├── q90.sql
│       │   │   ├── q91.sql
│       │   │   ├── q92.sql
│       │   │   ├── q93.sql
│       │   │   ├── q94.sql
│       │   │   ├── q95.sql
│       │   │   ├── q96.sql
│       │   │   ├── q97.sql
│       │   │   ├── q98.sql
│       │   │   └── q99.sql
│       │   └── tpch/
│       │       ├── q1.sql
│       │       ├── q10.sql
│       │       ├── q11.sql
│       │       ├── q12.sql
│       │       ├── q13.sql
│       │       ├── q14.sql
│       │       ├── q15.sql
│       │       ├── q16.sql
│       │       ├── q17.sql
│       │       ├── q18.sql
│       │       ├── q19.sql
│       │       ├── q2.sql
│       │       ├── q20.sql
│       │       ├── q21.sql
│       │       ├── q22.sql
│       │       ├── q3.sql
│       │       ├── q4.sql
│       │       ├── q5.sql
│       │       ├── q6.sql
│       │       ├── q7.sql
│       │       ├── q8.sql
│       │       └── q9.sql
│       ├── run.py
│       └── tpcbench.py
├── common/
│   ├── pom.xml
│   └── src/
│       ├── main/
│       │   ├── java/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── arrow/
│       │   │           │   └── c/
│       │   │           │       ├── AbstractCometSchemaImporter.java
│       │   │           │       └── ArrowImporter.java
│       │   │           └── comet/
│       │   │               ├── CometNativeException.java
│       │   │               ├── CometOutOfMemoryError.java
│       │   │               ├── CometRuntimeException.java
│       │   │               ├── CometSchemaImporter.java
│       │   │               ├── IcebergApi.java
│       │   │               ├── NativeBase.java
│       │   │               ├── ParquetRuntimeException.java
│       │   │               ├── exceptions/
│       │   │               │   └── CometQueryExecutionException.java
│       │   │               ├── parquet/
│       │   │               │   ├── AbstractColumnReader.java
│       │   │               │   ├── ArrowConstantColumnReader.java
│       │   │               │   ├── ArrowRowIndexColumnReader.java
│       │   │               │   ├── BloomFilterReader.java
│       │   │               │   ├── ColumnIndexReader.java
│       │   │               │   ├── ColumnPageReader.java
│       │   │               │   ├── ColumnReader.java
│       │   │               │   ├── CometFileKeyUnwrapper.java
│       │   │               │   ├── CometInputFile.java
│       │   │               │   ├── DictionaryPageReader.java
│       │   │               │   ├── FileReader.java
│       │   │               │   ├── FooterReader.java
│       │   │               │   ├── IcebergCometNativeBatchReader.java
│       │   │               │   ├── IndexFilter.java
│       │   │               │   ├── LazyColumnReader.java
│       │   │               │   ├── Native.java
│       │   │               │   ├── NativeBatchReader.java
│       │   │               │   ├── NativeColumnReader.java
│       │   │               │   ├── ParquetColumnSpec.java
│       │   │               │   ├── ParquetMetadataSerializer.java
│       │   │               │   ├── ReadOptions.java
│       │   │               │   ├── RowGroupFilter.java
│       │   │               │   ├── RowGroupReader.java
│       │   │               │   ├── TypeUtil.java
│       │   │               │   ├── Utils.java
│       │   │               │   ├── WrappedInputFile.java
│       │   │               │   └── WrappedSeekableInputStream.java
│       │   │               └── vector/
│       │   │                   ├── CometDecodedVector.java
│       │   │                   ├── CometDelegateVector.java
│       │   │                   ├── CometDictionary.java
│       │   │                   ├── CometDictionaryVector.java
│       │   │                   ├── CometLazyVector.java
│       │   │                   ├── CometListVector.java
│       │   │                   ├── CometMapVector.java
│       │   │                   ├── CometPlainVector.java
│       │   │                   ├── CometSelectionVector.java
│       │   │                   ├── CometStructVector.java
│       │   │                   └── CometVector.java
│       │   ├── resources/
│       │   │   └── log4j2.properties
│       │   ├── scala/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   ├── CometConf.scala
│       │   │           │   ├── Constants.scala
│       │   │           │   ├── objectstore/
│       │   │           │   │   └── NativeConfig.scala
│       │   │           │   ├── package.scala
│       │   │           │   ├── parquet/
│       │   │           │   │   ├── CometParquetUtils.scala
│       │   │           │   │   └── CometReaderThreadPool.scala
│       │   │           │   └── vector/
│       │   │           │       ├── NativeUtil.scala
│       │   │           │       └── StreamReader.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       ├── CastOverflowException.scala
│       │   │                       ├── execution/
│       │   │                       │   └── arrow/
│       │   │                       │       ├── ArrowReaderIterator.scala
│       │   │                       │       ├── ArrowWriters.scala
│       │   │                       │       └── CometArrowConverters.scala
│       │   │                       ├── parquet/
│       │   │                       │   ├── CometParquetReadSupport.scala
│       │   │                       │   └── CometSparkToParquetSchemaConverter.scala
│       │   │                       └── util/
│       │   │                           └── Utils.scala
│       │   ├── spark-3.4/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── ShimBatchReader.scala
│       │   │           │       └── ShimFileFormat.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           └── ShimTaskMetrics.scala
│       │   ├── spark-3.5/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── ShimBatchReader.scala
│       │   │           │       └── ShimFileFormat.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           └── ShimTaskMetrics.scala
│       │   ├── spark-3.x/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           └── comet/
│       │   │               └── shims/
│       │   │                   ├── CometTypeShim.scala
│       │   │                   └── ShimCometConf.scala
│       │   └── spark-4.0/
│       │       └── org/
│       │           └── apache/
│       │               ├── comet/
│       │               │   └── shims/
│       │               │       ├── CometTypeShim.scala
│       │               │       ├── ShimBatchReader.scala
│       │               │       ├── ShimCometConf.scala
│       │               │       └── ShimFileFormat.scala
│       │               └── spark/
│       │                   └── sql/
│       │                       └── comet/
│       │                           └── shims/
│       │                               └── ShimTaskMetrics.scala
│       └── test/
│           ├── java/
│           │   └── org/
│           │       └── apache/
│           │           └── comet/
│           │               └── parquet/
│           │                   ├── TestColumnReader.java
│           │                   ├── TestCometInputFile.java
│           │                   ├── TestFileReader.java
│           │                   └── TestUtils.java
│           └── resources/
│               ├── log4j.properties
│               └── log4j2.properties
├── conf/
│   └── log4rs.yaml
├── dev/
│   ├── cargo.config
│   ├── changelog/
│   │   ├── 0.1.0.md
│   │   ├── 0.10.0.md
│   │   ├── 0.11.0.md
│   │   ├── 0.12.0.md
│   │   ├── 0.13.0.md
│   │   ├── 0.14.0.md
│   │   ├── 0.14.1.md
│   │   ├── 0.2.0.md
│   │   ├── 0.3.0.md
│   │   ├── 0.4.0.md
│   │   ├── 0.5.0.md
│   │   ├── 0.6.0.md
│   │   ├── 0.7.0.md
│   │   ├── 0.8.0.md
│   │   ├── 0.9.0.md
│   │   └── 0.9.1.md
│   ├── checkstyle-suppressions.xml
│   ├── ci/
│   │   ├── check-suites.py
│   │   └── check-working-tree-clean.sh
│   ├── copyright/
│   │   └── java-header.txt
│   ├── diffs/
│   │   ├── 3.4.3.diff
│   │   ├── 3.5.8.diff
│   │   ├── 4.0.1.diff
│   │   └── iceberg/
│   │       ├── 1.10.0.diff
│   │       ├── 1.8.1.diff
│   │       └── 1.9.1.diff
│   ├── ensure-jars-have-correct-contents.sh
│   ├── generate-release-docs.sh
│   ├── regenerate-golden-files.sh
│   ├── release/
│   │   ├── build-release-comet.sh
│   │   ├── check-rat-report.py
│   │   ├── comet-rm/
│   │   │   ├── Dockerfile
│   │   │   └── build-comet-native-libs.sh
│   │   ├── create-tarball.sh
│   │   ├── generate-changelog.py
│   │   ├── publish-to-maven.sh
│   │   ├── rat_exclude_files.txt
│   │   ├── release-tarball.sh
│   │   ├── requirements.txt
│   │   ├── run-rat.sh
│   │   ├── verify-release-candidate.sh
│   │   └── verifying-release-candidates.md
│   └── scalastyle-config.xml
├── docs/
│   ├── .gitignore
│   ├── Makefile
│   ├── README.md
│   ├── build.sh
│   ├── generate-versions.py
│   ├── make.bat
│   ├── requirements.txt
│   ├── source/
│   │   ├── _static/
│   │   │   └── theme_overrides.css
│   │   ├── _templates/
│   │   │   ├── docs-sidebar.html
│   │   │   └── layout.html
│   │   ├── about/
│   │   │   ├── gluten_comparison.md
│   │   │   └── index.md
│   │   ├── asf/
│   │   │   └── index.md
│   │   ├── conf.py
│   │   ├── contributor-guide/
│   │   │   ├── adding_a_new_expression.md
│   │   │   ├── adding_a_new_operator.md
│   │   │   ├── benchmark-results/
│   │   │   │   ├── blaze-0.5.0-tpcds.json
│   │   │   │   ├── blaze-0.5.0-tpch.json
│   │   │   │   ├── gluten-1.4.0-tpcds.json
│   │   │   │   ├── gluten-1.4.0-tpch.json
│   │   │   │   ├── spark-3.5.3-tpcds.json
│   │   │   │   ├── spark-3.5.3-tpch.json
│   │   │   │   ├── tpc-ds.md
│   │   │   │   └── tpc-h.md
│   │   │   ├── benchmarking.md
│   │   │   ├── benchmarking_aws_ec2.md
│   │   │   ├── benchmarking_macos.md
│   │   │   ├── benchmarking_spark_sql_perf.md
│   │   │   ├── bug_triage.md
│   │   │   ├── contributing.md
│   │   │   ├── debugging.md
│   │   │   ├── development.md
│   │   │   ├── expression-audit-log.md
│   │   │   ├── ffi.md
│   │   │   ├── iceberg-spark-tests.md
│   │   │   ├── index.md
│   │   │   ├── jvm_shuffle.md
│   │   │   ├── native_shuffle.md
│   │   │   ├── parquet_scans.md
│   │   │   ├── plugin_overview.md
│   │   │   ├── profiling.md
│   │   │   ├── release_process.md
│   │   │   ├── roadmap.md
│   │   │   ├── spark-sql-tests.md
│   │   │   ├── sql-file-tests.md
│   │   │   ├── sql_error_propagation.md
│   │   │   └── tracing.md
│   │   ├── index.md
│   │   └── user-guide/
│   │       ├── index.md
│   │       └── latest/
│   │           ├── compatibility.md
│   │           ├── configs.md
│   │           ├── datasources.md
│   │           ├── datatypes.md
│   │           ├── expressions.md
│   │           ├── iceberg.md
│   │           ├── index.rst
│   │           ├── installation.md
│   │           ├── kubernetes.md
│   │           ├── metrics.md
│   │           ├── operators.md
│   │           ├── source.md
│   │           └── tuning.md
│   └── spark_expressions_support.md
├── fuzz-testing/
│   ├── .gitignore
│   ├── README.md
│   ├── pom.xml
│   ├── run.sh
│   └── src/
│       └── main/
│           └── scala/
│               └── org/
│                   └── apache/
│                       └── comet/
│                           └── fuzz/
│                               ├── ComparisonTool.scala
│                               ├── Main.scala
│                               ├── Meta.scala
│                               ├── QueryGen.scala
│                               ├── QueryRunner.scala
│                               └── Utils.scala
├── kube/
│   ├── Dockerfile
│   └── local/
│       ├── hadoop.env
│       └── hdfs-docker-compose.yml
├── mvnw
├── mvnw.cmd
├── native/
│   ├── Cargo.toml
│   ├── README.md
│   ├── common/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       ├── bin/
│   │       │   └── analyze_trace.rs
│   │       ├── error.rs
│   │       ├── lib.rs
│   │       ├── query_context.rs
│   │       ├── tracing.rs
│   │       └── utils.rs
│   ├── core/
│   │   ├── Cargo.toml
│   │   ├── benches/
│   │   │   ├── array_element_append.rs
│   │   │   ├── bit_util.rs
│   │   │   ├── common.rs
│   │   │   ├── parquet_decode.rs
│   │   │   ├── parquet_read.rs
│   │   │   └── perf.rs
│   │   └── src/
│   │       ├── common/
│   │       │   ├── bit.rs
│   │       │   ├── buffer.rs
│   │       │   └── mod.rs
│   │       ├── execution/
│   │       │   ├── columnar_to_row.rs
│   │       │   ├── expressions/
│   │       │   │   ├── arithmetic.rs
│   │       │   │   ├── bitwise.rs
│   │       │   │   ├── comparison.rs
│   │       │   │   ├── logical.rs
│   │       │   │   ├── mod.rs
│   │       │   │   ├── nullcheck.rs
│   │       │   │   ├── partition.rs
│   │       │   │   ├── random.rs
│   │       │   │   ├── strings.rs
│   │       │   │   ├── subquery.rs
│   │       │   │   └── temporal.rs
│   │       │   ├── jni_api.rs
│   │       │   ├── memory_pools/
│   │       │   │   ├── config.rs
│   │       │   │   ├── fair_pool.rs
│   │       │   │   ├── logging_pool.rs
│   │       │   │   ├── mod.rs
│   │       │   │   ├── task_shared.rs
│   │       │   │   └── unified_pool.rs
│   │       │   ├── metrics/
│   │       │   │   ├── mod.rs
│   │       │   │   └── utils.rs
│   │       │   ├── mod.rs
│   │       │   ├── operators/
│   │       │   │   ├── copy.rs
│   │       │   │   ├── csv_scan.rs
│   │       │   │   ├── expand.rs
│   │       │   │   ├── iceberg_scan.rs
│   │       │   │   ├── mod.rs
│   │       │   │   ├── parquet_writer.rs
│   │       │   │   ├── projection.rs
│   │       │   │   ├── scan.rs
│   │       │   │   └── shuffle_scan.rs
│   │       │   ├── planner/
│   │       │   │   ├── expression_registry.rs
│   │       │   │   ├── macros.rs
│   │       │   │   └── operator_registry.rs
│   │       │   ├── planner.rs
│   │       │   ├── serde.rs
│   │       │   ├── sort.rs
│   │       │   ├── spark_config.rs
│   │       │   ├── spark_plan.rs
│   │       │   ├── tracing.rs
│   │       │   └── utils.rs
│   │       ├── lib.rs
│   │       └── parquet/
│   │           ├── cast_column.rs
│   │           ├── data_type.rs
│   │           ├── encryption_support.rs
│   │           ├── mod.rs
│   │           ├── mutable_vector.rs
│   │           ├── objectstore/
│   │           │   ├── mod.rs
│   │           │   └── s3.rs
│   │           ├── parquet_exec.rs
│   │           ├── parquet_read_cached_factory.rs
│   │           ├── parquet_support.rs
│   │           ├── read/
│   │           │   ├── column.rs
│   │           │   ├── levels.rs
│   │           │   ├── mod.rs
│   │           │   └── values.rs
│   │           ├── schema_adapter.rs
│   │           └── util/
│   │               ├── bit_packing.rs
│   │               ├── buffer.rs
│   │               ├── jni.rs
│   │               ├── memory.rs
│   │               ├── mod.rs
│   │               └── test_common/
│   │                   ├── mod.rs
│   │                   ├── page_util.rs
│   │                   └── rand_gen.rs
│   ├── fs-hdfs/
│   │   ├── Cargo.toml
│   │   ├── LICENSE.txt
│   │   ├── README.md
│   │   ├── build.rs
│   │   ├── c_src/
│   │   │   ├── libhdfs/
│   │   │   │   ├── config.h
│   │   │   │   ├── exception.c
│   │   │   │   ├── exception.h
│   │   │   │   ├── hdfs.c
│   │   │   │   ├── hdfs.h
│   │   │   │   ├── htable.c
│   │   │   │   ├── htable.h
│   │   │   │   ├── jni_helper.c
│   │   │   │   ├── jni_helper.h
│   │   │   │   └── os/
│   │   │   │       ├── mutexes.h
│   │   │   │       ├── posix/
│   │   │   │       │   ├── mutexes.c
│   │   │   │       │   ├── platform.h
│   │   │   │       │   ├── thread.c
│   │   │   │       │   └── thread_local_storage.c
│   │   │   │       ├── thread.h
│   │   │   │       └── thread_local_storage.h
│   │   │   ├── libminidfs/
│   │   │   │   ├── native_mini_dfs.c
│   │   │   │   └── native_mini_dfs.h
│   │   │   └── wrapper.h
│   │   ├── rustfmt.toml
│   │   └── src/
│   │       ├── err.rs
│   │       ├── hdfs.rs
│   │       ├── lib.rs
│   │       ├── minidfs.rs
│   │       ├── native.rs
│   │       ├── util.rs
│   │       └── walkdir/
│   │           ├── mod.rs
│   │           └── tree_iter.rs
│   ├── hdfs/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       ├── lib.rs
│   │       └── object_store/
│   │           ├── hdfs.rs
│   │           └── mod.rs
│   ├── jni-bridge/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   ├── batch_iterator.rs
│   │   │   ├── comet_exec.rs
│   │   │   ├── comet_metric_node.rs
│   │   │   ├── comet_task_memory_manager.rs
│   │   │   ├── errors.rs
│   │   │   ├── lib.rs
│   │   │   └── shuffle_block_iterator.rs
│   │   └── testdata/
│   │       ├── backtrace.txt
│   │       └── stacktrace.txt
│   ├── proto/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── build.rs
│   │   └── src/
│   │       ├── lib.rs
│   │       └── proto/
│   │           ├── config.proto
│   │           ├── expr.proto
│   │           ├── literal.proto
│   │           ├── metric.proto
│   │           ├── operator.proto
│   │           ├── partitioning.proto
│   │           └── types.proto
│   ├── rustfmt.toml
│   ├── shuffle/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── benches/
│   │   │   ├── row_columnar.rs
│   │   │   └── shuffle_writer.rs
│   │   └── src/
│   │       ├── bin/
│   │       │   └── shuffle_bench.rs
│   │       ├── comet_partitioning.rs
│   │       ├── ipc.rs
│   │       ├── lib.rs
│   │       ├── metrics.rs
│   │       ├── partitioners/
│   │       │   ├── empty_schema.rs
│   │       │   ├── mod.rs
│   │       │   ├── multi_partition.rs
│   │       │   ├── partitioned_batch_iterator.rs
│   │       │   ├── single_partition.rs
│   │       │   └── traits.rs
│   │       ├── shuffle_writer.rs
│   │       ├── spark_crc32c_hasher.rs
│   │       ├── spark_unsafe/
│   │       │   ├── list.rs
│   │       │   ├── map.rs
│   │       │   ├── mod.rs
│   │       │   ├── row.rs
│   │       │   └── unsafe_object.rs
│   │       └── writers/
│   │           ├── buf_batch_writer.rs
│   │           ├── checksum.rs
│   │           ├── mod.rs
│   │           ├── shuffle_block_writer.rs
│   │           └── spill.rs
│   └── spark-expr/
│       ├── Cargo.toml
│       ├── README.md
│       ├── benches/
│       │   ├── aggregate.rs
│       │   ├── bloom_filter_agg.rs
│       │   ├── cast_from_boolean.rs
│       │   ├── cast_from_string.rs
│       │   ├── cast_int_to_timestamp.rs
│       │   ├── cast_non_int_numeric_timestamp.rs
│       │   ├── cast_numeric.rs
│       │   ├── conditional.rs
│       │   ├── date_trunc.rs
│       │   ├── decimal_div.rs
│       │   ├── normalize_nan.rs
│       │   ├── padding.rs
│       │   ├── to_csv.rs
│       │   └── wide_decimal.rs
│       ├── src/
│       │   ├── agg_funcs/
│       │   │   ├── avg.rs
│       │   │   ├── avg_decimal.rs
│       │   │   ├── correlation.rs
│       │   │   ├── covariance.rs
│       │   │   ├── mod.rs
│       │   │   ├── stddev.rs
│       │   │   ├── sum_decimal.rs
│       │   │   ├── sum_int.rs
│       │   │   └── variance.rs
│       │   ├── array_funcs/
│       │   │   ├── array_compact.rs
│       │   │   ├── array_insert.rs
│       │   │   ├── get_array_struct_fields.rs
│       │   │   ├── list_extract.rs
│       │   │   ├── mod.rs
│       │   │   └── size.rs
│       │   ├── bloom_filter/
│       │   │   ├── bit.rs
│       │   │   ├── bloom_filter_agg.rs
│       │   │   ├── bloom_filter_might_contain.rs
│       │   │   ├── mod.rs
│       │   │   ├── spark_bit_array.rs
│       │   │   └── spark_bloom_filter.rs
│       │   ├── comet_scalar_funcs.rs
│       │   ├── conditional_funcs/
│       │   │   ├── if_expr.rs
│       │   │   └── mod.rs
│       │   ├── conversion_funcs/
│       │   │   ├── boolean.rs
│       │   │   ├── cast.rs
│       │   │   ├── mod.rs
│       │   │   ├── numeric.rs
│       │   │   ├── string.rs
│       │   │   ├── temporal.rs
│       │   │   └── utils.rs
│       │   ├── csv_funcs/
│       │   │   ├── csv_write_options.rs
│       │   │   ├── mod.rs
│       │   │   └── to_csv.rs
│       │   ├── datetime_funcs/
│       │   │   ├── date_diff.rs
│       │   │   ├── date_from_unix_date.rs
│       │   │   ├── date_trunc.rs
│       │   │   ├── extract_date_part.rs
│       │   │   ├── hours.rs
│       │   │   ├── make_date.rs
│       │   │   ├── mod.rs
│       │   │   ├── timestamp_trunc.rs
│       │   │   └── unix_timestamp.rs
│       │   ├── error.rs
│       │   ├── hash_funcs/
│       │   │   ├── mod.rs
│       │   │   ├── murmur3.rs
│       │   │   ├── utils.rs
│       │   │   └── xxhash64.rs
│       │   ├── json_funcs/
│       │   │   ├── from_json.rs
│       │   │   ├── mod.rs
│       │   │   └── to_json.rs
│       │   ├── kernels/
│       │   │   ├── mod.rs
│       │   │   ├── strings.rs
│       │   │   └── temporal.rs
│       │   ├── lib.rs
│       │   ├── math_funcs/
│       │   │   ├── abs.rs
│       │   │   ├── ceil.rs
│       │   │   ├── checked_arithmetic.rs
│       │   │   ├── div.rs
│       │   │   ├── floor.rs
│       │   │   ├── internal/
│       │   │   │   ├── checkoverflow.rs
│       │   │   │   ├── decimal_rescale_check.rs
│       │   │   │   ├── make_decimal.rs
│       │   │   │   ├── mod.rs
│       │   │   │   ├── normalize_nan.rs
│       │   │   │   └── unscaled_value.rs
│       │   │   ├── log.rs
│       │   │   ├── mod.rs
│       │   │   ├── modulo_expr.rs
│       │   │   ├── negative.rs
│       │   │   ├── round.rs
│       │   │   ├── unhex.rs
│       │   │   ├── utils.rs
│       │   │   └── wide_decimal_binary_expr.rs
│       │   ├── nondetermenistic_funcs/
│       │   │   ├── internal/
│       │   │   │   ├── mod.rs
│       │   │   │   └── rand_utils.rs
│       │   │   ├── mod.rs
│       │   │   ├── monotonically_increasing_id.rs
│       │   │   ├── rand.rs
│       │   │   └── randn.rs
│       │   ├── predicate_funcs/
│       │   │   ├── is_nan.rs
│       │   │   ├── mod.rs
│       │   │   └── rlike.rs
│       │   ├── query_context.rs
│       │   ├── static_invoke/
│       │   │   ├── char_varchar_utils/
│       │   │   │   ├── mod.rs
│       │   │   │   └── read_side_padding.rs
│       │   │   └── mod.rs
│       │   ├── string_funcs/
│       │   │   ├── contains.rs
│       │   │   ├── get_json_object.rs
│       │   │   ├── mod.rs
│       │   │   ├── split.rs
│       │   │   └── substring.rs
│       │   ├── struct_funcs/
│       │   │   ├── create_named_struct.rs
│       │   │   ├── get_struct_field.rs
│       │   │   └── mod.rs
│       │   ├── test_common/
│       │   │   ├── file_util.rs
│       │   │   └── mod.rs
│       │   ├── timezone.rs
│       │   ├── unbound.rs
│       │   └── utils.rs
│       └── tests/
│           └── spark_expr_reg.rs
├── pom.xml
├── rust-toolchain.toml
├── scalafmt.conf
├── spark/
│   ├── README.md
│   ├── inspections/
│   │   ├── CometTPCDSQueriesList-results.txt
│   │   └── CometTPCHQueriesList-results.txt
│   ├── pom.xml
│   └── src/
│       ├── main/
│       │   ├── java/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   ├── CometBatchIterator.java
│       │   │           │   ├── CometShuffleBlockIterator.java
│       │   │           │   └── NativeColumnarToRowInfo.java
│       │   │           ├── parquet/
│       │   │           │   └── filter2/
│       │   │           │       └── predicate/
│       │   │           │           └── SparkFilterApi.java
│       │   │           └── spark/
│       │   │               ├── CometTaskMemoryManager.java
│       │   │               ├── shuffle/
│       │   │               │   ├── comet/
│       │   │               │   │   ├── CometBoundedShuffleMemoryAllocator.java
│       │   │               │   │   ├── CometShuffleChecksumSupport.java
│       │   │               │   │   ├── CometShuffleMemoryAllocator.java
│       │   │               │   │   ├── CometShuffleMemoryAllocatorTrait.java
│       │   │               │   │   ├── CometUnifiedShuffleMemoryAllocator.java
│       │   │               │   │   └── TooLargePageException.java
│       │   │               │   └── sort/
│       │   │               │       ├── CometShuffleExternalSorter.java
│       │   │               │       ├── CometShuffleExternalSorterAsync.java
│       │   │               │       ├── CometShuffleExternalSorterSync.java
│       │   │               │       └── SpillSorter.java
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       ├── CometScalarSubquery.java
│       │   │                       └── execution/
│       │   │                           └── shuffle/
│       │   │                               ├── CometBypassMergeSortShuffleWriter.java
│       │   │                               ├── CometDiskBlockWriter.java
│       │   │                               ├── CometUnsafeShuffleWriter.java
│       │   │                               ├── ExposedByteArrayOutputStream.java
│       │   │                               ├── ShuffleThreadPool.java
│       │   │                               ├── SpillInfo.java
│       │   │                               └── SpillWriter.java
│       │   ├── resources/
│       │   │   └── log4j2.properties
│       │   ├── scala/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   ├── CometExecIterator.scala
│       │   │           │   ├── CometFallback.scala
│       │   │           │   ├── CometMetricsListener.scala
│       │   │           │   ├── CometSparkSessionExtensions.scala
│       │   │           │   ├── DataTypeSupport.scala
│       │   │           │   ├── ExtendedExplainInfo.scala
│       │   │           │   ├── GenerateDocs.scala
│       │   │           │   ├── MetricsSupport.scala
│       │   │           │   ├── Native.scala
│       │   │           │   ├── NativeColumnarToRowConverter.scala
│       │   │           │   ├── SparkErrorConverter.scala
│       │   │           │   ├── Tracing.scala
│       │   │           │   ├── expressions/
│       │   │           │   │   ├── CometCast.scala
│       │   │           │   │   ├── CometEvalMode.scala
│       │   │           │   │   └── RegExp.scala
│       │   │           │   ├── iceberg/
│       │   │           │   │   └── IcebergReflection.scala
│       │   │           │   ├── parquet/
│       │   │           │   │   ├── CometParquetFileFormat.scala
│       │   │           │   │   ├── ParquetFilters.scala
│       │   │           │   │   └── SourceFilterSerde.scala
│       │   │           │   ├── rules/
│       │   │           │   │   ├── CometExecRule.scala
│       │   │           │   │   ├── CometScanRule.scala
│       │   │           │   │   ├── EliminateRedundantTransitions.scala
│       │   │           │   │   └── RewriteJoin.scala
│       │   │           │   ├── serde/
│       │   │           │   │   ├── CometAggregateExpressionSerde.scala
│       │   │           │   │   ├── CometBloomFilterMightContain.scala
│       │   │           │   │   ├── CometExpressionSerde.scala
│       │   │           │   │   ├── CometOperatorSerde.scala
│       │   │           │   │   ├── CometScalarFunction.scala
│       │   │           │   │   ├── CometScalarSubquery.scala
│       │   │           │   │   ├── CometSortOrder.scala
│       │   │           │   │   ├── QueryPlanSerde.scala
│       │   │           │   │   ├── SupportLevel.scala
│       │   │           │   │   ├── aggregates.scala
│       │   │           │   │   ├── arithmetic.scala
│       │   │           │   │   ├── arrays.scala
│       │   │           │   │   ├── bitwise.scala
│       │   │           │   │   ├── collectionOperations.scala
│       │   │           │   │   ├── conditional.scala
│       │   │           │   │   ├── contraintExpressions.scala
│       │   │           │   │   ├── datetime.scala
│       │   │           │   │   ├── decimalExpressions.scala
│       │   │           │   │   ├── hash.scala
│       │   │           │   │   ├── literals.scala
│       │   │           │   │   ├── maps.scala
│       │   │           │   │   ├── math.scala
│       │   │           │   │   ├── namedExpressions.scala
│       │   │           │   │   ├── nondetermenistic.scala
│       │   │           │   │   ├── operator/
│       │   │           │   │   │   ├── CometDataWritingCommand.scala
│       │   │           │   │   │   ├── CometIcebergNativeScan.scala
│       │   │           │   │   │   ├── CometNativeScan.scala
│       │   │           │   │   │   ├── CometSink.scala
│       │   │           │   │   │   └── package.scala
│       │   │           │   │   ├── predicates.scala
│       │   │           │   │   ├── statics.scala
│       │   │           │   │   ├── strings.scala
│       │   │           │   │   ├── structs.scala
│       │   │           │   │   └── unixtime.scala
│       │   │           │   └── testing/
│       │   │           │       ├── FuzzDataGenerator.scala
│       │   │           │       └── ParquetGenerator.scala
│       │   │           └── spark/
│       │   │               ├── CometSource.scala
│       │   │               ├── Plugins.scala
│       │   │               ├── shuffle/
│       │   │               │   └── sort/
│       │   │               │       └── RowPartition.scala
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       ├── CometBatchScanExec.scala
│       │   │                       ├── CometBroadcastExchangeExec.scala
│       │   │                       ├── CometCoalesceExec.scala
│       │   │                       ├── CometCollectLimitExec.scala
│       │   │                       ├── CometColumnarToRowExec.scala
│       │   │                       ├── CometCsvNativeScanExec.scala
│       │   │                       ├── CometExecRDD.scala
│       │   │                       ├── CometExecUtils.scala
│       │   │                       ├── CometIcebergNativeScanExec.scala
│       │   │                       ├── CometLocalTableScanExec.scala
│       │   │                       ├── CometMetricNode.scala
│       │   │                       ├── CometNativeColumnarToRowExec.scala
│       │   │                       ├── CometNativeScanExec.scala
│       │   │                       ├── CometNativeWriteExec.scala
│       │   │                       ├── CometPlan.scala
│       │   │                       ├── CometScanExec.scala
│       │   │                       ├── CometScanUtils.scala
│       │   │                       ├── CometSparkToColumnarExec.scala
│       │   │                       ├── CometTakeOrderedAndProjectExec.scala
│       │   │                       ├── CometWindowExec.scala
│       │   │                       ├── DecimalPrecision.scala
│       │   │                       ├── execution/
│       │   │                       │   └── shuffle/
│       │   │                       │       ├── CometBlockStoreShuffleReader.scala
│       │   │                       │       ├── CometNativeShuffleWriter.scala
│       │   │                       │       ├── CometShuffleDependency.scala
│       │   │                       │       ├── CometShuffleExchangeExec.scala
│       │   │                       │       ├── CometShuffleManager.scala
│       │   │                       │       ├── CometShuffledRowRDD.scala
│       │   │                       │       └── NativeBatchDecoderIterator.scala
│       │   │                       ├── operators.scala
│       │   │                       └── plans/
│       │   │                           └── AliasAwareOutputExpression.scala
│       │   ├── spark-3.4/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── CometExprShim.scala
│       │   │           │       ├── ShimCometBroadcastExchangeExec.scala
│       │   │           │       ├── ShimSQLConf.scala
│       │   │           │       └── ShimSubqueryBroadcast.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           ├── ShimCometScanExec.scala
│       │   │                           └── ShimSparkErrorConverter.scala
│       │   ├── spark-3.5/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── CometExprShim.scala
│       │   │           │       ├── ShimCometBroadcastExchangeExec.scala
│       │   │           │       ├── ShimSQLConf.scala
│       │   │           │       └── ShimSubqueryBroadcast.scala
│       │   │           └── spark/
│       │   │               └── sql/
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           ├── ShimCometScanExec.scala
│       │   │                           └── ShimSparkErrorConverter.scala
│       │   ├── spark-3.x/
│       │   │   └── org/
│       │   │       └── apache/
│       │   │           ├── comet/
│       │   │           │   └── shims/
│       │   │           │       ├── ShimCometShuffleExchangeExec.scala
│       │   │           │       └── ShimCometSparkSessionExtensions.scala
│       │   │           └── spark/
│       │   │               ├── comet/
│       │   │               │   └── shims/
│       │   │               │       └── ShimCometDriverPlugin.scala
│       │   │               └── sql/
│       │   │                   ├── ExtendedExplainGenerator.scala
│       │   │                   └── comet/
│       │   │                       └── shims/
│       │   │                           ├── ShimCometShuffleWriteProcessor.scala
│       │   │                           └── ShimStreamSourceAwareSparkPlan.scala
│       │   └── spark-4.0/
│       │       └── org/
│       │           └── apache/
│       │               ├── comet/
│       │               │   └── shims/
│       │               │       ├── CometExprShim.scala
│       │               │       ├── ShimCometBroadcastExchangeExec.scala
│       │               │       ├── ShimCometShuffleExchangeExec.scala
│       │               │       ├── ShimCometSparkSessionExtensions.scala
│       │               │       ├── ShimSQLConf.scala
│       │               │       └── ShimSubqueryBroadcast.scala
│       │               └── spark/
│       │                   ├── comet/
│       │                   │   └── shims/
│       │                   │       └── ShimCometDriverPlugin.scala
│       │                   └── sql/
│       │                       └── comet/
│       │                           └── shims/
│       │                               ├── ShimCometScanExec.scala
│       │                               ├── ShimCometShuffleWriteProcessor.scala
│       │                               ├── ShimSparkErrorConverter.scala
│       │                               └── ShimStreamSourceAwareSparkPlan.scala
│       └── test/
│           ├── java/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   ├── IntegrationTestSuite.java
│           │           │   └── hadoop/
│           │           │       └── fs/
│           │           │           └── FakeHDFSFileSystem.java
│           │           └── iceberg/
│           │               └── rest/
│           │                   └── RESTCatalogAdapter.java
│           ├── resources/
│           │   ├── log4j.properties
│           │   ├── log4j2.properties
│           │   ├── sql-tests/
│           │   │   └── expressions/
│           │   │       ├── aggregate/
│           │   │       │   ├── aggregate_filter.sql
│           │   │       │   ├── avg.sql
│           │   │       │   ├── bit_agg.sql
│           │   │       │   ├── corr.sql
│           │   │       │   ├── count.sql
│           │   │       │   ├── covariance.sql
│           │   │       │   ├── first_last.sql
│           │   │       │   ├── min_max.sql
│           │   │       │   ├── stddev.sql
│           │   │       │   ├── sum.sql
│           │   │       │   └── variance.sql
│           │   │       ├── array/
│           │   │       │   ├── array_append.sql
│           │   │       │   ├── array_compact.sql
│           │   │       │   ├── array_concat.sql
│           │   │       │   ├── array_contains.sql
│           │   │       │   ├── array_distinct.sql
│           │   │       │   ├── array_except.sql
│           │   │       │   ├── array_filter.sql
│           │   │       │   ├── array_insert.sql
│           │   │       │   ├── array_insert_legacy.sql
│           │   │       │   ├── array_intersect.sql
│           │   │       │   ├── array_join.sql
│           │   │       │   ├── array_max.sql
│           │   │       │   ├── array_min.sql
│           │   │       │   ├── array_remove.sql
│           │   │       │   ├── array_repeat.sql
│           │   │       │   ├── array_union.sql
│           │   │       │   ├── arrays_overlap.sql
│           │   │       │   ├── create_array.sql
│           │   │       │   ├── element_at.sql
│           │   │       │   ├── element_at_ansi.sql
│           │   │       │   ├── flatten.sql
│           │   │       │   ├── get_array_item.sql
│           │   │       │   ├── get_array_item_ansi.sql
│           │   │       │   ├── get_array_struct_fields.sql
│           │   │       │   ├── size.sql
│           │   │       │   └── sort_array.sql
│           │   │       ├── bitwise/
│           │   │       │   └── bitwise.sql
│           │   │       ├── cast/
│           │   │       │   ├── cast.sql
│           │   │       │   ├── cast_decimal_to_primitive.sql
│           │   │       │   └── cast_double_to_string.sql
│           │   │       ├── conditional/
│           │   │       │   ├── boolean.sql
│           │   │       │   ├── case_when.sql
│           │   │       │   ├── coalesce.sql
│           │   │       │   ├── if_expr.sql
│           │   │       │   ├── in_set.sql
│           │   │       │   ├── is_not_null.sql
│           │   │       │   ├── is_null.sql
│           │   │       │   └── predicates.sql
│           │   │       ├── datetime/
│           │   │       │   ├── date_add.sql
│           │   │       │   ├── date_diff.sql
│           │   │       │   ├── date_format.sql
│           │   │       │   ├── date_format_enabled.sql
│           │   │       │   ├── date_from_unix_date.sql
│           │   │       │   ├── date_sub.sql
│           │   │       │   ├── datetime.sql
│           │   │       │   ├── from_unix_time.sql
│           │   │       │   ├── from_unix_time_enabled.sql
│           │   │       │   ├── hour.sql
│           │   │       │   ├── last_day.sql
│           │   │       │   ├── make_date.sql
│           │   │       │   ├── minute.sql
│           │   │       │   ├── next_day.sql
│           │   │       │   ├── second.sql
│           │   │       │   ├── trunc_date.sql
│           │   │       │   ├── trunc_timestamp.sql
│           │   │       │   ├── unix_date.sql
│           │   │       │   └── unix_timestamp.sql
│           │   │       ├── decimal/
│           │   │       │   ├── decimal_div.sql
│           │   │       │   ├── decimal_div_ansi.sql
│           │   │       │   └── decimal_ops.sql
│           │   │       ├── hash/
│           │   │       │   ├── crc32.sql
│           │   │       │   └── hash.sql
│           │   │       ├── map/
│           │   │       │   ├── get_map_value.sql
│           │   │       │   ├── map_contains_key.sql
│           │   │       │   ├── map_entries.sql
│           │   │       │   ├── map_from_arrays.sql
│           │   │       │   ├── map_from_entries.sql
│           │   │       │   ├── map_keys.sql
│           │   │       │   └── map_values.sql
│           │   │       ├── math/
│           │   │       │   ├── abs.sql
│           │   │       │   ├── abs_ansi.sql
│           │   │       │   ├── acos.sql
│           │   │       │   ├── arithmetic.sql
│           │   │       │   ├── arithmetic_ansi.sql
│           │   │       │   ├── asin.sql
│           │   │       │   ├── atan.sql
│           │   │       │   ├── atan2.sql
│           │   │       │   ├── bin.sql
│           │   │       │   ├── ceil.sql
│           │   │       │   ├── cos.sql
│           │   │       │   ├── cosh.sql
│           │   │       │   ├── cot.sql
│           │   │       │   ├── exp.sql
│           │   │       │   ├── expm1.sql
│           │   │       │   ├── floor.sql
│           │   │       │   ├── isnan.sql
│           │   │       │   ├── log.sql
│           │   │       │   ├── log10.sql
│           │   │       │   ├── log2.sql
│           │   │       │   ├── pow.sql
│           │   │       │   ├── round.sql
│           │   │       │   ├── signum.sql
│           │   │       │   ├── sin.sql
│           │   │       │   ├── sinh.sql
│           │   │       │   ├── sqrt.sql
│           │   │       │   ├── tan.sql
│           │   │       │   └── tanh.sql
│           │   │       ├── misc/
│           │   │       │   ├── parquet_default_values.sql
│           │   │       │   ├── scalar_subquery.sql
│           │   │       │   └── width_bucket.sql
│           │   │       ├── string/
│           │   │       │   ├── ascii.sql
│           │   │       │   ├── bit_length.sql
│           │   │       │   ├── chr.sql
│           │   │       │   ├── concat.sql
│           │   │       │   ├── concat_ws.sql
│           │   │       │   ├── contains.sql
│           │   │       │   ├── ends_with.sql
│           │   │       │   ├── get_json_object.sql
│           │   │       │   ├── hex.sql
│           │   │       │   ├── init_cap.sql
│           │   │       │   ├── init_cap_enabled.sql
│           │   │       │   ├── left.sql
│           │   │       │   ├── length.sql
│           │   │       │   ├── like.sql
│           │   │       │   ├── lower.sql
│           │   │       │   ├── lower_enabled.sql
│           │   │       │   ├── luhn_check.sql
│           │   │       │   ├── octet_length.sql
│           │   │       │   ├── regexp_replace.sql
│           │   │       │   ├── regexp_replace_enabled.sql
│           │   │       │   ├── reverse.sql
│           │   │       │   ├── right.sql
│           │   │       │   ├── rlike.sql
│           │   │       │   ├── rlike_enabled.sql
│           │   │       │   ├── starts_with.sql
│           │   │       │   ├── string.sql
│           │   │       │   ├── string_instr.sql
│           │   │       │   ├── string_lpad.sql
│           │   │       │   ├── string_repeat.sql
│           │   │       │   ├── string_replace.sql
│           │   │       │   ├── string_rpad.sql
│           │   │       │   ├── string_space.sql
│           │   │       │   ├── string_translate.sql
│           │   │       │   ├── string_trim.sql
│           │   │       │   ├── substring.sql
│           │   │       │   ├── unhex.sql
│           │   │       │   ├── upper.sql
│           │   │       │   └── upper_enabled.sql
│           │   │       ├── struct/
│           │   │       │   ├── create_named_struct.sql
│           │   │       │   ├── get_struct_field.sql
│           │   │       │   ├── json_to_structs.sql
│           │   │       │   └── structs_to_json.sql
│           │   │       └── window/
│           │   │           └── lag_lead.sql
│           │   ├── test-data/
│           │   │   ├── before_1582_date_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_date_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_date_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_dict_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_dict_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_dict_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_plain_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_plain_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_int96_plain_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_micros_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_micros_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_micros_v3_2_0.snappy.parquet
│           │   │   ├── before_1582_timestamp_millis_v2_4_5.snappy.parquet
│           │   │   ├── before_1582_timestamp_millis_v2_4_6.snappy.parquet
│           │   │   ├── before_1582_timestamp_millis_v3_2_0.snappy.parquet
│           │   │   ├── csv-test-1.csv
│           │   │   ├── csv-test-2.csv
│           │   │   ├── dec-in-fixed-len.parquet
│           │   │   ├── decimal32-written-as-64-bit-dict.snappy.parquet
│           │   │   ├── decimal32-written-as-64-bit.snappy.parquet
│           │   │   └── json-test-1.ndjson
│           │   ├── tpcds-extended/
│           │   │   └── q72.sql
│           │   ├── tpcds-micro-benchmarks/
│           │   │   ├── add_decimals.sql
│           │   │   ├── add_many_decimals.sql
│           │   │   ├── add_many_integers.sql
│           │   │   ├── agg_high_cardinality.sql
│           │   │   ├── agg_low_cardinality.sql
│           │   │   ├── agg_stddev.sql
│           │   │   ├── agg_sum_decimals_no_grouping.sql
│           │   │   ├── agg_sum_integers_no_grouping.sql
│           │   │   ├── agg_sum_integers_with_grouping.sql
│           │   │   ├── case_when_column_or_null.sql
│           │   │   ├── case_when_scalar.sql
│           │   │   ├── char_type.sql
│           │   │   ├── explode.sql
│           │   │   ├── filter_highly_selective.sql
│           │   │   ├── filter_less_selective.sql
│           │   │   ├── if_column_or_null.sql
│           │   │   ├── join_anti.sql
│           │   │   ├── join_condition.sql
│           │   │   ├── join_exploding_output.sql
│           │   │   ├── join_inner.sql
│           │   │   ├── join_left_outer.sql
│           │   │   ├── join_semi.sql
│           │   │   ├── rlike.sql
│           │   │   ├── scan_decimal.sql
│           │   │   └── to_json.sql
│           │   ├── tpcds-plan-stability/
│           │   │   ├── approved-plans-v1_4/
│           │   │   │   ├── q1.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q1.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q99.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q99.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v1_4-spark3_5/
│           │   │   │   ├── q1.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q1.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q99.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q99.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v1_4-spark4_0/
│           │   │   │   ├── q1.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q1.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q13.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q15.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q16.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q17.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q19.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q2.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q21.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q23b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q25.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q26.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q28.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q29.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q3.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q30.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q31.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q32.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q33.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q37.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q38.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q39b.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q4.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q40.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q41.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q42.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q43.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q44.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q45.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q46.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q48.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q50.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q52.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q53.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q54.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q55.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q56.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q58.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q59.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q60.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q61.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q62.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q63.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q65.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q66.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q68.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q69.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q7.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q71.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q73.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q76.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q79.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q8.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q81.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q82.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.ansi.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q83.ansi.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q84.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q85.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q87.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q88.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q89.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q9.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q90.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q91.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q92.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q93.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q94.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q95.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q96.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q97.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q99.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q99.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v2_7/
│           │   │   │   ├── q10a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q98.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   ├── approved-plans-v2_7-spark3_5/
│           │   │   │   ├── q10a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q10a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q11.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q12.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q14a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q18a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q20.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q22a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q24.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q27a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q34.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q35a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q36a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q47.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q49.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q51a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q57.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q5a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q6.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q64.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q67a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q70a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q72.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q74.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q75.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q77a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q78.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q80a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q86a.native_iceberg_compat/
│           │   │   │   │   └── extended.txt
│           │   │   │   ├── q98.native_datafusion/
│           │   │   │   │   └── extended.txt
│           │   │   │   └── q98.native_iceberg_compat/
│           │   │   │       └── extended.txt
│           │   │   └── approved-plans-v2_7-spark4_0/
│           │   │       ├── q10a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q10a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q11.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q11.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q12.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q12.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q14.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q14.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q14a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q14a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q18a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q18a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q20.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q20.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q22.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q22.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q22a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q22a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q24.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q24.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q27a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q27a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q34.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q34.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q35.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q35.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q35a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q35a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q36a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q36a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q47.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q47.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q49.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q49.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q51a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q51a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q57.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q57.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q5a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q5a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q6.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q6.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q64.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q64.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q67a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q67a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q70a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q70a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q72.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q72.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q74.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q74.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q75.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q75.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q77a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q77a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q78.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q78.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q80a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q80a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q86a.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       ├── q86a.native_iceberg_compat/
│           │   │       │   └── extended.txt
│           │   │       ├── q98.native_datafusion/
│           │   │       │   └── extended.txt
│           │   │       └── q98.native_iceberg_compat/
│           │   │           └── extended.txt
│           │   ├── tpcds-query-results/
│           │   │   ├── extended/
│           │   │   │   └── q72.sql.out
│           │   │   ├── v1_4/
│           │   │   │   ├── q1.sql.out
│           │   │   │   ├── q10.sql.out
│           │   │   │   ├── q11.sql.out
│           │   │   │   ├── q12.sql.out
│           │   │   │   ├── q13.sql.out
│           │   │   │   ├── q14a.sql.out
│           │   │   │   ├── q14b.sql.out
│           │   │   │   ├── q15.sql.out
│           │   │   │   ├── q16.sql.out
│           │   │   │   ├── q17.sql.out
│           │   │   │   ├── q18.sql.out
│           │   │   │   ├── q19.sql.out
│           │   │   │   ├── q2.sql.out
│           │   │   │   ├── q20.sql.out
│           │   │   │   ├── q21.sql.out
│           │   │   │   ├── q22.sql.out
│           │   │   │   ├── q23a.sql.out
│           │   │   │   ├── q23b.sql.out
│           │   │   │   ├── q24a.sql.out
│           │   │   │   ├── q24b.sql.out
│           │   │   │   ├── q25.sql.out
│           │   │   │   ├── q26.sql.out
│           │   │   │   ├── q27.sql.out
│           │   │   │   ├── q28.sql.out
│           │   │   │   ├── q29.sql.out
│           │   │   │   ├── q3.sql.out
│           │   │   │   ├── q30.sql.out
│           │   │   │   ├── q31.sql.out
│           │   │   │   ├── q32.sql.out
│           │   │   │   ├── q33.sql.out
│           │   │   │   ├── q34.sql.out
│           │   │   │   ├── q35.sql.out
│           │   │   │   ├── q36.sql.out
│           │   │   │   ├── q37.sql.out
│           │   │   │   ├── q38.sql.out
│           │   │   │   ├── q39a.sql.out
│           │   │   │   ├── q39b.sql.out
│           │   │   │   ├── q4.sql.out
│           │   │   │   ├── q40.sql.out
│           │   │   │   ├── q41.sql.out
│           │   │   │   ├── q42.sql.out
│           │   │   │   ├── q43.sql.out
│           │   │   │   ├── q44.sql.out
│           │   │   │   ├── q45.sql.out
│           │   │   │   ├── q46.sql.out
│           │   │   │   ├── q47.sql.out
│           │   │   │   ├── q48.sql.out
│           │   │   │   ├── q49.sql.out
│           │   │   │   ├── q5.sql.out
│           │   │   │   ├── q50.sql.out
│           │   │   │   ├── q51.sql.out
│           │   │   │   ├── q52.sql.out
│           │   │   │   ├── q53.sql.out
│           │   │   │   ├── q54.sql.out
│           │   │   │   ├── q55.sql.out
│           │   │   │   ├── q56.sql.out
│           │   │   │   ├── q57.sql.out
│           │   │   │   ├── q58.sql.out
│           │   │   │   ├── q59.sql.out
│           │   │   │   ├── q6.sql.out
│           │   │   │   ├── q60.sql.out
│           │   │   │   ├── q61.sql.out
│           │   │   │   ├── q62.sql.out
│           │   │   │   ├── q63.sql.out
│           │   │   │   ├── q64.sql.out
│           │   │   │   ├── q65.sql.out
│           │   │   │   ├── q66.sql.out
│           │   │   │   ├── q67.sql.out
│           │   │   │   ├── q68.sql.out
│           │   │   │   ├── q69.sql.out
│           │   │   │   ├── q7.sql.out
│           │   │   │   ├── q70.sql.out
│           │   │   │   ├── q71.sql.out
│           │   │   │   ├── q72.sql.out
│           │   │   │   ├── q73.sql.out
│           │   │   │   ├── q74.sql.out
│           │   │   │   ├── q75.sql.out
│           │   │   │   ├── q76.sql.out
│           │   │   │   ├── q77.sql.out
│           │   │   │   ├── q78.sql.out
│           │   │   │   ├── q79.sql.out
│           │   │   │   ├── q8.sql.out
│           │   │   │   ├── q80.sql.out
│           │   │   │   ├── q81.sql.out
│           │   │   │   ├── q82.sql.out
│           │   │   │   ├── q83.sql.out
│           │   │   │   ├── q84.sql.out
│           │   │   │   ├── q85.sql.out
│           │   │   │   ├── q86.sql.out
│           │   │   │   ├── q87.sql.out
│           │   │   │   ├── q88.sql.out
│           │   │   │   ├── q89.sql.out
│           │   │   │   ├── q9.sql.out
│           │   │   │   ├── q90.sql.out
│           │   │   │   ├── q91.sql.out
│           │   │   │   ├── q92.sql.out
│           │   │   │   ├── q93.sql.out
│           │   │   │   ├── q94.sql.out
│           │   │   │   ├── q95.sql.out
│           │   │   │   ├── q96.sql.out
│           │   │   │   ├── q97.sql.out
│           │   │   │   ├── q98.sql.out
│           │   │   │   └── q99.sql.out
│           │   │   ├── v2_7/
│           │   │   │   ├── q10a.sql.out
│           │   │   │   ├── q11.sql.out
│           │   │   │   ├── q12.sql.out
│           │   │   │   ├── q14.sql.out
│           │   │   │   ├── q14a.sql.out
│           │   │   │   ├── q18a.sql.out
│           │   │   │   ├── q20.sql.out
│           │   │   │   ├── q22.sql.out
│           │   │   │   ├── q22a.sql.out
│           │   │   │   ├── q24.sql.out
│           │   │   │   ├── q27a.sql.out
│           │   │   │   ├── q34.sql.out
│           │   │   │   ├── q35.sql.out
│           │   │   │   ├── q35a.sql.out
│           │   │   │   ├── q36a.sql.out
│           │   │   │   ├── q47.sql.out
│           │   │   │   ├── q49.sql.out
│           │   │   │   ├── q51a.sql.out
│           │   │   │   ├── q57.sql.out
│           │   │   │   ├── q5a.sql.out
│           │   │   │   ├── q6.sql.out
│           │   │   │   ├── q64.sql.out
│           │   │   │   ├── q67a.sql.out
│           │   │   │   ├── q70a.sql.out
│           │   │   │   ├── q72.sql.out
│           │   │   │   ├── q74.sql.out
│           │   │   │   ├── q75.sql.out
│           │   │   │   ├── q77a.sql.out
│           │   │   │   ├── q78.sql.out
│           │   │   │   ├── q80a.sql.out
│           │   │   │   ├── q86a.sql.out
│           │   │   │   └── q98.sql.out
│           │   │   └── v2_7-spark4_0/
│           │   │       └── q36a.sql.out
│           │   ├── tpch-extended/
│           │   │   └── q1.sql
│           │   └── tpch-query-results/
│           │       ├── q1.sql.out
│           │       ├── q10.sql.out
│           │       ├── q11.sql.out
│           │       ├── q12.sql.out
│           │       ├── q13.sql.out
│           │       ├── q14.sql.out
│           │       ├── q15.sql.out
│           │       ├── q16.sql.out
│           │       ├── q17.sql.out
│           │       ├── q18.sql.out
│           │       ├── q19.sql.out
│           │       ├── q2.sql.out
│           │       ├── q20.sql.out
│           │       ├── q21.sql.out
│           │       ├── q22.sql.out
│           │       ├── q3.sql.out
│           │       ├── q4.sql.out
│           │       ├── q5.sql.out
│           │       ├── q6.sql.out
│           │       ├── q7.sql.out
│           │       ├── q8.sql.out
│           │       └── q9.sql.out
│           ├── scala/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   ├── CometArrayExpressionSuite.scala
│           │           │   ├── CometBitwiseExpressionSuite.scala
│           │           │   ├── CometCastSuite.scala
│           │           │   ├── CometCsvExpressionSuite.scala
│           │           │   ├── CometDateTimeUtilsSuite.scala
│           │           │   ├── CometExpressionSuite.scala
│           │           │   ├── CometFuzzAggregateSuite.scala
│           │           │   ├── CometFuzzIcebergBase.scala
│           │           │   ├── CometFuzzIcebergSuite.scala
│           │           │   ├── CometFuzzMathSuite.scala
│           │           │   ├── CometFuzzTestBase.scala
│           │           │   ├── CometFuzzTestSuite.scala
│           │           │   ├── CometHashExpressionSuite.scala
│           │           │   ├── CometIcebergNativeSuite.scala
│           │           │   ├── CometJsonExpressionSuite.scala
│           │           │   ├── CometMapExpressionSuite.scala
│           │           │   ├── CometMathExpressionSuite.scala
│           │           │   ├── CometNativeSuite.scala
│           │           │   ├── CometS3TestBase.scala
│           │           │   ├── CometSparkSessionExtensionsSuite.scala
│           │           │   ├── CometSqlFileTestSuite.scala
│           │           │   ├── CometStringExpressionSuite.scala
│           │           │   ├── CometTemporalExpressionSuite.scala
│           │           │   ├── DataGenerator.scala
│           │           │   ├── DataGeneratorSuite.scala
│           │           │   ├── IcebergReadFromS3Suite.scala
│           │           │   ├── SparkErrorConverterSuite.scala
│           │           │   ├── SqlFileTestParser.scala
│           │           │   ├── WithHdfsCluster.scala
│           │           │   ├── csv/
│           │           │   │   └── CometCsvNativeReadSuite.scala
│           │           │   ├── exec/
│           │           │   │   ├── CometAggregateSuite.scala
│           │           │   │   ├── CometColumnarShuffleSuite.scala
│           │           │   │   ├── CometExec3_4PlusSuite.scala
│           │           │   │   ├── CometExecSuite.scala
│           │           │   │   ├── CometGenerateExecSuite.scala
│           │           │   │   ├── CometJoinSuite.scala
│           │           │   │   ├── CometNativeColumnarToRowSuite.scala
│           │           │   │   ├── CometNativeReaderSuite.scala
│           │           │   │   ├── CometNativeShuffleSuite.scala
│           │           │   │   └── CometWindowExecSuite.scala
│           │           │   ├── expressions/
│           │           │   │   └── conditional/
│           │           │   │       ├── CometCaseWhenSuite.scala
│           │           │   │       ├── CometCoalesceSuite.scala
│           │           │   │       └── CometIfSuite.scala
│           │           │   ├── objectstore/
│           │           │   │   └── NativeConfigSuite.scala
│           │           │   ├── parquet/
│           │           │   │   ├── CometParquetWriterSuite.scala
│           │           │   │   ├── ParquetReadFromFakeHadoopFsSuite.scala
│           │           │   │   ├── ParquetReadFromS3Suite.scala
│           │           │   │   └── ParquetReadSuite.scala
│           │           │   └── rules/
│           │           │       ├── CometExecRuleSuite.scala
│           │           │       └── CometScanRuleSuite.scala
│           │           └── spark/
│           │               ├── CometPluginsSuite.scala
│           │               ├── shuffle/
│           │               │   └── sort/
│           │               │       └── SpillSorterSuite.scala
│           │               └── sql/
│           │                   ├── CometSQLQueryTestHelper.scala
│           │                   ├── CometTPCDSQueriesList.scala
│           │                   ├── CometTPCDSQuerySuite.scala
│           │                   ├── CometTPCDSQueryTestSuite.scala
│           │                   ├── CometTPCHQueriesList.scala
│           │                   ├── CometTPCHQuerySuite.scala
│           │                   ├── CometTPCQueryBase.scala
│           │                   ├── CometTPCQueryListBase.scala
│           │                   ├── CometTestBase.scala
│           │                   ├── GenTPCHData.scala
│           │                   ├── TPCDSQueries.scala
│           │                   ├── TPCH.scala
│           │                   ├── Tables.scala
│           │                   ├── benchmark/
│           │                   │   ├── CometAggregateExpressionBenchmark.scala
│           │                   │   ├── CometArithmeticBenchmark.scala
│           │                   │   ├── CometArrayExpressionBenchmark.scala
│           │                   │   ├── CometBenchmarkBase.scala
│           │                   │   ├── CometCastBooleanBenchmark.scala
│           │                   │   ├── CometCastNumericToNumericBenchmark.scala
│           │                   │   ├── CometCastNumericToStringBenchmark.scala
│           │                   │   ├── CometCastNumericToTemporalBenchmark.scala
│           │                   │   ├── CometCastStringToNumericBenchmark.scala
│           │                   │   ├── CometCastStringToTemporalBenchmark.scala
│           │                   │   ├── CometCastTemporalToNumericBenchmark.scala
│           │                   │   ├── CometCastTemporalToStringBenchmark.scala
│           │                   │   ├── CometCastTemporalToTemporalBenchmark.scala
│           │                   │   ├── CometColumnarToRowBenchmark.scala
│           │                   │   ├── CometComparisonExpressionBenchmark.scala
│           │                   │   ├── CometConditionalExpressionBenchmark.scala
│           │                   │   ├── CometCsvExpressionBenchmark.scala
│           │                   │   ├── CometDatetimeExpressionBenchmark.scala
│           │                   │   ├── CometExecBenchmark.scala
│           │                   │   ├── CometGetJsonObjectBenchmark.scala
│           │                   │   ├── CometHashExpressionBenchmark.scala
│           │                   │   ├── CometIcebergReadBenchmark.scala
│           │                   │   ├── CometJsonExpressionBenchmark.scala
│           │                   │   ├── CometOperatorSerdeBenchmark.scala
│           │                   │   ├── CometPartitionColumnBenchmark.scala
│           │                   │   ├── CometPredicateExpressionBenchmark.scala
│           │                   │   ├── CometReadBenchmark.scala
│           │                   │   ├── CometShuffleBenchmark.scala
│           │                   │   ├── CometStringExpressionBenchmark.scala
│           │                   │   ├── CometTPCDSMicroBenchmark.scala
│           │                   │   ├── CometTPCDSQueryBenchmark.scala
│           │                   │   ├── CometTPCHQueryBenchmark.scala
│           │                   │   └── CometTPCQueryBenchmarkBase.scala
│           │                   └── comet/
│           │                       ├── CometDppFallbackRepro3949Suite.scala
│           │                       ├── CometPlanChecker.scala
│           │                       ├── CometPlanStabilitySuite.scala
│           │                       ├── CometShuffleFallbackStickinessSuite.scala
│           │                       ├── CometTaskMetricsSuite.scala
│           │                       └── ParquetEncryptionITCase.scala
│           ├── spark-3.4/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   └── shims/
│           │           │       └── ShimCometTPCHQuerySuite.scala
│           │           └── spark/
│           │               └── sql/
│           │                   └── ShimCometTestBase.scala
│           ├── spark-3.5/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   └── shims/
│           │           │       └── ShimCometTPCHQuerySuite.scala
│           │           └── spark/
│           │               └── sql/
│           │                   ├── CometToPrettyStringSuite.scala
│           │                   └── ShimCometTestBase.scala
│           ├── spark-3.x/
│           │   └── org/
│           │       └── apache/
│           │           ├── comet/
│           │           │   └── iceberg/
│           │           │       └── RESTCatalogHelper.scala
│           │           ├── iceberg/
│           │           │   └── rest/
│           │           │       └── RESTCatalogServlet.java
│           │           └── spark/
│           │               └── sql/
│           │                   └── comet/
│           │                       └── shims/
│           │                           └── ShimCometTPCDSQuerySuite.scala
│           └── spark-4.0/
│               └── org/
│                   └── apache/
│                       ├── comet/
│                       │   ├── exec/
│                       │   │   └── CometShuffle4_0Suite.scala
│                       │   ├── iceberg/
│                       │   │   └── RESTCatalogHelper.scala
│                       │   └── shims/
│                       │       └── ShimCometTPCHQuerySuite.scala
│                       ├── iceberg/
│                       │   └── rest/
│                       │       └── RESTCatalogServlet.java
│                       └── spark/
│                           ├── comet/
│                           │   └── shims/
│                           │       └── ShimTestUtils.scala
│                           └── sql/
│                               ├── CometToPrettyStringSuite.scala
│                               ├── ShimCometTestBase.scala
│                               └── comet/
│                                   └── shims/
│                                       └── ShimCometTPCDSQuerySuite.scala
└── spark-integration/
    └── pom.xml
Download .txt
Showing preview only (357K chars total). Download the full file or copy to clipboard to get everything.
SYMBOL INDEX (4143 symbols across 300 files)

FILE: benchmarks/pyspark/benchmarks/__init__.py
  function get_benchmark (line 38) | def get_benchmark(name: str) -> Type[Benchmark]:
  function list_benchmarks (line 59) | def list_benchmarks() -> List[tuple[str, str]]:

FILE: benchmarks/pyspark/benchmarks/base.py
  class Benchmark (line 30) | class Benchmark(ABC):
    method __init__ (line 33) | def __init__(self, spark: SparkSession, data_path: str, mode: str):
    method name (line 48) | def name(cls) -> str:
    method description (line 54) | def description(cls) -> str:
    method run (line 59) | def run(self) -> Dict[str, Any]:
    method execute_timed (line 68) | def execute_timed(self) -> Dict[str, Any]:
    method _print_spark_config (line 105) | def _print_spark_config(self):
    method _time_operation (line 114) | def _time_operation(self, operation_fn):

FILE: benchmarks/pyspark/benchmarks/shuffle.py
  class ShuffleBenchmark (line 32) | class ShuffleBenchmark(Benchmark):
    method __init__ (line 35) | def __init__(self, spark, data_path: str, mode: str, num_partitions: i...
    method _read_and_count (line 48) | def _read_and_count(self) -> tuple[DataFrame, int]:
    method _repartition (line 54) | def _repartition(self, df: DataFrame) -> DataFrame:
    method _write_output (line 66) | def _write_output(self, df: DataFrame, output_path: str):
    method run (line 70) | def run(self) -> Dict[str, Any]:
  class ShuffleHashBenchmark (line 101) | class ShuffleHashBenchmark(ShuffleBenchmark):
    method name (line 105) | def name(cls) -> str:
    method description (line 109) | def description(cls) -> str:
    method _repartition (line 112) | def _repartition(self, df: DataFrame) -> DataFrame:
  class ShuffleRoundRobinBenchmark (line 117) | class ShuffleRoundRobinBenchmark(ShuffleBenchmark):
    method name (line 121) | def name(cls) -> str:
    method description (line 125) | def description(cls) -> str:
    method _repartition (line 128) | def _repartition(self, df: DataFrame) -> DataFrame:

FILE: benchmarks/pyspark/generate_data.py
  function generate_data (line 37) | def generate_data(output_path: str, num_rows: int, num_partitions: int):
  function main (line 415) | def main():

FILE: benchmarks/pyspark/run_benchmark.py
  function main (line 34) | def main():

FILE: benchmarks/tpc/create-iceberg-tables.py
  function main (line 87) | def main(benchmark: str, parquet_path: str, warehouse: str, catalog: str...

FILE: benchmarks/tpc/generate-comparison.py
  function geomean (line 27) | def geomean(data):
  function get_durations (line 30) | def get_durations(result, query_key):
  function get_all_queries (line 37) | def get_all_queries(results):
  function get_common_queries (line 51) | def get_common_queries(results, labels):
  function check_result_consistency (line 65) | def check_result_consistency(results, labels, benchmark):
  function generate_query_rel_speedup_chart (line 95) | def generate_query_rel_speedup_chart(baseline, comparison, label1: str, ...
  function generate_query_abs_speedup_chart (line 152) | def generate_query_abs_speedup_chart(baseline, comparison, label1: str, ...
  function generate_query_comparison_chart (line 202) | def generate_query_comparison_chart(results, labels, benchmark: str, tit...
  function generate_summary (line 240) | def generate_summary(results, labels, benchmark: str, title: str, common...
  function query_count (line 272) | def query_count(benchmark: str):
  function main (line 280) | def main(files, labels, benchmark: str, title: str):

FILE: benchmarks/tpc/queries/tpch/q15.sql
  type revenue0 (line 3) | create view revenue0 (supplier_no, total_revenue) as

FILE: benchmarks/tpc/run.py
  function load_toml (line 43) | def load_toml(path):
  function _parse_toml (line 49) | def _parse_toml(text):
  function _parse_value (line 77) | def _parse_value(raw):
  function load_toml (line 98) | def load_toml(path):
  function resolve_env (line 144) | def resolve_env(value):
  function resolve_env_in_list (line 155) | def resolve_env_in_list(lst):
  function load_engine_config (line 159) | def load_engine_config(engine_name):
  function apply_env_defaults (line 175) | def apply_env_defaults(config):
  function apply_env_exports (line 183) | def apply_env_exports(config):
  function check_required_env (line 190) | def check_required_env(config):
  function check_common_env (line 202) | def check_common_env():
  function check_benchmark_env (line 210) | def check_benchmark_env(config, benchmark):
  function build_spark_submit_cmd (line 233) | def build_spark_submit_cmd(config, benchmark, args):
  function restart_spark (line 346) | def restart_spark():
  function main (line 376) | def main():

FILE: benchmarks/tpc/tpcbench.py
  function dedup_columns (line 36) | def dedup_columns(df):
  function result_hash (line 50) | def result_hash(rows):
  function main (line 59) | def main(

FILE: common/src/main/java/org/apache/arrow/c/AbstractCometSchemaImporter.java
  class AbstractCometSchemaImporter (line 29) | public abstract class AbstractCometSchemaImporter {
    method AbstractCometSchemaImporter (line 34) | public AbstractCometSchemaImporter(BufferAllocator allocator) {
    method getAllocator (line 39) | public BufferAllocator getAllocator() {
    method getProvider (line 43) | public CDataDictionaryProvider getProvider() {
    method importField (line 47) | public Field importField(ArrowSchema schema) {
    method importVector (line 64) | public FieldVector importVector(ArrowArray array, ArrowSchema schema) {
    method close (line 72) | @IcebergApi

FILE: common/src/main/java/org/apache/arrow/c/ArrowImporter.java
  class ArrowImporter (line 33) | public class ArrowImporter {
    method ArrowImporter (line 37) | public ArrowImporter(BufferAllocator allocator) {
    method importField (line 42) | Field importField(ArrowSchema schema, CDataDictionaryProvider provider) {
    method importVector (line 51) | public FieldVector importVector(

FILE: common/src/main/java/org/apache/comet/CometNativeException.java
  class CometNativeException (line 23) | public class CometNativeException extends CometRuntimeException {
    method CometNativeException (line 24) | public CometNativeException(String message) {

FILE: common/src/main/java/org/apache/comet/CometOutOfMemoryError.java
  class CometOutOfMemoryError (line 23) | public class CometOutOfMemoryError extends OutOfMemoryError {
    method CometOutOfMemoryError (line 24) | public CometOutOfMemoryError(String msg) {

FILE: common/src/main/java/org/apache/comet/CometRuntimeException.java
  class CometRuntimeException (line 23) | public class CometRuntimeException extends RuntimeException {
    method CometRuntimeException (line 24) | public CometRuntimeException(String message) {
    method CometRuntimeException (line 28) | public CometRuntimeException(String message, Throwable cause) {

FILE: common/src/main/java/org/apache/comet/CometSchemaImporter.java
  class CometSchemaImporter (line 26) | @IcebergApi
    method CometSchemaImporter (line 28) | @IcebergApi

FILE: common/src/main/java/org/apache/comet/NativeBase.java
  class NativeBase (line 41) | public abstract class NativeBase {
    method isLoaded (line 64) | public static synchronized boolean isLoaded() throws Throwable {
    method setLoaded (line 72) | static synchronized void setLoaded(boolean b) {
    method load (line 76) | static synchronized void load() {
    method bundleLoadLibrary (line 111) | private static void bundleLoadLibrary() {
    method initWithLogConf (line 155) | private static void initWithLogConf() {
    method cleanupOldTempLibs (line 178) | private static void cleanupOldTempLibs() {
    method setArrowProperties (line 206) | private static void setArrowProperties() {
    method setPropertyIfNull (line 211) | private static void setPropertyIfNull(String key, String value) {
    type OS (line 224) | private enum OS {
      method OS (line 232) | OS(String name, String libExtension) {
    method arch (line 238) | private static String arch() {
    method os (line 242) | private static OS os() {
    method checkArch (line 260) | private static boolean checkArch() {
    method resourceName (line 282) | private static String resourceName() {
    method init (line 294) | static native void init(String logConfPath, String logLevel);
    method isFeatureEnabled (line 302) | public static native boolean isFeatureEnabled(String featureName);

FILE: common/src/main/java/org/apache/comet/ParquetRuntimeException.java
  class ParquetRuntimeException (line 23) | public class ParquetRuntimeException extends CometRuntimeException {
    method ParquetRuntimeException (line 24) | public ParquetRuntimeException(String message) {
    method ParquetRuntimeException (line 28) | public ParquetRuntimeException(String message, Throwable cause) {

FILE: common/src/main/java/org/apache/comet/exceptions/CometQueryExecutionException.java
  class CometQueryExecutionException (line 41) | public final class CometQueryExecutionException extends CometNativeExcep...
    method CometQueryExecutionException (line 48) | public CometQueryExecutionException(String jsonMessage) {
    method isJsonMessage (line 58) | public boolean isJsonMessage() {

FILE: common/src/main/java/org/apache/comet/parquet/AbstractColumnReader.java
  class AbstractColumnReader (line 35) | @IcebergApi
    method AbstractColumnReader (line 68) | AbstractColumnReader(
    method AbstractColumnReader (line 81) | AbstractColumnReader(
    method getDescriptor (line 90) | ColumnDescriptor getDescriptor() {
    method getPath (line 94) | String getPath() {
    method setBatchSize (line 101) | @IcebergApi
    method readBatch (line 114) | public abstract void readBatch(int total);
    method currentBatch (line 117) | public abstract CometVector currentBatch();
    method close (line 119) | @IcebergApi
    method initNative (line 129) | protected void initNative() {

FILE: common/src/main/java/org/apache/comet/parquet/ArrowConstantColumnReader.java
  class ArrowConstantColumnReader (line 45) | public class ArrowConstantColumnReader extends AbstractColumnReader {
    method ArrowConstantColumnReader (line 55) | ArrowConstantColumnReader(StructField field, int batchSize, boolean us...
    method ArrowConstantColumnReader (line 65) | ArrowConstantColumnReader(
    method setBatchSize (line 74) | @Override
    method readBatch (line 81) | @Override
    method currentBatch (line 89) | @Override
    method close (line 94) | @Override
    method initVector (line 106) | private void initVector(Object value, int count) {
    method createNullVector (line 119) | private FieldVector createNullVector(int count) {
    method createFilledVector (line 159) | private FieldVector createFilledVector(Object value, int count) {

FILE: common/src/main/java/org/apache/comet/parquet/ArrowRowIndexColumnReader.java
  class ArrowRowIndexColumnReader (line 38) | public class ArrowRowIndexColumnReader extends AbstractColumnReader {
    method ArrowRowIndexColumnReader (line 50) | public ArrowRowIndexColumnReader(StructField field, int batchSize, lon...
    method setBatchSize (line 56) | @Override
    method readBatch (line 62) | @Override
    method currentBatch (line 93) | @Override
    method close (line 98) | @Override

FILE: common/src/main/java/org/apache/comet/parquet/BloomFilterReader.java
  class BloomFilterReader (line 47) | public class BloomFilterReader implements FilterPredicate.Visitor<Boolea...
    method BloomFilterReader (line 57) | BloomFilterReader(
    method visit (line 67) | @Override
    method visit (line 98) | @Override
    method visit (line 103) | @Override
    method visit (line 108) | @Override
    method visit (line 113) | @Override
    method visit (line 118) | @Override
    method visit (line 123) | @Override
    method visit (line 128) | @Override
    method visit (line 133) | @Override
    method visit (line 142) | @Override
    method visit (line 148) | @Override
    method visit (line 154) | private <T extends Comparable<T>, U extends UserDefinedPredicate<T>> B...
    method readBloomFilter (line 159) | BloomFilter readBloomFilter(ColumnChunkMetaData meta) {
    method readBloomFilterInternal (line 180) | private BloomFilter readBloomFilterInternal(ColumnChunkMetaData meta) ...

FILE: common/src/main/java/org/apache/comet/parquet/ColumnIndexReader.java
  class ColumnIndexReader (line 47) | class ColumnIndexReader implements ColumnIndexStore {
    method getColumnIndex (line 53) | @Override
    method getOffsetIndex (line 58) | @Override
    method getColumnIndex (line 66) | @Override
    method getOffsetIndex (line 71) | @Override
    method create (line 85) | static ColumnIndexReader create(
    method ColumnIndexReader (line 97) | private ColumnIndexReader(
    method getColumnIndex (line 114) | @Override
    method getOffsetIndex (line 119) | @Override
    type IndexStore (line 124) | private interface IndexStore {
      method getColumnIndex (line 125) | ColumnIndex getColumnIndex();
      method getOffsetIndex (line 127) | OffsetIndex getOffsetIndex();
    class IndexStoreImpl (line 130) | private class IndexStoreImpl implements IndexStore {
      method IndexStoreImpl (line 136) | IndexStoreImpl(ColumnChunkMetaData meta) {
      method getColumnIndex (line 153) | @Override
      method getOffsetIndex (line 168) | @Override
    method readColumnIndex (line 175) | ColumnIndex readColumnIndex(ColumnChunkMetaData column) throws IOExcep...
    method readOffsetIndex (line 204) | OffsetIndex readOffsetIndex(ColumnChunkMetaData column) throws IOExcep...

FILE: common/src/main/java/org/apache/comet/parquet/ColumnPageReader.java
  class ColumnPageReader (line 40) | public class ColumnPageReader implements PageReader {
    method ColumnPageReader (line 54) | ColumnPageReader(
    method getTotalValueCount (line 93) | @Override
    method getPageValueCount (line 99) | public int getPageValueCount() {
    method skipPage (line 104) | public void skipPage() {
    method readPage (line 109) | @Override
    method readDictionaryPage (line 225) | @Override
    method getPageOrdinal (line 249) | private int getPageOrdinal(int currentPageIndex) {

FILE: common/src/main/java/org/apache/comet/parquet/ColumnReader.java
  class ColumnReader (line 52) | @IcebergApi
    method ColumnReader (line 96) | ColumnReader(
    method setPageReader (line 116) | @IcebergApi
    method setRowGroupReader (line 132) | @IcebergApi
    method readBatch (line 139) | @Override
    method currentBatch (line 169) | @Override
    method close (line 174) | @Override
    method loadVector (line 184) | public CometDecodedVector loadVector() {
    method readPage (line 250) | protected void readPage() {
    method isValidValueEncoding (line 303) | @SuppressWarnings("deprecation")

FILE: common/src/main/java/org/apache/comet/parquet/CometFileKeyUnwrapper.java
  class CometFileKeyUnwrapper (line 93) | public class CometFileKeyUnwrapper {
    method normalizeS3Scheme (line 112) | private String normalizeS3Scheme(final String filePath) {
    method storeDecryptionKeyRetriever (line 131) | public void storeDecryptionKeyRetriever(final String filePath, final C...
    method getKey (line 159) | public byte[] getKey(final String filePath, final byte[] keyMetadata)

FILE: common/src/main/java/org/apache/comet/parquet/CometInputFile.java
  class CometInputFile (line 42) | public class CometInputFile implements InputFile {
    method fromPath (line 50) | public static CometInputFile fromPath(Path path, Configuration conf) t...
    method CometInputFile (line 55) | private CometInputFile(FileSystem fs, FileStatus stat, Configuration c...
    method getLength (line 61) | @Override
    method getConf (line 66) | public Configuration getConf() {
    method getFileSystem (line 70) | public FileSystem getFileSystem() {
    method getPath (line 74) | public Path getPath() {
    method newStream (line 78) | @Override
    method newStream (line 105) | public SeekableInputStream newStream(long offset, long length) throws ...
    method toString (line 137) | @Override
    method isAtLeastHadoop33 (line 142) | private static boolean isAtLeastHadoop33() {
    method isAtLeastHadoop33 (line 147) | static boolean isAtLeastHadoop33(String version) {

FILE: common/src/main/java/org/apache/comet/parquet/DictionaryPageReader.java
  class DictionaryPageReader (line 47) | public class DictionaryPageReader implements DictionaryPageReadStore {
    method DictionaryPageReader (line 54) | DictionaryPageReader(
    method readDictionaryPage (line 70) | @Override
    method readDictionary (line 98) | DictionaryPage readDictionary(ColumnChunkMetaData meta) throws IOExcep...
    method readCompressedDictionary (line 157) | private DictionaryPage readCompressedDictionary(
    method reusableCopy (line 184) | private static DictionaryPage reusableCopy(DictionaryPage dict) throws...

FILE: common/src/main/java/org/apache/comet/parquet/FileReader.java
  class FileReader (line 106) | @IcebergApi
    method FileReader (line 135) | FileReader(InputFile file, ParquetReadOptions options, ReadOptions com...
    method FileReader (line 141) | @IcebergApi
    method FileReader (line 183) | FileReader(
    method FileReader (line 192) | FileReader(
    method getFooter (line 232) | ParquetMetadata getFooter() {
    method getFileMetaData (line 237) | FileMetaData getFileMetaData() {
    method getInputStream (line 242) | public SeekableInputStream getInputStream() {
    method getOptions (line 247) | public ParquetReadOptions getOptions() {
    method getRowGroups (line 252) | public List<BlockMetaData> getRowGroups() {
    method setRequestedSchema (line 257) | public void setRequestedSchema(List<ColumnDescriptor> projection) {
    method setRequestedSchemaFromSpecs (line 265) | @IcebergApi
    method buildParquetReadOptions (line 274) | private static ParquetReadOptions buildParquetReadOptions(
    method getRecordCount (line 319) | public long getRecordCount() {
    method getFilteredRecordCount (line 331) | public long getFilteredRecordCount() {
    method skipNextRowGroup (line 344) | @IcebergApi
    method readNextRowGroup (line 353) | @IcebergApi
    method readNextFilteredRowGroup (line 404) | public PageReadStore readNextFilteredRowGroup() throws IOException {
    method getColumnIndexReader (line 462) | ColumnIndexReader getColumnIndexReader(int blockIndex) {
    method readChunks (line 471) | private RowGroupReader readChunks(
    method shouldReadParallel (line 490) | private boolean shouldReadParallel() {
    method shouldReadParallel (line 499) | static boolean shouldReadParallel(ReadOptions options, String scheme) {
    method shouldReadParallelForScheme (line 503) | private static boolean shouldReadParallelForScheme(String scheme) {
    class ReadRange (line 517) | static class ReadRange {
      method toString (line 523) | @Override
    method getReadRanges (line 536) | List<ReadRange> getReadRanges(List<ConsecutivePartList> allParts, int ...
    method readAllRangesParallel (line 569) | private void readAllRangesParallel(List<ReadRange> allRanges) {
    method readAllPartsParallel (line 627) | public void readAllPartsParallel(List<ConsecutivePartList> allParts, C...
    method readChunkPages (line 656) | private void readChunkPages(Chunk chunk, BlockMetaData block) throws I...
    method advanceToNextBlock (line 678) | private boolean advanceToNextBlock() {
    method getRowIndices (line 687) | public long[] getRowIndices() {
    method getRowIndices (line 691) | public static long[] getRowIndices(List<BlockMetaData> blocks) {
    method getRowIndexOffset (line 705) | public static long getRowIndexOffset(BlockMetaData metaData) {
    method getRowRanges (line 715) | private RowRanges getRowRanges(int blockIndex) {
    method readFooter (line 732) | private static ParquetMetadata readFooter(
    method filterRowGroups (line 812) | private List<BlockMetaData> filterRowGroups(List<BlockMetaData> blocks) {
    method filterRowGroups (line 816) | public static List<BlockMetaData> filterRowGroups(
    method filterRowGroups (line 840) | public static List<BlockMetaData> filterRowGroups(
    method listWithNulls (line 864) | private static <T> List<T> listWithNulls(int size) {
    method closeStream (line 868) | public void closeStream() throws IOException {
    method close (line 874) | @IcebergApi
    class ChunkListBuilder (line 891) | private class ChunkListBuilder {
      class ChunkData (line 892) | private class ChunkData {
      method add (line 899) | void add(ChunkDescriptor descriptor, List<ByteBuffer> buffers) {
      method setOffsetIndex (line 908) | void setOffsetIndex(ChunkDescriptor descriptor, OffsetIndex offsetIn...
      method build (line 917) | List<Chunk> build() {
    class Chunk (line 929) | private class Chunk {
      method Chunk (line 939) | Chunk(ChunkDescriptor descriptor, List<ByteBuffer> buffers, OffsetIn...
      method readPageHeader (line 945) | protected PageHeader readPageHeader(BlockCipher.Decryptor blockDecry...
      method verifyCrc (line 954) | private void verifyCrc(int referenceCrc, byte[] bytes, String except...
      method readAllPages (line 962) | private ColumnPageReader readAllPages() throws IOException {
      method readAllPages (line 966) | private ColumnPageReader readAllPages(
      method hasMorePages (line 1128) | private boolean hasMorePages(long valuesCountReadSoFar, int dataPage...
      method getPageOrdinal (line 1134) | private int getPageOrdinal(int dataPageCountReadSoFar) {
      method readAsBytesInput (line 1147) | public BytesInput readAsBytesInput(int size) throws IOException {
    class ConsecutivePartList (line 1156) | private class ConsecutivePartList {
      method ConsecutivePartList (line 1170) | ConsecutivePartList(long offset) {
      method addChunk (line 1188) | public void addChunk(ChunkDescriptor descriptor) {
      method allocateReadBuffers (line 1193) | private void allocateReadBuffers() {
      method readAll (line 1214) | public void readAll(SeekableInputStream f, ChunkListBuilder builder)...
      method setReadMetrics (line 1239) | private void setReadMetrics(long startNs) {
      method endPos (line 1265) | public long endPos() {
    class ChunkDescriptor (line 1271) | private static class ChunkDescriptor {
      method ChunkDescriptor (line 1284) | ChunkDescriptor(
      method hashCode (line 1292) | @Override
      method equals (line 1297) | @Override

FILE: common/src/main/java/org/apache/comet/parquet/FooterReader.java
  class FooterReader (line 39) | public class FooterReader {
    method readFooter (line 40) | public static ParquetMetadata readFooter(Configuration configuration, ...

FILE: common/src/main/java/org/apache/comet/parquet/IcebergCometNativeBatchReader.java
  class IcebergCometNativeBatchReader (line 34) | public class IcebergCometNativeBatchReader extends NativeBatchReader {
    method IcebergCometNativeBatchReader (line 36) | public IcebergCometNativeBatchReader(StructType requiredSchema) {
    method init (line 42) | public void init(
    method getSparkSchema (line 82) | public StructType getSparkSchema() {

FILE: common/src/main/java/org/apache/comet/parquet/IndexFilter.java
  class IndexFilter (line 29) | public class IndexFilter {
    method IndexFilter (line 34) | public IndexFilter(RowRanges rowRanges, OffsetIndex offsetIndex, long ...
    method filterOffsetIndex (line 40) | OffsetIndex filterOffsetIndex() {
    method calculateOffsetRanges (line 56) | List<OffsetRange> calculateOffsetRanges(OffsetIndex filteredOffsetInde...
    class FilteredOffsetIndex (line 83) | private static class FilteredOffsetIndex implements OffsetIndex {
      method FilteredOffsetIndex (line 87) | private FilteredOffsetIndex(OffsetIndex offsetIndex, int[] indexMap) {
      method getPageOrdinal (line 92) | @Override
      method getPageCount (line 97) | @Override
      method getOffset (line 102) | @Override
      method getCompressedPageSize (line 107) | @Override
      method getFirstRowIndex (line 112) | @Override
      method getLastRowIndex (line 117) | @Override
    class OffsetRange (line 127) | static class OffsetRange {
      method OffsetRange (line 131) | private OffsetRange(long offset, int length) {
      method extend (line 136) | private boolean extend(long offset, int length) {

FILE: common/src/main/java/org/apache/comet/parquet/LazyColumnReader.java
  class LazyColumnReader (line 32) | public class LazyColumnReader extends ColumnReader {
    method LazyColumnReader (line 46) | LazyColumnReader(
    method setPageReader (line 58) | @Override
    method readBatch (line 74) | @Override
    method currentBatch (line 90) | @Override
    method readAllBatch (line 96) | public void readAllBatch() {
    method materializeUpToIfNecessary (line 110) | public boolean materializeUpToIfNecessary(int rowId) {
    method readBatch (line 129) | private void readBatch(int rowId, int numNullRowsToPad) {
    method tryPageSkip (line 160) | private int tryPageSkip(int rowId) {

FILE: common/src/main/java/org/apache/comet/parquet/Native.java
  class Native (line 27) | public final class Native extends NativeBase {
    method readBatch (line 28) | public static int[] readBatch(long handle, int batchSize) {
    method skipBatch (line 32) | public static int skipBatch(long handle, int batchSize) {
    method initColumnReader (line 66) | public static native long initColumnReader(
    method setDictionaryPage (line 97) | public static native void setDictionaryPage(
    method setPageV1 (line 108) | public static native void setPageV1(
    method setPageV2 (line 121) | public static native void setPageV2(
    method resetBatch (line 135) | @IcebergApi
    method readBatch (line 160) | public static native int[] readBatch(long handle, int batchSize, int n...
    method skipBatch (line 180) | public static native int skipBatch(long handle, int batchSize, boolean...
    method currentBatch (line 189) | public static native void currentBatch(long handle, long arrayAddr, lo...
    method closeColumnReader (line 196) | public static native void closeColumnReader(long handle);
    method validateObjectStoreConfig (line 207) | public static native void validateObjectStoreConfig(
    method initRecordBatchReader (line 218) | public static native long initRecordBatchReader(
    method readNextRecordBatch (line 241) | public static native int readNextRecordBatch(long handle);
    method currentColumnBatch (line 254) | public static native void currentColumnBatch(
    method closeRecordBatchReader (line 264) | public static native void closeRecordBatchReader(long handle);

FILE: common/src/main/java/org/apache/comet/parquet/NativeBatchReader.java
  class NativeBatchReader (line 104) | public class NativeBatchReader extends RecordReader<Void, ColumnarBatch>...
    class FileInfo (line 110) | public static class FileInfo {
      method FileInfo (line 116) | public FileInfo(long start, long length, String filePath, long fileS...
      method fromPartitionedFile (line 128) | public static FileInfo fromPartitionedFile(PartitionedFile file) thr...
      method start (line 132) | public long start() {
      method length (line 136) | public long length() {
      method filePath (line 140) | public String filePath() {
      method fileSize (line 144) | public long fileSize() {
      method pathUri (line 148) | public URI pathUri() throws URISyntaxException {
    method NativeBatchReader (line 218) | protected NativeBatchReader() {
    method NativeBatchReader (line 224) | public NativeBatchReader(String file, int capacity) {
    method NativeBatchReader (line 229) | public NativeBatchReader(
    method NativeBatchReader (line 235) | public NativeBatchReader(
    method NativeBatchReader (line 256) | private NativeBatchReader(AbstractColumnReader[] columnReaders) {
    method NativeBatchReader (line 269) | NativeBatchReader(
    method NativeBatchReader (line 304) | NativeBatchReader(
    method init (line 342) | public void init() throws Throwable {
    method getParquetColumn (line 597) | private ParquetColumn getParquetColumn(MessageType schema, StructType ...
    method getIdToParquetFieldMap (line 610) | private Map<Integer, List<Type>> getIdToParquetFieldMap(GroupType type) {
    method getCaseSensitiveParquetFieldMap (line 616) | private Map<String, List<Type>> getCaseSensitiveParquetFieldMap(GroupT...
    method getCaseInsensitiveParquetFieldMap (line 620) | private Map<String, List<Type>> getCaseInsensitiveParquetFieldMap(Grou...
    method buildCaseSensitiveNameMap (line 625) | private Map<String, Type> buildCaseSensitiveNameMap(List<Type> types) {
    method buildCaseInsensitiveNameMap (line 629) | private Map<String, Type> buildCaseInsensitiveNameMap(List<Type> types) {
    method getMatchingParquetFieldById (line 634) | private Type getMatchingParquetFieldById(
    method getMatchingNameById (line 664) | private String getMatchingNameById(
    method getSparkSchemaByFieldId (line 682) | private StructType getSparkSchemaByFieldId(
    method isPrimitiveCatalystType (line 714) | private static boolean isPrimitiveCatalystType(DataType dataType) {
    method getSparkTypeByFieldId (line 720) | private DataType getSparkTypeByFieldId(
    method getSparkArrayTypeByFieldId (line 762) | private DataType getSparkArrayTypeByFieldId(
    method checkParquetType (line 793) | private void checkParquetType(ParquetColumn column) throws IOException {
    method getColumnIndexFromParquetColumn (line 825) | private int getColumnIndexFromParquetColumn(ParquetColumn column) {
    method checkColumn (line 849) | private void checkColumn(ParquetColumn column) throws IOException {
    method containsPath (line 877) | private boolean containsPath(Type parquetType, String[] path) {
    method containsPath (line 881) | private boolean containsPath(Type parquetType, String[] path, int dept...
    method setSparkSchema (line 893) | public void setSparkSchema(StructType schema) {
    method getColumnReaders (line 897) | public AbstractColumnReader[] getColumnReaders() {
    method initialize (line 901) | @Override
    method nextKeyValue (line 907) | @Override
    method getCurrentKey (line 912) | @Override
    method getCurrentValue (line 917) | @Override
    method getProgress (line 922) | @Override
    method currentBatch (line 932) | public ColumnarBatch currentBatch() {
    method nextBatch (line 941) | public boolean nextBatch() throws IOException {
    method close (line 987) | @Override
    method loadNextBatch (line 1007) | @SuppressWarnings("deprecation")
    method getTaskAccumulator (line 1067) | @SuppressWarnings("unchecked")
    method serializeArrowSchema (line 1087) | private byte[] serializeArrowSchema(Schema schema) throws IOException {

FILE: common/src/main/java/org/apache/comet/parquet/NativeColumnReader.java
  class NativeColumnReader (line 39) | public class NativeColumnReader extends AbstractColumnReader {
    method NativeColumnReader (line 74) | NativeColumnReader(
    method initNative (line 95) | @Override
    method readBatch (line 103) | @Override
    method currentBatch (line 111) | @Override
    method close (line 116) | @Override
    method loadVector (line 126) | public CometDecodedVector loadVector() {

FILE: common/src/main/java/org/apache/comet/parquet/ParquetColumnSpec.java
  class ParquetColumnSpec (line 32) | @IcebergApi
    method ParquetColumnSpec (line 47) | @IcebergApi
    method getFieldId (line 69) | @IcebergApi
    method getPath (line 74) | @IcebergApi
    method getPhysicalType (line 79) | @IcebergApi
    method getTypeLength (line 84) | @IcebergApi
    method isRepeated (line 89) | public boolean isRepeated() {
    method getMaxRepetitionLevel (line 93) | @IcebergApi
    method getMaxDefinitionLevel (line 98) | @IcebergApi
    method getLogicalTypeName (line 103) | @IcebergApi
    method getLogicalTypeParams (line 108) | @IcebergApi

FILE: common/src/main/java/org/apache/comet/parquet/ParquetMetadataSerializer.java
  class ParquetMetadataSerializer (line 36) | public class ParquetMetadataSerializer {
    method ParquetMetadataSerializer (line 40) | public ParquetMetadataSerializer() {
    method ParquetMetadataSerializer (line 44) | public ParquetMetadataSerializer(ParquetMetadataConverter converter) {
    method serialize (line 55) | public byte[] serialize(ParquetMetadata metadata) throws IOException {
    method deserialize (line 69) | public ParquetMetadata deserialize(byte[] bytes) throws IOException {

FILE: common/src/main/java/org/apache/comet/parquet/ReadOptions.java
  class ReadOptions (line 37) | @IcebergApi
    method ReadOptions (line 58) | ReadOptions(
    method isParallelIOEnabled (line 71) | public boolean isParallelIOEnabled() {
    method parallelIOThreadPoolSize (line 75) | public int parallelIOThreadPoolSize() {
    method isIOMergeRangesEnabled (line 79) | public boolean isIOMergeRangesEnabled() {
    method getIOMergeRangesDelta (line 83) | public int getIOMergeRangesDelta() {
    method adjustReadRangesSkew (line 87) | public boolean adjustReadRangesSkew() {
    method builder (line 91) | @IcebergApi
    class Builder (line 96) | @IcebergApi
      method enableParallelIO (line 110) | public Builder enableParallelIO(boolean b) {
      method withParallelIOThreadPoolSize (line 121) | public Builder withParallelIOThreadPoolSize(int numThreads) {
      method enableIOMergeRanges (line 126) | public Builder enableIOMergeRanges(boolean enableIOMergeRanges) {
      method withIOMergeRangesDelta (line 131) | public Builder withIOMergeRangesDelta(int ioMergeRangesDelta) {
      method adjustReadRangeSkew (line 136) | public Builder adjustReadRangeSkew(boolean adjustReadRangeSkew) {
      method build (line 141) | @IcebergApi
      method Builder (line 151) | @IcebergApi
      method setS3Config (line 180) | private void setS3Config() {
      method setS3ConfIfGreater (line 197) | private void setS3ConfIfGreater(Configuration conf, String key, int ...
      method setS3ConfIfGreater (line 209) | private void setS3ConfIfGreater(Configuration conf, String key, long...

FILE: common/src/main/java/org/apache/comet/parquet/RowGroupFilter.java
  class RowGroupFilter (line 36) | public class RowGroupFilter implements Visitor<List<BlockMetaData>> {
    type FilterLevel (line 42) | public enum FilterLevel {
    method filterRowGroups (line 48) | public static List<BlockMetaData> filterRowGroups(
    method filterRowGroups (line 53) | public static List<BlockMetaData> filterRowGroups(
    method RowGroupFilter (line 58) | private RowGroupFilter(List<FilterLevel> levels, List<BlockMetaData> b...
    method RowGroupFilter (line 65) | private RowGroupFilter(List<FilterLevel> levels, List<BlockMetaData> b...
    method visit (line 72) | @Override
    method visit (line 115) | @Override
    method visit (line 121) | @Override

FILE: common/src/main/java/org/apache/comet/parquet/RowGroupReader.java
  class RowGroupReader (line 34) | @IcebergApi
    method RowGroupReader (line 41) | public RowGroupReader(long rowCount, long rowIndexOffset) {
    method RowGroupReader (line 47) | RowGroupReader(RowRanges rowRanges) {
    method getRowCount (line 53) | @IcebergApi
    method getPageReader (line 59) | @Override
    method getPageReader (line 64) | public PageReader getPageReader(String[] path) {
    method getRowIndexes (line 73) | @Override
    method getRowIndexOffset (line 78) | @Override
    method addColumn (line 83) | void addColumn(ColumnDescriptor path, ColumnPageReader reader) {

FILE: common/src/main/java/org/apache/comet/parquet/TypeUtil.java
  class TypeUtil (line 37) | public class TypeUtil {
    method convertToParquet (line 44) | @IcebergApi
    method convertToParquetSpec (line 115) | public static ParquetColumnSpec convertToParquetSpec(StructField field) {
    method checkParquetType (line 129) | public static void checkParquetType(ColumnDescriptor descriptor, DataT...
    method validateTimestampType (line 244) | private static void validateTimestampType(
    method convertErrorForTimestampNTZ (line 256) | private static void convertErrorForTimestampNTZ(String parquetType) {
    method canReadAsIntDecimal (line 264) | private static boolean canReadAsIntDecimal(ColumnDescriptor descriptor...
    method canReadAsLongDecimal (line 270) | private static boolean canReadAsLongDecimal(ColumnDescriptor descripto...
    method canReadAsBinaryDecimal (line 276) | private static boolean canReadAsBinaryDecimal(ColumnDescriptor descrip...
    method isLongDecimal (line 281) | private static boolean isLongDecimal(DataType dt) {
    method isDecimalTypeMatched (line 289) | private static boolean isDecimalTypeMatched(ColumnDescriptor descripto...
    method isTimestampTypeMatched (line 323) | private static boolean isTimestampTypeMatched(
    method isUnsignedIntTypeMatched (line 329) | private static boolean isUnsignedIntTypeMatched(
    method isSpark40Plus (line 336) | static boolean isSpark40Plus() {

FILE: common/src/main/java/org/apache/comet/parquet/Utils.java
  class Utils (line 35) | public class Utils {
    method getColumnReader (line 38) | @IcebergApi
    method getColumnReader (line 64) | @IcebergApi
    method getColumnReader (line 77) | public static ColumnReader getColumnReader(
    method supportLazyMaterialization (line 94) | private static boolean supportLazyMaterialization(DataType type) {
    method initColumnReader (line 110) | public static long initColumnReader(
    class TypePromotionInfo (line 179) | static class TypePromotionInfo {
      method TypePromotionInfo (line 190) | TypePromotionInfo(int physicalTypeId, int precision, int scale, int ...
      method TypePromotionInfo (line 197) | TypePromotionInfo(DataType sparkReadType) {
    method getPhysicalTypeId (line 233) | static int getPhysicalTypeId(PrimitiveType.PrimitiveTypeName typeName) {
    method getLogicalTypeId (line 262) | static int getLogicalTypeId(LogicalTypeAnnotation annotation) {
    method getTimeUnitId (line 284) | static int getTimeUnitId(LogicalTypeAnnotation.TimeUnit tu) {
    method buildColumnDescriptor (line 297) | @IcebergApi
    method reconstructLogicalType (line 343) | private static LogicalTypeAnnotation reconstructLogicalType(
    method descriptorToParquetColumnSpec (line 463) | @IcebergApi

FILE: common/src/main/java/org/apache/comet/parquet/WrappedInputFile.java
  class WrappedInputFile (line 35) | @IcebergApi
    method WrappedInputFile (line 39) | @IcebergApi
    method getLength (line 44) | @Override
    method newStream (line 55) | @Override
    method toString (line 67) | @Override

FILE: common/src/main/java/org/apache/comet/parquet/WrappedSeekableInputStream.java
  class WrappedSeekableInputStream (line 34) | public class WrappedSeekableInputStream extends DelegatingSeekableInputS...
    method WrappedSeekableInputStream (line 38) | public WrappedSeekableInputStream(InputStream inputStream) {
    method getPos (line 43) | @Override
    method seek (line 54) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometDecodedVector.java
  class CometDecodedVector (line 29) | public abstract class CometDecodedVector extends CometVector {
    method CometDecodedVector (line 43) | protected CometDecodedVector(ValueVector vector, Field valueField, boo...
    method CometDecodedVector (line 47) | protected CometDecodedVector(
    method getValueVector (line 57) | @Override
    method setNumNulls (line 62) | @Override
    method setNumValues (line 71) | @Override
    method numValues (line 87) | public int numValues() {
    method hasNull (line 91) | @Override
    method numNulls (line 96) | @Override
    method isNullAt (line 101) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometDelegateVector.java
  class CometDelegateVector (line 32) | public class CometDelegateVector extends CometVector {
    method CometDelegateVector (line 35) | public CometDelegateVector(DataType dataType) {
    method CometDelegateVector (line 39) | public CometDelegateVector(DataType dataType, boolean useDecimal128) {
    method CometDelegateVector (line 43) | public CometDelegateVector(DataType dataType, CometVector delegate, bo...
    method setDelegate (line 51) | protected void setDelegate(CometVector delegate) {
    method setNumNulls (line 55) | @Override
    method setNumValues (line 60) | @Override
    method numValues (line 65) | @Override
    method hasNull (line 70) | @Override
    method numNulls (line 75) | @Override
    method isNullAt (line 80) | @Override
    method getBoolean (line 85) | @Override
    method getByte (line 90) | @Override
    method getShort (line 95) | @Override
    method getInt (line 100) | @Override
    method getLong (line 105) | @Override
    method getLongDecimal (line 110) | @Override
    method getFloat (line 115) | @Override
    method getDouble (line 120) | @Override
    method getDecimal (line 125) | @Override
    method getBinaryDecimal (line 130) | @Override
    method getUTF8String (line 135) | @Override
    method getBinary (line 140) | @Override
    method getArray (line 145) | @Override
    method getMap (line 150) | @Override
    method getChild (line 155) | @Override
    method getValueVector (line 160) | @Override
    method slice (line 165) | @Override
    method getDictionaryProvider (line 170) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometDictionary.java
  class CometDictionary (line 26) | public class CometDictionary implements AutoCloseable {
    method CometDictionary (line 35) | public CometDictionary(CometPlainVector values) {
    method setDictionaryVector (line 40) | public void setDictionaryVector(CometPlainVector values) {
    method getValueVector (line 47) | public ValueVector getValueVector() {
    method decodeToBoolean (line 51) | public boolean decodeToBoolean(int index) {
    method decodeToByte (line 55) | public byte decodeToByte(int index) {
    method decodeToShort (line 59) | public short decodeToShort(int index) {
    method decodeToInt (line 63) | public int decodeToInt(int index) {
    method decodeToLong (line 67) | public long decodeToLong(int index) {
    method decodeToLongDecimal (line 71) | public long decodeToLongDecimal(int index) {
    method decodeToFloat (line 75) | public float decodeToFloat(int index) {
    method decodeToDouble (line 79) | public double decodeToDouble(int index) {
    method decodeToBinary (line 83) | public byte[] decodeToBinary(int index) {
    method decodeToUTF8String (line 109) | public UTF8String decodeToUTF8String(int index) {
    method close (line 113) | @Override
    class ByteArrayWrapper (line 118) | private static class ByteArrayWrapper {
      method ByteArrayWrapper (line 121) | ByteArrayWrapper(byte[] bytes) {

FILE: common/src/main/java/org/apache/comet/vector/CometDictionaryVector.java
  class CometDictionaryVector (line 29) | public class CometDictionaryVector extends CometDecodedVector {
    method CometDictionaryVector (line 37) | public CometDictionaryVector(
    method CometDictionaryVector (line 45) | public CometDictionaryVector(
    method getDictionaryProvider (line 61) | @Override
    method close (line 66) | @Override
    method getBoolean (line 75) | @Override
    method getByte (line 80) | @Override
    method getShort (line 85) | @Override
    method getInt (line 90) | @Override
    method getLong (line 95) | @Override
    method getLongDecimal (line 100) | @Override
    method getFloat (line 105) | @Override
    method getDouble (line 110) | @Override
    method getUTF8String (line 115) | @Override
    method getBinary (line 120) | @Override
    method getBinaryDecimal (line 125) | @Override
    method slice (line 130) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometLazyVector.java
  class CometLazyVector (line 27) | public class CometLazyVector extends CometDelegateVector {
    method CometLazyVector (line 30) | public CometLazyVector(DataType type, LazyColumnReader columnReader, b...
    method getDecodedVector (line 35) | public CometDecodedVector getDecodedVector() {
    method getValueVector (line 39) | @Override
    method setNumNulls (line 46) | @Override
    method setNumValues (line 51) | @Override
    method close (line 56) | @Override
    method hasNull (line 61) | @Override
    method numNulls (line 68) | @Override
    method isNullAt (line 75) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometListVector.java
  class CometListVector (line 30) | public class CometListVector extends CometDecodedVector {
    method CometListVector (line 36) | public CometListVector(
    method getArray (line 46) | @Override
    method slice (line 55) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometMapVector.java
  class CometMapVector (line 31) | public class CometMapVector extends CometDecodedVector {
    method CometMapVector (line 40) | public CometMapVector(
    method getMap (line 66) | @Override
    method slice (line 75) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometPlainVector.java
  class CometPlainVector (line 34) | public class CometPlainVector extends CometDecodedVector {
    method CometPlainVector (line 43) | public CometPlainVector(ValueVector vector, boolean useDecimal128) {
    method CometPlainVector (line 47) | public CometPlainVector(ValueVector vector, boolean useDecimal128, boo...
    method CometPlainVector (line 51) | public CometPlainVector(
    method isReused (line 65) | public boolean isReused() {
    method setReused (line 69) | public void setReused(boolean isReused) {
    method setNumNulls (line 73) | @Override
    method getBoolean (line 79) | @Override
    method getByte (line 89) | @Override
    method getShort (line 94) | @Override
    method getInt (line 99) | @Override
    method getLong (line 104) | @Override
    method getLongDecimal (line 109) | @Override
    method getFloat (line 114) | @Override
    method getDouble (line 119) | @Override
    method getUTF8String (line 124) | @Override
    method getBinary (line 149) | @Override
    method getDictionaryProvider (line 172) | @Override
    method isNullAt (line 177) | @Override
    method slice (line 182) | @Override
    method convertToUuid (line 190) | private static UUID convertToUuid(byte[] buf) {

FILE: common/src/main/java/org/apache/comet/vector/CometSelectionVector.java
  class CometSelectionVector (line 42) | public class CometSelectionVector extends CometVector {
    method CometSelectionVector (line 73) | public CometSelectionVector(CometVector values, int[] indices, int num...
    method getValuesIndex (line 111) | private int getValuesIndex(int selectionIndex) {
    method getValues (line 126) | public CometVector getValues() {
    method getIndices (line 135) | public CometVector getIndices() {
    method getSelectedIndices (line 144) | private int[] getSelectedIndices() {
    method numValues (line 148) | @Override
    method setNumValues (line 153) | @Override
    method setNumNulls (line 160) | @Override
    method hasNull (line 167) | @Override
    method numNulls (line 172) | @Override
    method isNullAt (line 178) | @Override
    method getBoolean (line 183) | @Override
    method getByte (line 188) | @Override
    method getShort (line 193) | @Override
    method getInt (line 198) | @Override
    method getLong (line 203) | @Override
    method getLongDecimal (line 208) | @Override
    method getFloat (line 213) | @Override
    method getDouble (line 218) | @Override
    method getUTF8String (line 223) | @Override
    method getBinary (line 228) | @Override
    method getArray (line 233) | @Override
    method getMap (line 238) | @Override
    method getChild (line 243) | @Override
    method getDictionaryProvider (line 249) | @Override
    method slice (line 254) | @Override
    method getValueVector (line 268) | @Override
    method close (line 273) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometStructVector.java
  class CometStructVector (line 32) | public class CometStructVector extends CometDecodedVector {
    method CometStructVector (line 36) | public CometStructVector(
    method getChild (line 53) | @Override
    method slice (line 58) | @Override

FILE: common/src/main/java/org/apache/comet/vector/CometVector.java
  class CometVector (line 45) | @IcebergApi
    method CometVector (line 64) | @IcebergApi
    method setNumNulls (line 74) | @IcebergApi
    method setNumValues (line 81) | @IcebergApi
    method numValues (line 85) | @IcebergApi
    method isFixedLength (line 89) | public boolean isFixedLength() {
    method getDecimal (line 93) | @Override
    method createDecimal (line 109) | private Decimal createDecimal(long unscaled, int precision, int scale) {
    method createDecimal (line 118) | private Decimal createDecimal(BigDecimal value, int precision, int sca...
    method getBinaryDecimal (line 130) | byte[] getBinaryDecimal(int i) {
    method copyBinaryDecimal (line 135) | public byte[] copyBinaryDecimal(int i, byte[] dest) {
    method getBoolean (line 152) | @Override
    method getByte (line 157) | @Override
    method getShort (line 162) | @Override
    method getInt (line 167) | @Override
    method getLong (line 172) | @Override
    method getLongDecimal (line 177) | public long getLongDecimal(int rowId) {
    method getFloat (line 181) | @Override
    method getDouble (line 186) | @Override
    method getUTF8String (line 191) | @Override
    method getBinary (line 196) | @Override
    method getArray (line 201) | @Override
    method getMap (line 206) | @Override
    method getChild (line 211) | @Override
    method close (line 216) | @Override
    method getDictionaryProvider (line 221) | public DictionaryProvider getDictionaryProvider() {
    method getValueVector (line 225) | @IcebergApi
    method slice (line 235) | @IcebergApi
    method getVector (line 245) | public static CometVector getVector(
    method getVector (line 271) | protected static CometVector getVector(ValueVector vector, boolean use...
    method notImplementedException (line 275) | private UnsupportedOperationException notImplementedException() {

FILE: common/src/test/java/org/apache/comet/parquet/TestColumnReader.java
  class TestColumnReader (line 36) | public class TestColumnReader {
    method testIsFixedLength (line 37) | @Test

FILE: common/src/test/java/org/apache/comet/parquet/TestCometInputFile.java
  class TestCometInputFile (line 25) | public class TestCometInputFile {
    method testIsAtLeastHadoop33 (line 26) | @Test

FILE: common/src/test/java/org/apache/comet/parquet/TestFileReader.java
  class TestFileReader (line 79) | @SuppressWarnings("deprecation")
    method testEnableReadParallel (line 130) | @Test
    method testReadWrite (line 144) | @Test
    method testBloomFilterReadWrite (line 241) | @Test
    method testReadWriteDataPageV2 (line 287) | @Test
    method testColumnIndexFilter (line 395) | @Test
    method testColumnIndexReadWrite (line 488) | @Test
    method testWriteReadMergeScanRange (line 621) | @Test
    method addBloomFilter (line 634) | private void addBloomFilter(ParquetFileWriter w, String s, BloomFilter...
    method validateContains (line 642) | private void validateContains(PageReadStore pages, String[] path, int ...
    method validatePage (line 649) | private void validatePage(DataPage page, int values, BytesInput bytes)...
    method validateV2Page (line 654) | private void validateV2Page(
    method createStatistics (line 676) | private Statistics<?> createStatistics(String min, String max, ColumnD...
    method assertStatsValuesEqual (line 684) | public static void assertStatsValuesEqual(Statistics<?> expected, Stat...
    method statsC1 (line 696) | private Statistics<?> statsC1(Binary... values) {
    method generateAndWriteData (line 714) | private HashMap<String, byte[][]> generateAndWriteData(
    method readAndValidatePageData (line 754) | private void readAndValidatePageData(
    method testReadWrite (line 776) | public void testReadWrite(Configuration configuration, int numPages, i...
    class Generator (line 792) | static class Generator {
      method getString (line 799) | private byte[] getString(int minSize, int maxSize) {
      method generateValues (line 808) | private byte[] generateValues(int numValues, String type) throws IOE...
    method statsC2 (line 824) | private Statistics<?> statsC2(Long... values) {

FILE: common/src/test/java/org/apache/comet/parquet/TestUtils.java
  class TestUtils (line 34) | public class TestUtils {
    method testBuildColumnDescriptorWithTimestamp (line 36) | @Test
    method testBuildColumnDescriptorWithDecimal (line 70) | @Test
    method testBuildColumnDescriptorWithIntLogicalType (line 100) | @Test
    method testBuildColumnDescriptorWithStringLogicalType (line 128) | @Test

FILE: dev/ci/check-suites.py
  function file_to_class_name (line 21) | def file_to_class_name(path: Path) -> str | None:

FILE: dev/release/generate-changelog.py
  function print_pulls (line 25) | def print_pulls(repo_name, title, pulls):
  function generate_changelog (line 35) | def generate_changelog(repo, repo_name, tag1, tag2, version):
  function resolve_ref (line 145) | def resolve_ref(ref):
  function cli (line 157) | def cli(args=None):

FILE: docs/generate-versions.py
  function get_major_minor_version (line 29) | def get_major_minor_version(version: str):
  function get_version_from_pom (line 33) | def get_version_from_pom():
  function replace_in_files (line 46) | def replace_in_files(root: str, filename_pattern: str, search: str, repl...
  function insert_warning_after_asf_header (line 55) | def insert_warning_after_asf_header(root: str, warning: str):
  function get_user_guide_dir (line 69) | def get_user_guide_dir(major_minor: str):
  function publish_released_version (line 75) | def publish_released_version(version: str):
  function generate_docs (line 85) | def generate_docs(snapshot_version: str, latest_released_version: str, p...

FILE: native/common/src/bin/analyze_trace.rs
  type TraceEvent (line 32) | struct TraceEvent {
  type MemorySnapshot (line 43) | struct MemorySnapshot {
  function format_bytes (line 49) | fn format_bytes(bytes: u64) -> String {
  function main (line 54) | fn main() {

FILE: native/common/src/error.rs
  type SparkError (line 23) | pub enum SparkError {
    method to_json (line 199) | pub fn to_json(&self) -> String {
    method error_type_name (line 220) | pub(crate) fn error_type_name(&self) -> &'static str {
    method params_as_json (line 269) | pub(crate) fn params_as_json(&self) -> serde_json::Value {
    method exception_class (line 489) | pub fn exception_class(&self) -> &'static str {
    method error_class (line 554) | pub(crate) fn error_class(&self) -> Option<&'static str> {
    method from (line 730) | fn from(value: ArrowError) -> Self {
  type SparkResult (line 633) | pub type SparkResult<T> = Result<T, SparkError>;
  function decimal_overflow_error (line 636) | pub fn decimal_overflow_error(value: i128, precision: u8, scale: i8) -> ...
  type SparkErrorWithContext (line 648) | pub struct SparkErrorWithContext {
    method new (line 657) | pub fn new(error: SparkError) -> Self {
    method with_context (line 665) | pub fn with_context(error: SparkError, context: Arc<crate::QueryContex...
    method to_json (line 673) | pub fn to_json(&self) -> String {
    method fmt (line 706) | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    method from (line 718) | fn from(error: SparkError) -> Self {
  method from (line 724) | fn from(value: SparkErrorWithContext) -> Self {
  method from (line 736) | fn from(value: SparkError) -> Self {
  function test_divide_by_zero_json (line 746) | fn test_divide_by_zero_json() {
  function test_remainder_by_zero_json (line 760) | fn test_remainder_by_zero_json() {
  function test_binary_overflow_json (line 769) | fn test_binary_overflow_json() {
  function test_invalid_array_index_json (line 789) | fn test_invalid_array_index_json() {
  function test_numeric_value_out_of_range_json (line 804) | fn test_numeric_value_out_of_range_json() {
  function test_cast_invalid_value_json (line 824) | fn test_cast_invalid_value_json() {
  function test_duplicated_map_key_json (line 841) | fn test_duplicated_map_key_json() {
  function test_null_map_key_json (line 854) | fn test_null_map_key_json() {
  function test_error_class_mapping (line 866) | fn test_error_class_mapping() {
  function test_exception_class_mapping (line 888) | fn test_exception_class_mapping() {

FILE: native/common/src/query_context.rs
  type QueryContext (line 32) | pub struct QueryContext {
    method new (line 63) | pub fn new(
    method char_index_to_byte_offset (line 85) | fn char_index_to_byte_offset(&self, char_index: usize) -> Option<usize> {
    method format_summary (line 101) | pub fn format_summary(&self) -> String {
    method fragment (line 159) | fn fragment(&self) -> String {
  type QueryContextMap (line 187) | pub struct QueryContextMap {
    method new (line 193) | pub fn new() -> Self {
    method register (line 206) | pub fn register(&self, expr_id: u64, context: QueryContext) {
    method get (line 217) | pub fn get(&self, expr_id: u64) -> Option<Arc<QueryContext>> {
    method clear (line 225) | pub fn clear(&self) {
    method len (line 231) | pub fn len(&self) -> usize {
    method is_empty (line 237) | pub fn is_empty(&self) -> bool {
  method default (line 243) | fn default() -> Self {
  function create_query_context_map (line 252) | pub fn create_query_context_map() -> Arc<QueryContextMap> {
  function test_query_context_creation (line 261) | fn test_query_context_creation() {
  function test_query_context_serialization (line 282) | fn test_query_context_serialization() {
  function test_format_summary (line 300) | fn test_format_summary() {
  function test_format_summary_without_object (line 319) | fn test_format_summary_without_object() {
  function test_fragment (line 329) | fn test_fragment() {
  function test_arc_string_sharing (line 336) | fn test_arc_string_sharing() {
  function test_json_with_optional_fields (line 346) | fn test_json_with_optional_fields() {
  function test_map_register_and_get (line 357) | fn test_map_register_and_get() {
  function test_map_get_nonexistent (line 370) | fn test_map_get_nonexistent() {
  function test_map_clear (line 376) | fn test_map_clear() {
  function test_fragment_non_ascii_accented (line 393) | fn test_fragment_non_ascii_accented() {

FILE: native/common/src/tracing.rs
  type Recorder (line 27) | pub struct Recorder {
    method new (line 39) | pub fn new() -> Self {
    method begin_task (line 58) | pub fn begin_task(&self, name: &str) {
    method end_task (line 62) | pub fn end_task(&self, name: &str) {
    method log_memory_usage (line 66) | pub fn log_memory_usage(&self, name: &str, usage_bytes: u64) {
    method log_event (line 78) | fn log_event(&self, name: &str, ph: &str) {
  method default (line 33) | fn default() -> Self {
  function get_thread_id (line 92) | pub fn get_thread_id() -> u64 {
  function trace_begin (line 101) | pub fn trace_begin(name: &str) {
  function trace_end (line 105) | pub fn trace_end(name: &str) {
  function log_memory_usage (line 109) | pub fn log_memory_usage(name: &str, value: u64) {
  function with_trace (line 113) | pub fn with_trace<T, F>(label: &str, tracing_enabled: bool, f: F) -> T
  function with_trace_async (line 130) | pub async fn with_trace_async<F, Fut, T>(label: &str, tracing_enabled: b...

FILE: native/common/src/utils.rs
  function bytes_to_i128 (line 20) | pub fn bytes_to_i128(slice: &[u8]) -> i128 {

FILE: native/core/benches/array_element_append.rs
  constant NUM_ELEMENTS (line 31) | const NUM_ELEMENTS: usize = 10000;
  function create_spark_unsafe_array_i32 (line 38) | fn create_spark_unsafe_array_i32(num_elements: usize, with_nulls: bool) ...
  function create_spark_unsafe_array_i64 (line 73) | fn create_spark_unsafe_array_i64(num_elements: usize, with_nulls: bool) ...
  function create_spark_unsafe_array_f64 (line 107) | fn create_spark_unsafe_array_f64(num_elements: usize, with_nulls: bool) ...
  function benchmark_array_conversion (line 140) | fn benchmark_array_conversion(c: &mut Criterion) {
  function config (line 263) | fn config() -> Criterion {

FILE: native/core/benches/bit_util.rs
  function criterion_benchmark (line 34) | fn criterion_benchmark(c: &mut Criterion) {
  function bench_get_vlq_int (line 199) | fn bench_get_vlq_int(reader: &mut BitReader) {
  function config (line 205) | fn config() -> Criterion {

FILE: native/core/benches/common.rs
  function seedable_rng (line 31) | pub fn seedable_rng() -> StdRng {
  function create_int64_array (line 35) | pub fn create_int64_array(size: usize, null_density: f32, min: i64, max:...
  function create_primitive_array (line 49) | pub fn create_primitive_array<T>(size: usize, null_density: f32) -> Prim...
  function create_dictionary_array (line 69) | pub fn create_dictionary_array<T>(

FILE: native/core/benches/parquet_decode.rs
  function criterion_benchmark (line 22) | fn criterion_benchmark(c: &mut Criterion) {
  function config (line 47) | fn config() -> Criterion {

FILE: native/core/benches/parquet_read.rs
  function bench (line 41) | fn bench(c: &mut Criterion) {
  function profiled (line 75) | fn profiled() -> Criterion {
  function build_test_schema (line 86) | fn build_test_schema() -> SchemaDescPtr {
  function seedable_rng (line 99) | fn seedable_rng() -> StdRng {
  constant NUM_PAGES (line 104) | const NUM_PAGES: usize = 1000;
  constant VALUES_PER_PAGE (line 105) | const VALUES_PER_PAGE: usize = 10_000;
  constant BATCH_SIZE (line 106) | const BATCH_SIZE: usize = 4096;
  function build_plain_int32_pages (line 108) | fn build_plain_int32_pages(
  type TestColumnReader (line 147) | struct TestColumnReader {
    method new (line 157) | pub fn new(
    method load_page (line 176) | fn load_page(&mut self) {
  type Item (line 186) | type Item = ArrayData;
  method next (line 188) | fn next(&mut self) -> Option<Self::Item> {

FILE: native/core/benches/perf.rs
  type FlamegraphProfiler (line 28) | pub struct FlamegraphProfiler<'a> {
  function new (line 34) | pub fn new(frequency: c_int) -> Self {
  method start_profiling (line 43) | fn start_profiling(&mut self, _benchmark_id: &str, _benchmark_dir: &Path) {
  method stop_profiling (line 47) | fn stop_profiling(&mut self, _benchmark_id: &str, benchmark_dir: &Path) {

FILE: native/core/src/common/bit.rs
  function from_ne_slice (line 28) | pub fn from_ne_slice<T: FromBytes>(bs: &[u8]) -> T {
  type FromBytes (line 38) | pub trait FromBytes: Sized {
    method from_le_bytes (line 40) | fn from_le_bytes(bs: Self::Buffer) -> Self;
    method from_be_bytes (line 41) | fn from_be_bytes(bs: Self::Buffer) -> Self;
    method from_ne_bytes (line 42) | fn from_ne_bytes(bs: Self::Buffer) -> Self;
    method from (line 43) | fn from(v: u64) -> Self;
    type Buffer (line 69) | type Buffer = [u8; 1];
    method from_le_bytes (line 70) | fn from_le_bytes(bs: Self::Buffer) -> Self {
    method from_be_bytes (line 73) | fn from_be_bytes(bs: Self::Buffer) -> Self {
    method from_ne_bytes (line 76) | fn from_ne_bytes(bs: Self::Buffer) -> Self {
    method from (line 83) | fn from(v: u64) -> Self {
  function read_num_bytes_u64 (line 109) | pub fn read_num_bytes_u64(size: usize, src: &[u8]) -> u64 {
  function read_num_bytes_u32 (line 122) | pub fn read_num_bytes_u32(size: usize, src: &[u8]) -> u32 {
  function read_u64 (line 133) | pub fn read_u64(src: &[u8]) -> u64 {
  function read_u32 (line 139) | pub fn read_u32(src: &[u8]) -> u32 {
  function memcpy (line 145) | pub fn memcpy(source: &[u8], target: &mut [u8]) {
  function memcpy_value (line 153) | pub fn memcpy_value<T>(source: &T, num_bytes: usize, target: &mut [u8])
  function log2 (line 168) | pub fn log2(mut x: u64) -> u32 {
  function trailing_bits (line 178) | pub fn trailing_bits(v: u64, num_bits: usize) -> u64 {
  function set_bit (line 189) | pub fn set_bit(bits: &mut [u8], i: usize) {
  function set_bit_raw (line 198) | pub unsafe fn set_bit_raw(bits: *mut u8, i: usize) {
  function unset_bit (line 203) | pub fn unset_bit(bits: &mut [u8], i: usize) {
  function set_bits (line 208) | pub fn set_bits(bits: &mut [u8], offset: usize, length: usize) {
  function mix_hash (line 239) | pub fn mix_hash(lower: u64, upper: u64) -> u64 {
  function get_bit (line 248) | pub fn get_bit(data: &[u8], i: usize) -> bool {
  function get_bit_raw (line 257) | pub unsafe fn get_bit_raw(ptr: *const u8, i: usize) -> bool {
  type BitWriter (line 263) | pub struct BitWriter {
    method new (line 273) | pub fn new(max_bytes: usize) -> Self {
    method new_from_buf (line 286) | pub fn new_from_buf(buffer: Vec<u8>, start: usize) -> Self {
    method extend (line 301) | pub fn extend(&mut self, increment: usize) {
    method capacity (line 309) | pub fn capacity(&mut self) -> usize {
    method consume (line 315) | pub fn consume(mut self) -> Vec<u8> {
    method flush_buffer (line 324) | pub fn flush_buffer(&mut self) -> &[u8] {
    method clear (line 331) | pub fn clear(&mut self) {
    method flush (line 339) | pub fn flush(&mut self) {
    method skip (line 360) | pub fn skip(&mut self, num_bytes: usize) -> Result<usize> {
    method get_next_byte_ptr (line 380) | pub fn get_next_byte_ptr(&mut self, num_bytes: usize) -> Result<&mut [...
    method bytes_written (line 386) | pub fn bytes_written(&self) -> usize {
    method buffer (line 391) | pub fn buffer(&self) -> &[u8] {
    method byte_offset (line 396) | pub fn byte_offset(&self) -> usize {
    method buffer_len (line 404) | pub fn buffer_len(&self) -> usize {
    method write_at (line 409) | pub fn write_at(&mut self, offset: usize, value: u8) {
    method put_value (line 419) | pub fn put_value(&mut self, v: u64, num_bits: usize) -> bool {
    method put_aligned (line 454) | pub fn put_aligned<T: AsBytes>(&mut self, val: T, num_bytes: usize) ->...
    method put_aligned_offset (line 474) | pub fn put_aligned_offset<T: AsBytes>(
    method put_vlq_int (line 495) | pub fn put_vlq_int(&mut self, mut v: u64) -> bool {
    method put_zigzag_vlq_int (line 512) | pub fn put_zigzag_vlq_int(&mut self, v: i64) -> bool {
  constant MAX_VLQ_BYTE_LEN (line 520) | pub const MAX_VLQ_BYTE_LEN: usize = 10;
  type BitReader (line 522) | pub struct BitReader {
    method new (line 549) | pub fn new(buf: Buffer, len: usize) -> Self {
    method new_all (line 564) | pub fn new_all(buf: Buffer) -> Self {
    method reset (line 569) | pub fn reset(&mut self, buf: Buffer) {
    method get_byte_offset (line 583) | pub fn get_byte_offset(&self) -> usize {
    method get_value (line 590) | pub fn get_value<T: FromBytes>(&mut self, num_bits: usize) -> Option<T> {
    method get_u32_value (line 613) | pub fn get_u32_value(&mut self, num_bits: usize) -> u32 {
    method get_u64_value (line 618) | fn get_u64_value(&mut self, num_bits: usize) -> u64 {
    method get_bits (line 645) | pub fn get_bits(&mut self, dst: &mut [u8], offset: usize, num_bits: us...
    method get_bits_buffered (line 693) | fn get_bits_buffered(&mut self, dst: &mut [u8], offset: usize, num_bit...
    method skip_bits (line 728) | pub fn skip_bits(&mut self, num_bits: usize) -> usize {
    method get_u32_batch (line 779) | pub unsafe fn get_u32_batch(&mut self, mut dst: *mut u32, total: usize...
    method get_batch (line 808) | pub fn get_batch<T: FromBytes>(&mut self, batch: &mut [T], num_bits: u...
    method get_aligned (line 889) | pub fn get_aligned<T: FromBytes>(&mut self, num_bytes: usize) -> Optio...
    method get_vlq_int (line 932) | pub fn get_vlq_int(&mut self) -> Option<i64> {
    method get_zigzag_vlq_int (line 959) | pub fn get_zigzag_vlq_int(&mut self) -> Option<i64> {
    method reload_buffer_values (line 966) | fn reload_buffer_values(&mut self) {
    method from (line 978) | fn from(vec: Vec<u8>) -> Self {
  function round_upto_power_of_2 (line 988) | pub fn round_upto_power_of_2(num: usize, factor: usize) -> usize {
  function test_read_num_bytes_u64 (line 1005) | fn test_read_num_bytes_u64() {
  function test_read_u64 (line 1016) | fn test_read_u64() {
  function test_read_num_bytes_u32 (line 1022) | fn test_read_num_bytes_u32() {
  function test_read_u32 (line 1033) | fn test_read_u32() {
  function test_bit_reader_get_byte_offset (line 1039) | fn test_bit_reader_get_byte_offset() {
  function test_bit_reader_get_value (line 1054) | fn test_bit_reader_get_value() {
  function test_bit_reader_get_value_boundary (line 1064) | fn test_bit_reader_get_value_boundary() {
  function test_bit_reader_get_aligned (line 1074) | fn test_bit_reader_get_aligned() {
  function test_bit_reader_get_vlq_int (line 1086) | fn test_bit_reader_get_vlq_int() {
  function test_bit_reader_get_zigzag_vlq_int (line 1095) | fn test_bit_reader_get_zigzag_vlq_int() {
  function test_set_bit (line 1105) | fn test_set_bit() {
  function test_set_bits (line 1124) | fn test_set_bits() {
  function test_get_bit (line 1139) | fn test_get_bit() {
  function test_log2 (line 1166) | fn test_log2() {
  function test_skip (line 1180) | fn test_skip() {
  function test_get_next_byte_ptr (line 1194) | fn test_get_next_byte_ptr() {
  function test_consume_flush_buffer (line 1208) | fn test_consume_flush_buffer() {
  function test_put_get_bool (line 1221) | fn test_put_get_bool() {
  function test_put_value_roundtrip (line 1272) | fn test_put_value_roundtrip() {
  function test_put_value_rand_numbers (line 1285) | fn test_put_value_rand_numbers(total: usize, num_bits: usize) {
  function test_get_bits (line 1314) | fn test_get_bits() {
  function test_skip_bits (line 1364) | fn test_skip_bits() {
  function test_get_batch (line 1407) | fn test_get_batch() {
  function test_get_batch_helper (line 1420) | fn test_get_batch_helper<T>(total: usize, num_bits: usize)
  function test_get_u32_batch (line 1454) | fn test_get_u32_batch() {
  function test_put_aligned_roundtrip (line 1484) | fn test_put_aligned_roundtrip() {
  function test_put_aligned_rand_numbers (line 1495) | fn test_put_aligned_rand_numbers<T>(total: usize, num_bits: usize)
  function test_put_vlq_int (line 1554) | fn test_put_vlq_int() {
  function test_put_zigzag_vlq_int (line 1579) | fn test_put_zigzag_vlq_int() {

FILE: native/core/src/common/buffer.rs
  type CometBuffer (line 41) | pub struct CometBuffer {
    method new (line 59) | pub fn new(capacity: usize) -> Self {
    method capacity (line 75) | pub fn capacity(&self) -> usize {
    method len (line 80) | pub fn len(&self) -> usize {
    method is_empty (line 85) | pub fn is_empty(&self) -> bool {
    method as_slice (line 90) | pub fn as_slice(&self) -> &[u8] {
    method as_slice_mut (line 95) | pub fn as_slice_mut(&mut self) -> &mut [u8] {
    method as_ptr (line 103) | pub const fn as_ptr(&self) -> *const u8 {
    method as_mut_ptr (line 110) | pub fn as_mut_ptr(&mut self) -> *mut u8 {
    method to_arrow (line 132) | pub unsafe fn to_arrow(&self) -> Result<ArrowBuffer, ExecutionError> {
    method check_reference (line 146) | pub fn check_reference(&self) -> Result<(), ExecutionError> {
    method reset (line 157) | pub fn reset(&mut self) {
    method resize (line 167) | pub fn resize(&mut self, new_capacity: usize) {
    method reallocate (line 181) | unsafe fn reallocate(
    type Target (line 229) | type Target = [u8];
    method deref (line 231) | fn deref(&self) -> &[u8] {
    method deref_mut (line 237) | fn deref_mut(&mut self) -> &mut [u8] {
    method from_ptr (line 258) | pub fn from_ptr(ptr: *const u8, len: usize, capacity: usize) -> Self {
    method extend_from_slice (line 279) | pub fn extend_from_slice(&mut self, offset: usize, src: &[u8]) {
  constant ALIGNMENT (line 55) | const ALIGNMENT: usize = 64;
  method drop (line 201) | fn drop(&mut self) {
  method eq (line 214) | fn eq(&self, other: &CometBuffer) -> bool {
  type CometBufferAllocation (line 244) | struct CometBufferAllocation {}
    method new (line 247) | fn new() -> Self {
  function test_buffer_new (line 297) | fn test_buffer_new() {
  function test_resize (line 305) | fn test_resize() {
  function test_extend_from_slice (line 321) | fn test_extend_from_slice() {
  function test_to_arrow (line 335) | fn test_to_arrow() {
  function test_unowned (line 353) | fn test_unowned() {

FILE: native/core/src/execution/columnar_to_row.rs
  constant MAX_LONG_DIGITS (line 49) | const MAX_LONG_DIGITS: u8 = 18;
  function write_bytes_padded (line 142) | fn write_bytes_padded(buffer: &mut Vec<u8>, bytes: &[u8]) -> usize {
  type TypedArray (line 153) | enum TypedArray<'a> {
  function from_array (line 183) | fn from_array(array: &'a ArrayRef) -> CometResult<Self> {
  function is_null (line 255) | fn is_null(&self, row_idx: usize) -> bool {
  function get_fixed_value (line 287) | fn get_fixed_value(&self, row_idx: usize) -> i64 {
  function is_variable_length (line 309) | fn is_variable_length(&self) -> bool {
  function write_variable_to_buffer (line 327) | fn write_variable_to_buffer(&self, buffer: &mut Vec<u8>, row_idx: usize)...
  type TypedElements (line 373) | enum TypedElements<'a> {
  function from_array (line 395) | fn from_array(array: &'a ArrayRef, element_type: &DataType) -> Self {
  function element_size (line 437) | fn element_size(&self) -> usize {
  function supports_bulk_copy (line 452) | fn supports_bulk_copy(&self) -> bool {
  function is_null_at (line 468) | fn is_null_at(&self, idx: usize) -> bool {
  function is_fixed_width (line 495) | fn is_fixed_width(&self) -> bool {
  function get_fixed_value (line 513) | fn get_fixed_value(&self, idx: usize) -> i64 {
  function write_variable_value (line 530) | fn write_variable_value(&self, buffer: &mut Vec<u8>, idx: usize) -> Come...
  function write_range_to_buffer (line 552) | fn write_range_to_buffer(
  function bulk_copy_range (line 599) | fn bulk_copy_range(
  function write_elements_slow (line 664) | fn write_elements_slow(
  function set_null_bit (line 808) | fn set_null_bit(buffer: &mut [u8], null_bitset_start: usize, idx: usize) {
  function is_fixed_width (line 820) | fn is_fixed_width(data_type: &DataType) -> bool {
  function is_all_fixed_width (line 838) | fn is_all_fixed_width(schema: &[DataType]) -> bool {
  type ColumnarToRowContext (line 847) | pub struct ColumnarToRowContext {
    method new (line 874) | pub fn new(schema: Vec<DataType>, batch_size: usize) -> Self {
    method calculate_bitset_width (line 904) | pub const fn calculate_bitset_width(num_fields: usize) -> usize {
    method round_up_to_8 (line 910) | const fn round_up_to_8(value: usize) -> usize {
    method convert (line 927) | pub fn convert(
    method maybe_cast_to_schema_type (line 998) | fn maybe_cast_to_schema_type(
    method convert_fixed_width (line 1084) | fn convert_fixed_width(
    method write_column_fixed_width (line 1123) | fn write_column_fixed_width(
    method write_row_typed (line 1258) | fn write_row_typed(
    method buffer_ptr (line 1322) | pub fn buffer_ptr(&self) -> *const u8 {
    method schema (line 1327) | pub fn schema(&self) -> &[DataType] {
  function get_field_value (line 1334) | fn get_field_value(data_type: &DataType, array: &ArrayRef, row_idx: usiz...
  function write_dictionary_to_buffer (line 1414) | fn write_dictionary_to_buffer(
  function write_dictionary_to_buffer_with_key (line 1455) | fn write_dictionary_to_buffer_with_key<K: ArrowDictionaryKeyType>(
  function i128_to_spark_decimal_bytes (line 1507) | fn i128_to_spark_decimal_bytes(value: i128) -> Vec<u8> {
  function round_up_to_8 (line 1536) | const fn round_up_to_8(value: usize) -> usize {
  function write_array_element (line 1542) | fn write_array_element(buffer: &mut [u8], data_type: &DataType, value: i...
  function write_struct_to_buffer_typed (line 1577) | fn write_struct_to_buffer_typed(
  function write_struct_to_buffer (line 1628) | fn write_struct_to_buffer(
  function write_list_to_buffer (line 1712) | fn write_list_to_buffer(
  function write_large_list_to_buffer (line 1738) | fn write_large_list_to_buffer(
  function write_map_to_buffer (line 1763) | fn write_map_to_buffer(
  function write_nested_variable_to_buffer (line 1816) | fn write_nested_variable_to_buffer(
  function test_bitset_width_calculation (line 1895) | fn test_bitset_width_calculation() {
  function test_round_up_to_8 (line 1905) | fn test_round_up_to_8() {
  function test_convert_int_array (line 1914) | fn test_convert_int_array() {
  function test_convert_multiple_columns (line 1934) | fn test_convert_multiple_columns() {
  function test_fixed_width_fast_path (line 1956) | fn test_fixed_width_fast_path() {
  function test_mixed_schema_uses_general_path (line 2015) | fn test_mixed_schema_uses_general_path() {
  function test_convert_string_array (line 2028) | fn test_convert_string_array() {
  function test_i128_to_spark_decimal_bytes (line 2048) | fn test_i128_to_spark_decimal_bytes() {
  function test_list_data_conversion (line 2063) | fn test_list_data_conversion() {
  function test_list_data_conversion_multiple_rows (line 2102) | fn test_list_data_conversion_multiple_rows() {
  function test_map_data_conversion (line 2148) | fn test_map_data_conversion() {
  function test_map_data_conversion_multiple_rows (line 2235) | fn test_map_data_conversion_multiple_rows() {
  function test_map_data_conversion_sliced_maparray (line 2343) | fn test_map_data_conversion_sliced_maparray() {
  function test_large_list_data_conversion (line 2423) | fn test_large_list_data_conversion() {
  function test_convert_fixed_size_binary_array (line 2464) | fn test_convert_fixed_size_binary_array() {
  function test_convert_dictionary_decimal_array (line 2503) | fn test_convert_dictionary_decimal_array() {
  function test_convert_int32_to_decimal128 (line 2549) | fn test_convert_int32_to_decimal128() {
  function test_convert_int64_to_decimal128 (line 2587) | fn test_convert_int64_to_decimal128() {

FILE: native/core/src/execution/expressions/arithmetic.rs
  type CheckedBinaryExpr (line 34) | pub struct CheckedBinaryExpr {
    method new (line 42) | pub fn new(child: Arc<dyn PhysicalExpr>, query_context: Option<Arc<Que...
    method eq (line 65) | fn eq(&self, other: &dyn Any) -> bool {
  method fmt (line 51) | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  method eq (line 57) | fn eq(&self, other: &Self) -> bool {
  method hash (line 74) | fn hash<H: Hasher>(&self, state: &mut H) {
  method as_any (line 80) | fn as_any(&self) -> &dyn Any {
  method fmt_sql (line 84) | fn fmt_sql(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  method data_type (line 88) | fn data_type(&self, input_schema: &Schema) -> datafusion::common::Result...
  method nullable (line 92) | fn nullable(&self, input_schema: &Schema) -> datafusion::common::Result<...
  method evaluate (line 96) | fn evaluate(&self, batch: &RecordBatch) -> datafusion::common::Result<Co...
  method children (line 116) | fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
  method with_new_children (line 120) | fn with_new_children(
  type IntegralDivideBuilder (line 202) | pub struct IntegralDivideBuilder;
  method build (line 205) | fn build(
  type RemainderBuilder (line 229) | pub struct RemainderBuilder;
  method build (line 232) | fn build(
  type UnaryMinusBuilder (line 259) | pub struct UnaryMinusBuilder;
  method build (line 262) | fn build(

FILE: native/core/src/execution/expressions/partition.rs
  type SparkPartitionIdBuilder (line 29) | pub struct SparkPartitionIdBuilder;
  method build (line 32) | fn build(
  type MonotonicallyIncreasingIdBuilder (line 44) | pub struct MonotonicallyIncreasingIdBuilder;
  method build (line 47) | fn build(

FILE: native/core/src/execution/expressions/random.rs
  type RandBuilder (line 28) | pub struct RandBuilder;
  method build (line 31) | fn build(
  type RandnBuilder (line 43) | pub struct RandnBuilder;
  method build (line 46) | fn build(

FILE: native/core/src/execution/expressions/strings.rs
  type SubstringBuilder (line 38) | pub struct SubstringBuilder;
  method build (line 41) | fn build(
  type LikeBuilder (line 63) | pub struct LikeBuilder;
  method build (line 66) | fn build(
  type RlikeBuilder (line 81) | pub struct RlikeBuilder;
  method build (line 84) | fn build(
  type FromJsonBuilder (line 103) | pub struct FromJsonBuilder;
  method build (line 106) | fn build(

FILE: native/core/src/execution/expressions/subquery.rs
  type Subquery (line 39) | pub struct Subquery {
    method new (line 50) | pub fn new(exec_context_id: i64, id: i64, data_type: DataType) -> Self {
  method fmt (line 60) | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  method as_any (line 66) | fn as_any(&self) -> &dyn Any {
  method fmt_sql (line 70) | fn fmt_sql(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  method data_type (line 74) | fn data_type(&self, _: &Schema) -> datafusion::common::Result<DataType> {
  method nullable (line 78) | fn nullable(&self, _: &Schema) -> datafusion::common::Result<bool> {
  method evaluate (line 82) | fn evaluate(&self, _: &RecordBatch) -> datafusion::common::Result<Column...
  method children (line 192) | fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
  method with_new_children (line 196) | fn with_new_children(

FILE: native/core/src/execution/expressions/temporal.rs
  type HourBuilder (line 38) | pub struct HourBuilder;
  method build (line 41) | fn build(
  type MinuteBuilder (line 65) | pub struct MinuteBuilder;
  method build (line 68) | fn build(
  type SecondBuilder (line 92) | pub struct SecondBuilder;
  method build (line 95) | fn build(
  type UnixTimestampBuilder (line 119) | pub struct UnixTimestampBuilder;
  method build (line 122) | fn build(
  type TruncTimestampBuilder (line 147) | pub struct TruncTimestampBuilder;
  method build (line 150) | fn build(
  type HoursTransformBuilder (line 165) | pub struct HoursTransformBuilder;
  method build (line 168) | fn build(

FILE: native/core/src/execution/jni_api.rs
  function log_jemalloc_usage (line 112) | fn log_jemalloc_usage() {
  type ThreadPoolMap (line 121) | type ThreadPoolMap = HashMap<u64, HashMap<i64, Arc<dyn MemoryPool>>>;
  function get_thread_memory_pools (line 125) | fn get_thread_memory_pools() -> &'static Mutex<ThreadPoolMap> {
  function register_memory_pool (line 129) | fn register_memory_pool(thread_id: u64, context_id: i64, pool: Arc<dyn M...
  function unregister_and_total (line 138) | fn unregister_and_total(thread_id: u64, context_id: i64) -> usize {
  function total_reserved_for_thread (line 158) | fn total_reserved_for_thread(thread_id: u64) -> usize {
  function parse_usize_env_var (line 176) | fn parse_usize_env_var(name: &str) -> Option<usize> {
  function build_runtime (line 180) | fn build_runtime(default_worker_threads: Option<usize>) -> Runtime {
  function init_runtime (line 202) | pub fn init_runtime(default_worker_threads: usize) {
  function get_runtime (line 207) | pub fn get_runtime() -> &'static Runtime {
  function op_name (line 212) | fn op_name(op: &OpStruct) -> &'static str {
  function build_tracing_event_name (line 235) | fn build_tracing_event_name(plan: &Operator) -> String {
  function collect_op_names (line 248) | fn collect_op_names<'a>(op: &'a Operator, names: &mut std::collections::...
  type ExecutionContext (line 258) | struct ExecutionContext {
  function Java_org_apache_comet_Native_createPlan (line 311) | pub unsafe extern "system" fn Java_org_apache_comet_Native_createPlan(
  function prepare_datafusion_session_context (line 486) | fn prepare_datafusion_session_context(
  function register_datafusion_spark_function (line 542) | fn register_datafusion_spark_function(session_ctx: &SessionContext) {
  function prepare_output (line 571) | fn prepare_output(
  function pull_input_batches (line 648) | fn pull_input_batches(exec_context: &mut ExecutionContext) -> Result<(),...
  function Java_org_apache_comet_Native_executePlan (line 664) | pub unsafe extern "system" fn Java_org_apache_comet_Native_executePlan(
  function Java_org_apache_comet_Native_releasePlan (line 855) | pub extern "system" fn Java_org_apache_comet_Native_releasePlan(
  function update_metrics (line 886) | fn update_metrics(env: &mut Env, exec_context: &mut ExecutionContext) ->...
  function log_plan_metrics (line 895) | fn log_plan_metrics(exec_context: &ExecutionContext, stage_id: jint, par...
  function convert_datatype_arrays (line 910) | fn convert_datatype_arrays(
  function get_execution_context (line 929) | fn get_execution_context<'a>(id: i64) -> &'a mut ExecutionContext {
  function Java_org_apache_comet_Native_writeSortedFileNative (line 941) | pub unsafe extern "system" fn Java_org_apache_comet_Native_writeSortedFi...
  function Java_org_apache_comet_Native_sortRowPartitionsNative (line 1023) | pub extern "system" fn Java_org_apache_comet_Native_sortRowPartitionsNat...
  function Java_org_apache_comet_Native_decodeShuffleBlock (line 1056) | pub unsafe extern "system" fn Java_org_apache_comet_Native_decodeShuffle...
  function Java_org_apache_comet_Native_traceBegin (line 1079) | pub unsafe extern "system" fn Java_org_apache_comet_Native_traceBegin(
  function Java_org_apache_comet_Native_traceEnd (line 1094) | pub unsafe extern "system" fn Java_org_apache_comet_Native_traceEnd(
  function Java_org_apache_comet_Native_logMemoryUsage (line 1109) | pub unsafe extern "system" fn Java_org_apache_comet_Native_logMemoryUsage(
  function Java_org_apache_comet_Native_getRustThreadId (line 1125) | pub extern "system" fn Java_org_apache_comet_Native_getRustThreadId(
  function Java_org_apache_comet_Native_columnarToRowInit (line 1145) | pub unsafe extern "system" fn Java_org_apache_comet_Native_columnarToRow...
  function Java_org_apache_comet_Native_columnarToRowConvert (line 1167) | pub unsafe extern "system" fn Java_org_apache_comet_Native_columnarToRow...
  function Java_org_apache_comet_Native_columnarToRowClose (line 1255) | pub unsafe extern "system" fn Java_org_apache_comet_Native_columnarToRow...

FILE: native/core/src/execution/memory_pools/config.rs
  type MemoryPoolType (line 21) | pub(crate) enum MemoryPoolType {
    method is_task_shared (line 34) | pub(crate) fn is_task_shared(&self) -> bool {
  type MemoryPoolConfig (line 45) | pub(crate) struct MemoryPoolConfig {
    method new (line 51) | pub(crate) fn new(pool_type: MemoryPoolType, pool_size: usize) -> Self {
  function parse_memory_pool_config (line 59) | pub(crate) fn parse_memory_pool_config(

FILE: native/core/src/execution/memory_pools/fair_pool.rs
  type CometFairMemoryPool (line 36) | pub struct CometFairMemoryPool {
    method new (line 59) | pub fn new(
    method acquire (line 70) | fn acquire(&self, additional: usize) -> CometResult<i64> {
    method release (line 78) | fn release(&self, size: usize) -> CometResult<()> {
  type CometFairPoolState (line 42) | struct CometFairPoolState {
  method fmt (line 48) | fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
  method register (line 90) | fn register(&self, _: &MemoryConsumer) {
  method unregister (line 98) | fn unregister(&self, _: &MemoryConsumer) {
  method grow (line 106) | fn grow(&self, _reservation: &MemoryReservation, additional: usize) {
  method shrink (line 110) | fn shrink(&self, _reservation: &MemoryReservation, subtractive: usize) {
  method try_grow (line 128) | fn try_grow(
  method reserved (line 172) | fn reserved(&self) -> usize {

FILE: native/core/src/execution/memory_pools/logging_pool.rs
  type LoggingMemoryPool (line 25) | pub(crate) struct LoggingMemoryPool {
    method new (line 31) | pub fn new(task_attempt_id: u64, pool: Arc<dyn MemoryPool>) -> Self {
  method register (line 40) | fn register(&self, consumer: &MemoryConsumer) {
  method unregister (line 49) | fn unregister(&self, consumer: &MemoryConsumer) {
  method grow (line 58) | fn grow(&self, reservation: &MemoryReservation, additional: usize) {
  method shrink (line 68) | fn shrink(&self, reservation: &MemoryReservation, shrink: usize) {
  method try_grow (line 78) | fn try_grow(
  method reserved (line 105) | fn reserved(&self) -> usize {
  method memory_limit (line 109) | fn memory_limit(&self) -> MemoryLimit {

FILE: native/core/src/execution/memory_pools/mod.rs
  function create_memory_pool (line 37) | pub(crate) fn create_memory_pool(

FILE: native/core/src/execution/memory_pools/task_shared.rs
  type PerTaskMemoryPool (line 28) | pub(crate) struct PerTaskMemoryPool {
    method new (line 34) | pub(crate) fn new(memory_pool: Arc<dyn MemoryPool>) -> Self {
  function handle_task_shared_pool_release (line 44) | pub(crate) fn handle_task_shared_pool_release(pool_type: MemoryPoolType,...

FILE: native/core/src/execution/memory_pools/unified_pool.rs
  type CometUnifiedMemoryPool (line 37) | pub struct CometUnifiedMemoryPool {
    method new (line 52) | pub fn new(
    method acquire_from_spark (line 64) | fn acquire_from_spark(&self, additional: usize) -> CometResult<i64> {
    method release_to_spark (line 73) | fn release_to_spark(&self, size: usize) -> CometResult<()> {
  method fmt (line 44) | fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
  method drop (line 82) | fn drop(&mut self) {
  method grow (line 97) | fn grow(&self, reservation: &MemoryReservation, additional: usize) {
  method shrink (line 101) | fn shrink(&self, _: &MemoryReservation, size: usize) {
  method try_grow (line 119) | fn try_grow(&self, _: &MemoryReservation, additional: usize) -> Result<(...
  method reserved (line 151) | fn reserved(&self) -> usize {

FILE: native/core/src/execution/metrics/utils.rs
  function update_comet_metric (line 30) | pub(crate) fn update_comet_metric(
  function to_native_metric_node (line 45) | pub(crate) fn to_native_metric_node(

FILE: native/core/src/execution/mod.rs
  function it_works (line 38) | fn it_works() {

FILE: native/core/src/execution/operators/copy.rs
  type CopyMode (line 26) | pub enum CopyMode {
  function copy_array (line 34) | pub(crate) fn copy_array(array: &dyn Array) -> ArrayRef {
  function copy_or_unpack_array (line 70) | pub(crate) fn copy_or_unpack_array(

FILE: native/core/src/execution/operators/csv_scan.rs
  function init_csv_datasource_exec (line 32) | pub fn init_csv_datasource_exec(
  function build_csv_source (line 55) | fn build_csv_source(schema: SchemaRef, options: &CsvOptions) -> Result<A...
  function string_to_u8 (line 81) | fn string_to_u8(option: &str, option_name: &str) -> Result<u8> {

FILE: native/core/src/execution/operators/expand.rs
  type ExpandExec (line 41) | pub struct ExpandExec {
    method new (line 50) | pub fn new(
  method fmt_as (line 72) | fn fmt_as(&self, t: DisplayFormatType, f: &mut std::fmt::Formatter) -> s...
  method as_any (line 94) | fn as_any(&self) -> &dyn Any {
  method schema (line 98) | fn schema(&self) -> SchemaRef {
  method children (line 102) | fn children(&self) -> Vec<&Arc<dyn ExecutionPlan>> {
  method with_new_children (line 106) | fn with_new_children(
  method execute (line 118) | fn execute(
  method properties (line 132) | fn properties(&self) -> &Arc<PlanProperties> {
  method name (line 136) | fn name(&self) -> &str {
  type ExpandStream (line 141) | pub struct ExpandStream {
    method new (line 152) | pub fn new(
    method expand (line 168) | fn expand(
  type Item (line 189) | type Item = datafusion::common::Result<RecordBatch>;
  method poll_next (line 191) | fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Opt...
  method schema (line 218) | fn schema(&self) -> SchemaRef {

FILE: native/core/src/execution/operators/iceberg_scan.rs
  type IcebergScanExec (line 55) | pub struct IcebergScanExec {
    method new (line 73) | pub fn new(
    method compute_properties (line 96) | fn compute_properties(schema: SchemaRef, num_partitions: usize) -> Arc...
    method execute_with_tasks (line 150) | fn execute_with_tasks(
    method storage_factory_for (line 195) | fn storage_factory_for(path: &str) -> Result<Arc<dyn StorageFactory>, ...
    method load_file_io (line 215) | fn load_file_io(
  method name (line 107) | fn name(&self) -> &str {
  method as_any (line 111) | fn as_any(&self) -> &dyn Any {
  method schema (line 115) | fn schema(&self) -> SchemaRef {
  method properties (line 119) | fn properties(&self) -> &Arc<PlanProperties> {
  method children (line 123) | fn children(&self) -> Vec<&Arc<dyn ExecutionPlan>> {
  method with_new_children (line 127) | fn with_new_children(
  method execute (line 134) | fn execute(
  method metrics (line 142) | fn metrics(&self) -> Option<MetricsSet> {
  type IcebergScanMetrics (line 231) | struct IcebergScanMetrics {
    method new (line 239) | fn new(metrics: &ExecutionPlanMetricsSet) -> Self {
  type IcebergStreamWrapper (line 250) | struct IcebergStreamWrapper<S> {
  type CachedProjection (line 263) | struct CachedProjection {
  type Item (line 272) | type Item = DFResult<RecordBatch>;
  method poll_next (line 274) | fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Opt...
  method schema (line 325) | fn schema(&self) -> SchemaRef {
  method fmt_as (line 331) | fn fmt_as(&self, _t: DisplayFormatType, f: &mut fmt::Formatter) -> fmt::...
  function build_projection_expressions (line 345) | fn build_projection_expressions(
  function adapt_batch_with_expressions (line 367) | fn adapt_batch_with_expressions(

FILE: native/core/src/execution/operators/parquet_writer.rs
  type ParquetWriter (line 62) | enum ParquetWriter {
    method write (line 80) | async fn write(
    method close (line 135) | async fn close(self) -> std::result::Result<(), parquet::errors::Parqu...
  type ParquetWriterExec (line 194) | pub struct ParquetWriterExec {
    method try_new (line 222) | pub fn try_new(
    method compression_to_parquet (line 258) | fn compression_to_parquet(&self) -> Result<Compression> {
    method create_arrow_writer (line 279) | fn create_arrow_writer(
  method fmt_as (line 391) | fn fmt_as(&self, t: DisplayFormatType, f: &mut Formatter) -> fmt::Result {
  method as_any (line 407) | fn as_any(&self) -> &dyn Any {
  method name (line 411) | fn name(&self) -> &str {
  method metrics (line 415) | fn metrics(&self) -> Option<MetricsSet> {
  method properties (line 419) | fn properties(&self) -> &Arc<PlanProperties> {
  method schema (line 423) | fn schema(&self) -> SchemaRef {
  method children (line 427) | fn children(&self) -> Vec<&Arc<dyn ExecutionPlan>> {
  method with_new_children (line 431) | fn with_new_children(
  method execute (line 453) | fn execute(
  function create_test_record_batch (line 587) | fn create_test_record_batch(batch_id: i32) -> Result<RecordBatch> {
  function test_write_to_hdfs_sync (line 612) | async fn test_write_to_hdfs_sync() -> Result<()> {
  function test_write_to_hdfs_streaming (line 663) | async fn test_write_to_hdfs_streaming() -> Result<()> {
  function test_parquet_writer_streaming (line 744) | async fn test_parquet_writer_streaming() -> Result<()> {
  function test_parquet_writer_exec_with_memory_input (line 797) | async fn test_parquet_writer_exec_with_memory_input() -> Result<()> {

FILE: native/core/src/execution/operators/projection.rs
  type ProjectionBuilder (line 35) | pub struct ProjectionBuilder;
  method build (line 38) | fn build(

FILE: native/core/src/execution/operators/scan.rs
  type ScanExec (line 57) | pub struct ScanExec {
    method new (line 83) | pub fn new(
    method unpack_dictionary_type (line 121) | fn unpack_dictionary_type(dt: &DataType) -> DataType {
    method set_input_batch (line 130) | pub fn set_input_batch(&mut self, input: InputBatch) {
    method get_next_batch (line 135) | pub fn get_next_batch(&mut self) -> Result<(), CometError> {
    method get_next (line 159) | fn get_next(
    method allocate_and_fetch_batch (line 262) | fn allocate_and_fetch_batch(
    method get_selection_indices (line 309) | fn get_selection_indices(
  function schema_from_data_types (line 371) | fn schema_from_data_types(data_types: &[DataType]) -> SchemaRef {
  method as_any (line 386) | fn as_any(&self) -> &dyn Any {
  method schema (line 390) | fn schema(&self) -> SchemaRef {
  method children (line 394) | fn children(&self) -> Vec<&Arc<dyn ExecutionPlan>> {
  method with_new_children (line 398) | fn with_new_children(
  method execute (line 405) | fn execute(
  method properties (line 418) | fn properties(&self) -> &Arc<PlanProperties> {
  method name (line 422) | fn name(&self) -> &str {
  method metrics (line 426) | fn metrics(&self) -> Option<MetricsSet> {
  method fmt_as (line 432) | fn fmt_as(&self, t: DisplayFormatType, f: &mut std::fmt::Formatter) -> s...
  type ScanStream (line 451) | struct ScanStream<'a> {
  function new (line 465) | pub fn new(
  function build_record_batch (line 481) | fn build_record_batch(
  type Item (line 514) | type Item = DataFusionResult<RecordBatch>;
  method poll_next (line 516) | fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<S...
  method schema (line 547) | fn schema(&self) -> SchemaRef {
  type InputBatch (line 553) | pub enum InputBatch {
    method new (line 567) | pub fn new(columns: Vec<ArrayRef>, num_rows: Option<usize>) -> Self {

FILE: native/core/src/execution/operators/shuffle_scan.rs
  type ShuffleScanExec (line 52) | pub struct ShuffleScanExec {
    method new (line 74) | pub fn new(
    method set_input_batch (line 106) | pub fn set_input_batch(&mut self, input: InputBatch) {
    method get_next_batch (line 112) | pub fn get_next_batch(&mut self) -> Result<(), CometError> {
    method get_next (line 136) | fn get_next(
  function unpack_dictionary (line 205) | fn unpack_dictionary(array: &ArrayRef) -> ArrayRef {
  function schema_from_data_types (line 213) | fn schema_from_data_types(data_types: &[DataType]) -> SchemaRef {
  method as_any (line 224) | fn as_any(&self) -> &dyn Any {
  method schema (line 228) | fn schema(&self) -> SchemaRef {
  method children (line 232) | fn children(&self) -> Vec<&Arc<dyn ExecutionPlan>> {
  method with_new_children (line 236) | fn with_new_children(
  method execute (line 243) | fn execute(
  method properties (line 255) | fn properties(&self) -> &Arc<PlanProperties> {
  method name (line 259) | fn name(&self) -> &str {
  method metrics (line 263) | fn metrics(&self) -> Option<MetricsSet> {
  method fmt_as (line 269) | fn fmt_as(&self, t: DisplayFormatType, f: &mut std::fmt::Formatter) -> s...
  type ShuffleScanStream (line 287) | struct ShuffleScanStream {
    method new (line 295) | pub fn new(
  type Item (line 308) | type Item = DataFusionResult<arrow::array::RecordBatch>;
  method poll_next (line 310) | fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<S...
  method schema (line 347) | fn schema(&self) -> SchemaRef {
  function test_read_compressed_ipc_block (line 366) | fn test_read_compressed_ipc_block() {
  function test_dictionary_encoded_shuffle_block_is_unpacked (line 412) | fn test_dictionary_encoded_shuffle_block_is_unpacked() {

FILE: native/core/src/execution/planner.rs
  type PhyAggResult (line 138) | type PhyAggResult = Result<Vec<AggregateFunctionExpr>, ExecutionError>;
  type PhyExprResult (line 139) | type PhyExprResult = Result<Vec<(Arc<dyn PhysicalExpr>, String)>, Execut...
  type PartitionPhyExprResult (line 140) | type PartitionPhyExprResult = Result<Vec<Arc<dyn PhysicalExpr>>, Executi...
  type PlanCreationResult (line 141) | pub type PlanCreationResult =
  type JoinParameters (line 144) | struct JoinParameters {
  type BinaryExprOptions (line 153) | pub struct BinaryExprOptions {
  constant TEST_EXEC_CONTEXT_ID (line 157) | pub const TEST_EXEC_CONTEXT_ID: i64 = -1;
  type PhysicalPlanner (line 160) | pub struct PhysicalPlanner {
    method new (line 175) | pub fn new(session_ctx: Arc<SessionContext>, partition: i32) -> Self {
    method with_exec_id (line 184) | pub fn with_exec_id(self, exec_context_id: i64) -> Self {
    method session_ctx (line 194) | pub fn session_ctx(&self) -> &Arc<SessionContext> {
    method partition (line 199) | pub fn partition(&self) -> i32 {
    method get_partitioned_files (line 204) | fn get_partitioned_files(
    method create_expr (line 256) | pub(crate) fn create_expr(
    method create_sort_expr (line 690) | fn create_sort_expr<'a>(
    method create_binary_expr (line 716) | pub fn create_binary_expr(
    method create_binary_expr_with_options (line 739) | pub fn create_binary_expr_with_options(
    method create_plan (line 915) | pub(crate) fn create_plan<'a>(
    method parse_join_parameters (line 1803) | fn parse_join_parameters(
    method create_agg_expr (line 1946) | fn create_agg_expr(
    method create_window_expr (line 2273) | fn create_window_expr<'a>(
    method process_agg_func (line 2447) | fn process_agg_func(
    method find_df_window_function (line 2488) | fn find_df_window_function(&self, name: &str) -> Option<WindowFunction...
    method create_partitioning (line 2503) | fn create_partitioning(
    method create_scalar_function_expr (line 2598) | fn create_scalar_function_expr(
    method create_aggr_func_expr (line 2747) | fn create_aggr_func_expr(
  method default (line 169) | fn default() -> Self {
  function expr_to_columns (line 2765) | fn expr_to_columns(
  type JoinFilterRewriter (line 2799) | struct JoinFilterRewriter<'a> {
  function new (line 2807) | fn new<'a>(
  type Node (line 2823) | type Node = Arc<dyn PhysicalExpr>;
  method f_down (line 2825) | fn f_down(&mut self, node: Self::Node) -> datafusion::common::Result<Tra...
  function rewrite_physical_expr (line 2874) | fn rewrite_physical_expr(
  function from_protobuf_eval_mode (line 2891) | pub fn from_protobuf_eval_mode(value: i32) -> Result<EvalMode, prost::Un...
  function convert_spark_types_to_arrow_schema (line 2899) | fn convert_spark_types_to_arrow_schema(
  function partition_value_to_literal (line 2918) | fn partition_value_to_literal(
  function partition_data_to_struct (line 3001) | fn partition_data_to_struct(
  function parse_file_scan_tasks_from_common (line 3020) | fn parse_file_scan_tasks_from_common(
  function create_case_expr (line 3215) | fn create_case_expr(
  function from_protobuf_binary_output_style (line 3272) | fn from_protobuf_binary_output_style(
  function literal_to_array_ref (line 3284) | fn literal_to_array_ref(
  function convert_spark_expr_to_predicate (line 3475) | fn convert_spark_expr_to_predicate(
  function convert_binary_to_predicate (line 3606) | fn convert_binary_to_predicate(
  function extract_column_reference (line 3654) | fn extract_column_reference(expr: &spark_expression::Expr) -> Option<Str...
  function extract_literal_as_datum (line 3663) | fn extract_literal_as_datum(expr: &spark_expression::Expr) -> Option<ice...
  function needs_fields_coercion (line 3711) | fn needs_fields_coercion(sig: &TypeSignature) -> bool {
  function test_unpack_dictionary_primitive (line 3764) | fn test_unpack_dictionary_primitive() {
  constant STRING_TYPE_ID (line 3827) | const STRING_TYPE_ID: i32 = 7;
  function test_unpack_dictionary_string (line 3830) | fn test_unpack_dictionary_string() {
  function to_datafusion_filter (line 3906) | async fn to_datafusion_filter() {
  function from_datafusion_error_to_comet (line 3929) | async fn from_datafusion_error_to_comet() {
  function create_filter (line 3938) | fn create_filter(child_op: spark_operator::Operator, value: i32) -> spar...
  function create_filter_literal (line 3951) | fn create_filter_literal(
  function spark_plan_metrics_filter (line 3992) | fn spark_plan_metrics_filter() {
  function spark_plan_metrics_hash_join (line 4006) | fn spark_plan_metrics_hash_join() {
  function create_bound_reference (line 4031) | fn create_bound_reference(index: i32) -> Expr {
  function create_scan (line 4042) | fn create_scan() -> Operator {
  function create_proto_datatype (line 4054) | fn create_proto_datatype() -> spark_expression::DataType {
  function test_create_array (line 4062) | fn test_create_array() {
  function test_array_repeat (line 4189) | fn test_array_repeat() {
  function make_parquet_data (line 4326) | async fn make_parquet_data(
  function test_nested_types_list_of_struct_by_index (line 4390) | async fn test_nested_types_list_of_struct_by_index() -> Result<(), DataF...
  function test_nested_types_map_keys (line 4431) | async fn test_nested_types_map_keys() -> Result<(), DataFusionError> {
  function test_nested_types_extract_missing_struct_names_non_overlap (line 4482) | async fn test_nested_types_extract_missing_struct_names_non_overlap(
  function test_nested_types_extract_missing_struct_names_single_field (line 4501) | async fn test_nested_types_extract_missing_struct_names_single_field(
  function test_nested_types_extract_missing_struct_names_missing_field (line 4523) | async fn test_nested_types_extract_missing_struct_names_missing_field(
  function test_literal_to_list (line 4547) | async fn test_literal_to_list() -> Result<(), DataFusionError> {
  function test_date_sub_with_int8_cast_error (line 4676) | fn test_date_sub_with_int8_cast_error() {

FILE: native/core/src/execution/planner/expression_registry.rs
  type ExpressionBuilder (line 30) | pub trait ExpressionBuilder: Send + Sync {
    method build (line 32) | fn build(
  type ExpressionType (line 42) | pub enum ExpressionType {
  type ExpressionRegistry (line 117) | pub struct ExpressionRegistry {
    method new (line 123) | fn new() -> Self {
    method global (line 133) | pub fn global() -> &'static ExpressionRegistry {
    method can_handle (line 139) | pub fn can_handle(&self, spark_expr: &Expr) -> bool {
    method create_expr (line 148) | pub fn create_expr(
    method register_all_expressions (line 167) | fn register_all_expressions(&mut self) {
    method register_arithmetic_expressions (line 197) | fn register_arithmetic_expressions(&mut self) {
    method register_comparison_expressions (line 219) | fn register_comparison_expressions(&mut self) {
    method register_bitwise_expressions (line 241) | fn register_bitwise_expressions(&mut self) {
    method register_logical_expressions (line 261) | fn register_logical_expressions(&mut self) {
    method register_null_check_expressions (line 273) | fn register_null_check_expressions(&mut self) {
    method register_string_expressions (line 283) | fn register_string_expressions(&mut self) {
    method register_temporal_expressions (line 297) | fn register_temporal_expressions(&mut self) {
    method get_expression_type (line 321) | fn get_expression_type(spark_expr: &Expr) -> Result<ExpressionType, Ex...
    method register_random_expressions (line 403) | fn register_random_expressions(&mut self) {
    method register_partition_expressions (line 413) | fn register_partition_expressions(&mut self) {

FILE: native/core/src/execution/planner/operator_registry.rs
  type OperatorBuilder (line 32) | pub trait OperatorBuilder: Send + Sync {
    method build (line 34) | fn build(
  type OperatorType (line 45) | pub enum OperatorType {
  type OperatorRegistry (line 64) | pub struct OperatorRegistry {
    method new (line 70) | fn new() -> Self {
    method global (line 77) | pub fn global() -> &'static OperatorRegistry {
    method can_handle (line 87) | pub fn can_handle(&self, spark_operator: &Operator) -> bool {
    method create_plan (line 94) | pub fn create_plan(
    method register_all_operators (line 119) | fn register_all_operators(&mut self) {
    method register_projection_operators (line 124) | fn register_projection_operators(&mut self) {
  function get_operator_type (line 133) | fn get_operator_type(spark_operator: &Operator) -> Option<OperatorType> {

FILE: native/core/src/execution/serde.rs
  function deserialize_expr (line 38) | pub fn deserialize_expr(buf: &[u8]) -> Result<spark_expression::Expr, Ex...
  function deserialize_op (line 46) | pub fn deserialize_op(buf: &[u8]) -> Result<spark_operator::Operator, Ex...
  function deserialize_config (line 54) | pub fn deserialize_config(buf: &[u8]) -> Result<spark_config::ConfigMap,...
  function deserialize_data_type (line 62) | pub fn deserialize_data_type(buf: &[u8]) -> Result<spark_expression::Dat...
  function to_arrow_datatype (line 70) | pub fn to_arrow_datatype(dt_value: &DataType) -> ArrowDataType {

FILE: native/core/src/execution/sort.rs
  type Rdx (line 23) | pub trait Rdx {
    method cfg_nbuckets (line 25) | fn cfg_nbuckets() -> usize;
    method cfg_nrounds (line 28) | fn cfg_nrounds() -> usize;
    method get_bucket (line 40) | fn get_bucket(&self, round: usize) -> usize;
    method reverse (line 44) | fn reverse(round: usize, bucket: usize) -> bool;
    method cfg_nbuckets (line 54) | fn cfg_nbuckets() -> usize {
    method cfg_nrounds (line 59) | fn cfg_nrounds() -> usize {
    method get_bucket (line 65) | fn get_bucket(&self, round: usize) -> usize {
    method reverse (line 73) | fn reverse(_round: usize, _bucket: usize) -> bool {
  constant MASK_LONG_LOWER_40_BITS (line 47) | const MASK_LONG_LOWER_40_BITS: u64 = (1u64 << 40) - 1;
  constant MASK_LONG_UPPER_24_BITS (line 48) | const MASK_LONG_UPPER_24_BITS: u64 = !MASK_LONG_LOWER_40_BITS;
  type RdxSort (line 79) | pub trait RdxSort {
    method rdxsort (line 81) | fn rdxsort(&mut self);
    method rdxsort (line 106) | fn rdxsort(&mut self) {
  function helper_bucket (line 85) | fn helper_bucket<T, I>(buckets_b: &mut [Vec<T>], iter: I, cfg_nbuckets: ...
  constant MAXIMUM_PARTITION_ID (line 184) | const MAXIMUM_PARTITION_ID: i32 = (1i32 << 24) - 1;
  constant MASK_LONG_LOWER_51_BITS (line 185) | const MASK_LONG_LOWER_51_BITS: i64 = (1i64 << 51) - 1;
  constant MASK_LONG_UPPER_13_BITS (line 186) | const MASK_LONG_UPPER_13_BITS: i64 = !MASK_LONG_LOWER_51_BITS;
  constant MASK_LONG_LOWER_27_BITS (line 187) | const MASK_LONG_LOWER_27_BITS: i64 = (1i64 << 27) - 1;
  function pack_pointer (line 190) | fn pack_pointer(pointer: i64, partition_id: i32) -> i64 {
  function test_rdxsort (line 199) | fn test_rdxsort() {

FILE: native/core/src/execution/spark_config.rs
  constant COMET_TRACING_ENABLED (line 20) | pub(crate) const COMET_TRACING_ENABLED: &str = "spark.comet.tracing.enab...
  constant COMET_DEBUG_ENABLED (line 21) | pub(crate) const COMET_DEBUG_ENABLED: &str = "spark.comet.debug.enabled";
  constant COMET_EXPLAIN_NATIVE_ENABLED (line 22) | pub(crate) const COMET_EXPLAIN_NATIVE_ENABLED: &str = "spark.comet.expla...
  constant COMET_MAX_TEMP_DIRECTORY_SIZE (line 23) | pub(crate) const COMET_MAX_TEMP_DIRECTORY_SIZE: &str = "spark.comet.maxT...
  constant COMET_DEBUG_MEMORY (line 24) | pub(crate) const COMET_DEBUG_MEMORY: &str = "spark.comet.debug.memory";
  constant SPARK_EXECUTOR_CORES (line 25) | pub(crate) const SPARK_EXECUTOR_CORES: &str = "spark.executor.cores";
  type SparkConfig (line 27) | pub(crate) trait SparkConfig {
    method get_bool (line 28) | fn get_bool(&self, name: &str) -> bool;
    method get_u64 (line 29) | fn get_u64(&self, name: &str, default_value: u64) -> u64;
    method get_usize (line 30) | fn get_usize(&self, name: &str, default_value: usize) -> usize;
    method get_bool (line 34) | fn get_bool(&self, name: &str) -> bool {
    method get_u64 (line 40) | fn get_u64(&self, name: &str, default_value: u64) -> u64 {
    method get_usize (line 46) | fn get_usize(&self, name: &str, default_value: usize) -> usize {

FILE: native/core/src/execution/spark_plan.rs
  type SparkPlan (line 26) | pub(crate) struct SparkPlan {
    method new (line 40) | pub(crate) fn new(
    method new_with_additional (line 54) | pub(crate) fn new_with_additional(
    method schema (line 73) | pub(crate) fn schema(&self) -> SchemaRef {
    method children (line 78) | pub(crate) fn children(&self) -> &Vec<Arc<SparkPlan>> {

FILE: native/core/src/execution/utils.rs
  type SparkArrowConvert (line 25) | pub trait SparkArrowConvert {
    method from_spark (line 28) | fn from_spark(addresses: (i64, i64)) -> Result<Self, ExecutionError>
    method move_to_spark (line 33) | fn move_to_spark(&self, array: i64, schema: i64) -> Result<(), Executi...
    method from_spark (line 37) | fn from_spark(addresses: (i64, i64)) -> Result<Self, ExecutionError> {
    method move_to_spark (line 65) | fn move_to_spark(&self, array: i64, schema: i64) -> Result<(), Executi...

FILE: native/core/src/lib.rs
  function Java_org_apache_comet_NativeBase_init (line 89) | pub extern "system" fn Java_org_apache_comet_NativeBase_init(
  constant LOG_PATTERN (line 126) | const LOG_PATTERN: &str = "{d(%y/%m/%d %H:%M:%S)} {l} {f}: {m}{n}";
  function Java_org_apache_comet_NativeBase_isFeatureEnabled (line 138) | pub extern "system" fn Java_org_apache_comet_NativeBase_isFeatureEnabled(
  function default_logger_config (line 158) | fn default_logger_config(log_level: &str) -> CometResult<Config> {

FILE: native/core/src/parquet/cast_column.rs
  function types_differ_only_in_field_names (line 42) | fn types_differ_only_in_field_names(physical: &DataType, logical: &DataT...
  function relabel_array (line 80) | fn relabel_array(array: ArrayRef, target_type: &DataType) -> ArrayRef {
  function cast_timestamp_micros_to_millis_array (line 148) | fn cast_timestamp_micros_to_millis_array(
  function cast_timestamp_micros_to_millis_scalar (line 172) | fn cast_timestamp_micros_to_millis_scalar(
  type CometCastColumnExpr (line 181) | pub struct CometCastColumnExpr {
    method new (line 219) | pub fn new(
    method with_parquet_options (line 235) | pub fn with_parquet_options(mut self, options: SparkParquetOptions) ->...
  method eq (line 198) | fn eq(&self, other: &Self) -> bool {
  method hash (line 208) | fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
  method fmt (line 242) | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  method as_any (line 253) | fn as_any(&self) -> &dyn Any {
  method data_type (line 257) | fn data_type(&self, _input_schema: &Schema) -> DataFusionResult<DataType> {
  method nullable (line 261) | fn nullable(&self, _input_schema: &Schema) -> DataFusionResult<bool> {
  method evaluate (line 265) | fn evaluate(&self, batch: &RecordBatch) -> DataFusionResult<ColumnarValu...
  method return_field (line 323) | fn return_field(&self, _input_schema: &Schema) -> DataFusionResult<Field...
  method children (line 327) | fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
  method with_new_children (line 331) | fn with_new_children(
  method fmt_sql (line 349) | fn fmt_sql(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  function test_cast_timestamp_micros_to_millis_array (line 362) | fn test_cast_timestamp_micros_to_millis_array() {
  function test_cast_timestamp_micros_to_millis_array_with_timezone (line 390) | fn test_cast_timestamp_micros_to_millis_array_with_timezone() {
  function test_cast_timestamp_micros_to_millis_scalar (line 411) | fn test_cast_timestamp_micros_to_millis_scalar() {
  function test_comet_cast_column_expr_evaluate_micros_to_millis_array (line 430) | fn test_comet_cast_column_expr_evaluate_micros_to_millis_array() {
  function test_comet_cast_column_expr_evaluate_micros_to_millis_scalar (line 475) | fn test_comet_cast_column_expr_evaluate_micros_to_millis_scalar() {
  function test_relabel_list_field_name (line 514) | fn test_relabel_list_field_name() {
  function test_relabel_map_entries_field_name (line 535) | fn test_relabel_map_entries_field_name() {
  function test_relabel_struct_metadata (line 571) | fn test_relabel_struct_metadata() {
  function test_relabel_nested_struct_containing_list (line 593) | fn test_relabel_nested_struct_containing_list() {

FILE: native/core/src/parquet/data_type.rs
  type DataType (line 20) | pub trait DataType: PlainDecoding + PlainDictDecoding + 'static {}
  type AsBytes (line 60) | pub trait AsBytes {
    method as_bytes (line 62) | fn as_bytes(&self) -> &[u8];
    method as_bytes (line 66) | fn as_bytes(&self) -> &[u8] {
    method as_bytes (line 72) | fn as_bytes(&self) -> &[u8] {
    method as_bytes (line 78) | fn as_bytes(&self) -> &[u8] {
    method as_bytes (line 84) | fn as_bytes(&self) -> &[u8] {

FILE: native/core/src/parquet/encryption_support.rs
  constant ENCRYPTION_FACTORY_ID (line 33) | pub const ENCRYPTION_FACTORY_ID: &str = "comet.jni_kms_encryption";
  type CometEncryptionFactory (line 44) | pub struct CometEncryptionFactory {
  method get_file_encryption_properties (line 52) | async fn get_file_encryption_properties(
  method get_file_decryption_properties (line 68) | async fn get_file_decryption_properties(
  type CometKeyRetriever (line 84) | pub struct CometKeyRetriever {
    method new (line 91) | pub fn new(
  method retrieve_key (line 115) | fn retrieve_key(&self, key_metadata: &[u8]) -> datafusion::parquet::erro...

FILE: native/core/src/parquet/mod.rs
  type Context (line 74) | struct Context {
  function Java_org_apache_comet_parquet_Native_initColumnReader (line 79) | pub extern "system" fn Java_org_apache_comet_parquet_Native_initColumnRe...
  function Java_org_apache_comet_parquet_Native_setDictionaryPage (line 141) | pub unsafe extern "system" fn Java_org_apache_comet_parquet_Native_setDi...
  function Java_org_apache_comet_parquet_Native_setPageV1 (line 168) | pub unsafe extern "system" fn Java_org_apache_comet_parquet_Native_setPa...
  function Java_org_apache_comet_parquet_Native_setPageV2 (line 195) | pub unsafe extern "system" fn Java_org_apache_comet_parquet_Native_setPa...
  function Java_org_apache_comet_parquet_Native_resetBatch (line 236) | pub extern "system" fn Java_org_apache_comet_parquet_Native_resetBatch(
  function Java_org_apache_comet_parquet_Native_readBatch (line 249) | pub extern "system" fn Java_org_apache_comet_parquet_Native_readBatch(
  function Java_org_apache_comet_parquet_Native_skipBatch (line 268) | pub extern "system" fn Java_org_apache_comet_parquet_Native_skipBatch(
  function Java_org_apache_comet_parquet_Native_currentBatch (line 282) | pub extern "system" fn Java_org_apache_comet_parquet_Native_currentBatch(
  function get_context (line 299) | fn get_context<'a>(handle: jlong) -> Result<&'a mut Context, CometError> {
  function get_reader (line 308) | fn get_reader<'a>(handle: jlong) -> Result<&'a mut ColumnReader, CometEr...
  function Java_org_apache_comet_parquet_Native_closeColumnReader (line 313) | pub extern "system" fn Java_org_apache_comet_parquet_Native_closeColumnR...
  function from_u8_slice (line 327) | fn from_u8_slice(src: &mut [u8]) -> &mut [i8] {
  type ParquetReaderState (line 333) | enum ParquetReaderState {
  type BatchContext (line 339) | struct BatchContext {
  function get_batch_context (line 348) | fn get_batch_context<'a>(handle: jlong) -> Result<&'a mut BatchContext, ...
  function get_file_groups_single_file (line 356) | fn get_file_groups_single_file(
  function get_object_store_options (line 377) | pub fn get_object_store_options(
  function Java_org_apache_comet_parquet_Native_validateObjectStoreConfig (line 403) | pub unsafe extern "system" fn Java_org_apache_comet_parquet_Native_valid...
  function Java_org_apache_comet_parquet_Native_initRecordBatchReader (line 428) | pub unsafe extern "system" fn Java_org_apache_comet_parquet_Native_initR...
  function Java_org_apache_comet_parquet_Native_readNextRecordBatch (line 535) | pub extern "system" fn Java_org_apache_comet_parquet_Native_readNextReco...
  function Java_org_apache_comet_parquet_Native_currentColumnBatch (line 580) | pub extern "system" fn Java_org_apache_comet_parquet_Native_currentColum...
  function Java_org_apache_comet_parquet_Native_closeRecordBatchReader (line 603) | pub extern "system" fn Java_org_apache_comet_parquet_Native_closeRecordB...

FILE: native/core/src/parquet/mutable_vector.rs
  constant DEFAULT_ARRAY_LEN (line 23) | const DEFAULT_ARRAY_LEN: usize = 4;
  type ParquetMutableVector (line 34) | pub struct ParquetMutableVector {
    method new (line 62) | pub fn new(capacity: usize, arrow_type: &ArrowDataType) -> Self {
    method new_with_bit_width (line 67) | pub fn new_with_bit_width(
    method is_null (line 116) | pub fn is_null(&self, idx: usize) -> bool {
    method reset (line 122) | pub fn reset(&mut self) {
    method put_null (line 140) | pub fn put_null(&mut self) {
    method put_nulls (line 146) | pub fn put_nulls(&mut self, n: usize) {
    method num_values (line 164) | pub fn num_values(&self) -> usize {
    method num_nulls (line 170) | pub fn num_nulls(&self) -> usize {
    method set_dictionary (line 175) | pub fn set_dictionary(&mut self, dict: ParquetMutableVector) {
    method get_array_data (line 188) | pub fn get_array_data(&mut self) -> Result<ArrayData, ExecutionError> {
    method bit_width (line 218) | pub fn bit_width(arrow_type: &ArrowDataType) -> usize {
    method is_binary_type (line 233) | fn is_binary_type(dt: &ArrowDataType) -> bool {
    method should_reset_value_buffer (line 238) | fn should_reset_value_buffer(dt: &ArrowDataType) -> bool {

FILE: native/core/src/parquet/objectstore/s3.rs
  function create_store (line 58) | pub fn create_store(
  function region_cache (line 136) | fn region_cache() -> &'static RwLock<HashMap<String, String>> {
  function resolve_bucket_region (line 148) | pub async fn resolve_bucket_region(bucket: &str) -> Result<String, Box<d...
  function extract_s3_config_options (line 202) | fn extract_s3_config_options(
  function normalize_endpoint (line 244) | fn normalize_endpoint(
  function get_config (line 277) | fn get_config<'a>(
  function get_config_trimmed (line 289) | fn get_config_trimmed<'a>(
  constant HADOOP_IAM_INSTANCE (line 298) | const HADOOP_IAM_INSTANCE: &str = "org.apache.hadoop.fs.s3a.auth.IAMInst...
  constant HADOOP_SIMPLE (line 299) | const HADOOP_SIMPLE: &str = "org.apache.hadoop.fs.s3a.SimpleAWSCredentia...
  constant HADOOP_TEMPORARY (line 300) | const HADOOP_TEMPORARY: &str = "org.apache.hadoop.fs.s3a.TemporaryAWSCre...
  constant HADOOP_ASSUMED_ROLE (line 301) | const HADOOP_ASSUMED_ROLE: &str = "org.apache.hadoop.fs.s3a.auth.Assumed...
  constant HADOOP_ANONYMOUS (line 302) | const HADOOP_ANONYMOUS: &str = "org.apache.hadoop.fs.s3a.AnonymousAWSCre...
  constant AWS_CONTAINER_CREDENTIALS (line 305) | const AWS_CONTAINER_CREDENTIALS: &str =
  constant AWS_CONTAINER_CREDENTIALS_V1 (line 307) | const AWS_CONTAINER_CREDENTIALS_V1: &str = "com.amazonaws.auth.Container...
  constant AWS_EC2_CONTAINER_CREDENTIALS (line 308) | const AWS_EC2_CONTAINER_CREDENTIALS: &str =
  constant AWS_INSTANCE_PROFILE (line 310) | const AWS_INSTANCE_PROFILE: &str =
  constant AWS_INSTANCE_PROFILE_V1 (line 312) | const AWS_INSTANCE_PROFILE_V1: &str = "com.amazonaws.auth.InstanceProfil...
  constant AWS_ENVIRONMENT (line 313) | const AWS_ENVIRONMENT: &str =
  constant AWS_ENVIRONMENT_V1 (line 315) | const AWS_ENVIRONMENT_V1: &str = "com.amazonaws.auth.EnvironmentVariable...
  constant AWS_WEB_IDENTITY (line 316) | const AWS_WEB_IDENTITY: &str =
  constant AWS_WEB_IDENTITY_V1 (line 318) | const AWS_WEB_IDENTITY_V1: &str = "com.amazonaws.auth.WebIdentityTokenCr...
  constant AWS_ANONYMOUS (line 319) | const AWS_ANONYMOUS: &str = "software.amazon.awssdk.auth.credentials.Ano...
  constant AWS_ANONYMOUS_V1 (line 320) | const AWS_ANONYMOUS_V1: &str = "com.amazonaws.auth.AnonymousAWSCredentia...
  function build_credential_provider (line 337) | async fn build_credential_provider(
  function parse_credential_provider_names (line 378) | fn parse_credential_provider_names(aws_credential_provider_names: &str) ...
  function is_anonymous_credential_provider (line 386) | fn is_anonymous_credential_provider(credential_provider_name: &str) -> b...
  function build_chained_aws_credential_provider_metadata (line 390) | fn build_chained_aws_credential_provider_metadata(
  function build_aws_credential_provider_metadata (line 422) | fn build_aws_credential_provider_metadata(
  function build_static_credential_provider_metadata (line 449) | fn build_static_credential_provider_metadata(
  function build_assume_role_credential_provider_metadata (line 479) | fn build_assume_role_credential_provider_metadata(
  type CachedAwsCredentialProvider (line 529) | struct CachedAwsCredentialProvider {
    method new (line 552) | fn new(
    method metadata (line 567) | fn metadata(&self) -> CredentialProviderMetadata {
    method fetch_credential (line 571) | fn fetch_credential(&self) -> Option<aws_credential_types::Credentials> {
    method refresh_credential (line 585) | async fn refresh_credential(&self) -> object_store::Result<aws_credent...
  type Credential (line 601) | type Credential = AwsCredential;
  method get_credential (line 604) | async fn get_credential(&self) -> object_store::Result<Arc<AwsCredential...
  type StaticCredentialProvider (line 624) | struct StaticCredentialProvider {
    method new (line 630) | fn new(is_valid: bool, ak: String, sk: String, token: Option<String>) ...
  method provide_credentials (line 644) | fn provide_credentials<'a>(
  type CredentialProviderMetadata (line 666) | enum CredentialProviderMetadata {
    method name (line 687) | fn name(&self) -> &'static str {
    method simple_string (line 702) | fn simple_string(&self) -> String {
    method create_credential_provider (line 738) | async fn create_credential_provider(
  type TestConfigBuilder (line 830) | struct TestConfigBuilder {
    method new (line 835) | fn new() -> Self {
    method with_region (line 839) | fn with_region(mut self, region: &str) -> Self {
    method with_credential_provider (line 845) | fn with_credential_provider(mut self, provider: &str) -> Self {
    method with_bucket_credential_provider (line 853) | fn with_bucket_credential_provider(mut self, bucket: &str, provider: &...
    method with_access_key (line 861) | fn with_access_key(mut self, key: &str) -> Self {
    method with_secret_key (line 867) | fn with_secret_key(mut self, key: &str) -> Self {
    method with_session_token (line 873) | fn with_session_token(mut self, token: &str) -> Self {
    method with_bucket_access_key (line 879) | fn with_bucket_access_key(mut self, bucket: &str, key: &str) -> Self {
    method with_bucket_secret_key (line 887) | fn with_bucket_secret_key(mut self, bucket: &str, key: &str) -> Self {
    method with_bucket_session_token (line 895) | fn with_bucket_session_token(mut self, bucket: &str, token: &str) -> S...
    method with_assume_role_arn (line 903) | fn with_assume_role_arn(mut self, arn: &str) -> Self {
    method with_assume_role_session_name (line 909) | fn with_assume_role_session_name(mut self, name: &str) -> Self {
    method with_assume_role_credentials_provider (line 917) | fn with_assume_role_credentials_provider(mut self, provider: &str) -> ...
    method build (line 925) | fn build(self) -> HashMap<String, String> {
  function test_create_store (line 932) | fn test_create_store() {
  function test_get_config_trimmed (line 947) | fn test_get_config_trimmed() {
  function test_parse_credential_provider_names (line 975) | fn test_parse_credential_provider_names() {
  function test_default_credential_provider (line 1013) | async fn test_default_credential_provider() {
  function test_anonymous_credential_provider (line 1040) | async fn test_anonymous_credential_provider() {
  function test_mixed_anonymous_and_other_providers_error (line 1056) | async fn test_mixed_anonymous_and_other_providers_error() {
  function test_simple_credential_provider (line 1077) | async fn test_simple_credential_provider() {
  function test_temporary_credential_provider (line 1106) | async fn test_temporary_credential_provider() {
  function test_missing_access_key (line 1135) | async fn test_missing_access_key() {
  function test_missing_secret_key (line 1161) | async fn test_missing_secret_key() {
  function test_missing_session_token_for_temporary (line 1187) | async fn test_missing_session_token_for_temporary() {
  function test_bucket_specific_configuration (line 1214) | async fn test_bucket_specific_configuration() {
  function test_assume_role_credential_provider (line 1300) | async fn test_assume_role_credential_provider() {
  function test_assume_role_missing_arn_error (line 1336) | async fn test_assume_role_missing_arn_error() {
  function test_unsupported_credential_provider_error (line 1353) | async fn test_unsupported_credential_provider_error() {
  function test_environment_credential_provider (line 1372) | async fn test_environment_credential_provider() {
  function test_ecs_credential_provider (line 1391) | async fn test_ecs_credential_provider() {
  function test_imds_credential_provider (line 1414) | async fn test_imds_credential_provider() {
  function test_web_identity_credential_provider (line 1433) | async fn test_web_identity_credential_provider() {
  function test_hadoop_iam_instance_credential_provider (line 1452) | async fn test_hadoop_iam_instance_credential_provider() {
  function test_chained_credential_providers (line 1474) | async fn test_chained_credential_providers() {
  function test_static_environment_web_identity_chain (line 1502) | async fn test_static_environment_web_identity_chain() {
  function test_assume_role_with_static_base_provider (line 1537) | async fn test_assume_role_with_static_base_provider() {
  function test_assume_role_with_web_identity_base_provider (line 1573) | async fn test_assume_role_with_web_identity_base_provider() {
  function test_assume_role_with_chained_base_providers (line 1601) | async fn test_assume_role_with_chained_base_providers() {
  function test_assume_role_chained_with_other_providers (line 1643) | async fn test_assume_role_chained_with_other_providers() {
  function test_assume_role_with_anonymous_base_provider_error (line 1691) | async fn test_assume_role_with_anonymous_base_provider_error() {
  function test_get_credential_from_static_credential_provider (line 1716) | async fn test_get_credential_from_static_credential_provider() {
  function test_get_credential_from_invalid_static_credential_provider (line 1754) | async fn test_get_credential_from_invalid_static_credential_provider() {
  function test_invalid_static_credential_provider_should_not_prevent_other_providers_from_working (line 1772) | async fn test_invalid_static_credential_provider_should_not_prevent_othe...
  type MockAwsCredentialProvider (line 1813) | struct MockAwsCredentialProvider {
  method provide_credentials (line 1818) | fn provide_credentials<'a>(
  function test_cached_credential_provider_refresh_credential (line 1837) | async fn test_cached_credential_provider_refresh_credential() {
  function test_cached_credential_provider_cache_credential (line 1857) | async fn test_cached_credential_provider_cache_credential() {
  function test_extract_s3_config_options (line 1876) | fn test_extract_s3_config_options() {
  function test_extract_s3_config_custom_endpoint (line 1898) | fn test_extract_s3_config_custom_endpoint() {
  function test_extract_s3_config_custom_endpoint_with_virtual_hosted_style (line 1919) | fn test_extract_s3_config_custom_endpoint_with_virtual_hosted_style() {
  function test_extract_s3_config_ignore_default_endpoint (line 1955) | fn test_extract_s3_config_ignore_default_endpoint() {
  function test_credential_provider_metadata_simple_string (line 1970) | fn test_credential_provider_metadata_simple_string() {

FILE: native/core/src/parquet/parquet_exec.rs
  function init_datasource_exec (line 63) | pub(crate) fn init_datasource_exec(
  function get_options (line 185) | fn get_options(
  function dbg_batch_stream (line 214) | pub fn dbg_batch_stream(stream: SendableRecordBatchStream) -> SendableRe...

FILE: native/core/src/parquet/parquet_read_cached_factory.rs
  type MetadataCell (line 47) | type MetadataCell = Arc<OnceCell<Arc<ParquetMetaData>>>;
  type CachingParquetReaderFactory (line 53) | pub struct CachingParquetReaderFactory {
    method new (line 59) | pub fn new(store: Arc<dyn ObjectStore>) -> Self {
  method create_reader (line 68) | fn create_reader(
  type CachingParquetFileReader (line 104) | struct CachingParquetFileReader {
  method get_bytes (line 112) | fn get_bytes(&mut self, range: Range<u64>) -> BoxFuture<'_, parquet::err...
  method get_byte_ranges (line 117) | fn get_byte_ranges(
  method get_metadata (line 129) | fn get_metadata<'a>(

FILE: native/core/src/parquet/parquet_support.rs
  type SparkParquetOptions (line 65) | pub struct SparkParquetOptions {
    method new (line 85) | pub fn new(eval_mode: EvalMode, timezone: &str, allow_incompat: bool) ...
    method new_without_timezone (line 97) | pub fn new_without_timezone(eval_mode: EvalMode, allow_incompat: bool)...
  function spark_parquet_convert (line 113) | pub fn spark_parquet_convert(
  function parquet_convert_array (line 138) | fn parquet_convert_array(
  function parquet_convert_struct_to_struct (line 238) | fn parquet_convert_struct_to_struct(
  function parquet_convert_map_to_map (line 302) | fn parquet_convert_map_to_map(
  function key_field (line 347) | fn key_field(entries_field: &FieldRef) -> Option<FieldRef> {
  function value_field (line 356) | fn value_field(entries_field: &FieldRef) -> Option<FieldRef> {
  function is_hdfs_scheme (line 364) | pub fn is_hdfs_scheme(url: &Url, object_store_configs: &HashMap<String, ...
  function create_hdfs_object_store (line 377) | fn create_hdfs_object_store(
  function create_hdfs_operator (line 395) | pub(crate) fn create_hdfs_operator(url: &Url) -> Result<opendal::Operato...
  function create_hdfs_object_store (line 409) | pub(crate) fn create_hdfs_object_store(
  function get_name_node_uri (line 419) | fn get_name_node_uri(url: &Url) -> Result<String, object_store::Error> {
  function create_hdfs_object_store (line 441) | fn create_hdfs_object_store(
  type ObjectStoreCache (line 450) | type ObjectStoreCache = RwLock<HashMap<(String, u64), Arc<dyn ObjectStor...
  function object_store_cache (line 483) | fn object_store_cache() -> &'static ObjectStoreCache {
  function hash_object_store_configs (line 489) | fn hash_object_store_configs(configs: &HashMap<String, String>) -> u64 {
  function prepare_object_store_with_configs (line 502) | pub(crate) fn prepare_object_store_with_configs(
  function prepare_object_store (line 600) | pub(crate) fn prepare_object_store(
  function prepare_object_store (line 610) | pub(crate) fn prepare_object_store(
  function test_prepare_object_store (line 621) | fn test_prepare_object_store() {
  function test_prepare_object_store (line 664) | fn test_prepare_object_store() {

FILE: native/core/src/parquet/read/column.rs
  constant DECIMAL_MAX_INT_DIGITS (line 43) | const DECIMAL_MAX_INT_DIGITS: i32 = 9;
  constant DECIMAL_MAX_LONG_DIGITS (line 46) | const DECIMAL_MAX_LONG_DIGITS: i32 = 18;
  type ColumnReader (line 48) | pub enum ColumnReader {
    method get (line 93) | pub fn get(
    method get_descriptor (line 531) | pub fn get_descriptor(&self) -> &ColumnDescriptor {
    method set_dictionary_page (line 536) | pub fn set_dictionary_page(
    method set_page_v1 (line 552) | pub fn set_page_v1(&mut self, page_value_count: usize, page_data: Buff...
    method set_page_v2 (line 557) | pub fn set_page_v2(
    method reset_batch (line 577) | pub fn reset_batch(&mut self) {
    method current_batch (line 582) | pub fn current_batch(&mut self) -> Result<ArrayData, ExecutionError> {
    method read_batch (line 587) | pub fn read_batch(&mut self, total: usize, null_pad_size: usize) -> (u...
    method skip_batch (line 592) | pub fn skip_batch(&mut self, total: usize, put_nulls: bool) -> usize {
  type TypedColumnReader (line 598) | pub struct TypedColumnReader<T: DataType> {
  function new (line 621) | pub fn new(
  function get_descriptor (line 645) | pub fn get_descriptor(&self) -> &ColumnDescriptor {
  function reset_batch (line 652) | pub fn reset_batch(&mut self) {
  function current_batch (line 661) | pub fn current_batch(&mut self) -> Result<ArrayData, ExecutionError> {
  function read_batch (line 673) | pub fn read_batch(&mut self, total: usize, null_pad_size: usize) -> (usi...
  function skip_batch (line 696) | pub fn skip_batch(&mut self, total: usize, put_nulls: bool) -> usize {
  function set_dictionary_page (line 719) | pub fn set_dictionary_page(
  function set_page_v1 (line 758) | pub fn set_page_v1(
  function set_page_v2 (line 794) | pub fn set_page_v2(
  function check_dictionary (line 819) | fn check_dictionary(&mut self, encoding: &Encoding) {
  function get_decoder (line 831) | fn get_decoder(&self, value_data: Buffer, encoding: Encoding) -> Box<dyn...

FILE: native/core/src/parquet/read/levels.rs
  constant INITIAL_BUF_LEN (line 29) | const INITIAL_BUF_LEN: usize = 16;
  type Mode (line 31) | enum Mode {
  type LevelDecoder (line 37) | pub struct LevelDecoder {
    method new (line 64) | pub fn new(desc: ColumnDescPtr, bit_width: u8, need_length: bool) -> S...
    method set_data (line 81) | pub fn set_data(&mut self, page_value_count: usize, page_data: &Buffer...
    method read_batch (line 103) | pub fn read_batch(
    method skip_batch (line 151) | pub fn skip_batch(
    method read_next_group (line 197) | fn read_next_group(&mut self) {

FILE: native/core/src/parquet/read/mod.rs
  type ReadOptions (line 31) | pub struct ReadOptions {
  type PlainDecoderInner (line 37) | pub struct PlainDecoderInner {
  type PlainDecoding (line 58) | pub trait PlainDecoding {
    method decode (line 63) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector,...
    method skip (line 69) | fn skip(src: &mut PlainDecoderInner, num: usize);
  type PlainDictDecoding (line 72) | pub trait PlainDictDecoding {
    method decode_dict (line 75) | fn decode_dict(src: ParquetMutableVector, dst: &mut ParquetMutableVect...
    method decode_dict_one (line 101) | fn decode_dict_one(

FILE: native/core/src/parquet/read/values.rs
  function get_decoder (line 35) | pub fn get_decoder<T: DataType>(
  type Decoder (line 53) | pub trait Decoder {
    method read (line 60) | fn read(&mut self, dst: &mut ParquetMutableVector);
    method read_batch (line 68) | fn read_batch(&mut self, dst: &mut ParquetMutableVector, num: usize);
    method skip_batch (line 75) | fn skip_batch(&mut self, num: usize);
    method encoding (line 78) | fn encoding(&self) -> Encoding;
    method read (line 899) | fn read(&mut self, dst: &mut ParquetMutableVector) {
    method read_batch (line 908) | fn read_batch(&mut self, dst: &mut ParquetMutableVector, num: usize) {
    method skip_batch (line 913) | fn skip_batch(&mut self, num: usize) {
    method encoding (line 918) | fn encoding(&self) -> Encoding {
    method read (line 975) | fn read(&mut self, dst: &mut ParquetMutableVector) {
    method read_batch (line 999) | fn read_batch(&mut self, dst: &mut ParquetMutableVector, num: usize) {
    method skip_batch (line 1036) | fn skip_batch(&mut self, num: usize) {
    method encoding (line 1057) | fn encoding(&self) -> Encoding {
  constant JULIAN_GREGORIAN_SWITCH_OFF_DAY (line 83) | const JULIAN_GREGORIAN_SWITCH_OFF_DAY: i32 = -141427;
  constant JULIAN_GREGORIAN_SWITCH_OFF_TS (line 87) | const JULIAN_GREGORIAN_SWITCH_OFF_TS: i64 = -2208988800000000;
  constant JULIAN_DAY_OF_EPOCH (line 91) | const JULIAN_DAY_OF_EPOCH: i32 = 2440588;
  constant MICROS_PER_MILLIS (line 94) | const MICROS_PER_MILLIS: i64 = 1000;
  constant MICROS_PER_DAY (line 96) | const MICROS_PER_DAY: i64 = 24_i64 * 60 * 60 * 1000 * 1000;
  type PlainDecoder (line 98) | pub struct PlainDecoder<T: DataType> {
  function new (line 107) | pub fn new(value_data: Buffer, desc: ColumnDescPtr, read_options: ReadOp...
  method decode (line 212) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector, n...
  method skip (line 252) | fn skip(src: &mut PlainDecoderInner, num: usize) {
  method decode (line 260) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector, n...
  method skip (line 303) | fn skip(src: &mut PlainDecoderInner, num: usize) {
  method decode (line 311) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector, n...
  method skip (line 357) | fn skip(src: &mut PlainDecoderInner, num: usize) {
  method decode (line 365) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector, n...
  method skip (line 404) | fn skip(src: &mut PlainDecoderInner, num: usize) {
  method decode (line 415) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector, n...
  method skip (line 421) | fn skip(src: &mut PlainDecoderInner, num: usize) {
  method decode_dict_one (line 429) | fn decode_dict_one(
  constant INT96_SRC_BYTE_WIDTH (line 815) | const INT96_SRC_BYTE_WIDTH: usize = 12;
  constant INT96_DST_BYTE_WIDTH (line 817) | const INT96_DST_BYTE_WIDTH: usize = 8;
  function int96_to_microsecond (line 819) | fn int96_to_microsecond(v: &[u8]) -> i64 {
  method decode (line 833) | fn decode(src: &mut PlainDecoderInner, dst: &mut ParquetMutableVector, n...
  method skip (line 874) | fn skip(src: &mut PlainDecoderInner, num: usize) {
  method decode_dict_one (line 880) | fn decode_dict_one(
  type DictDecoder (line 925) | pub struct DictDecoder {
    method new (line 943) | pub fn new(buf: Buffer) -> Self {
    method reload (line 959) | fn reload(&mut self) {
  function test_i32_to_i8 (line 1068) | fn test_i32_to_i8() {
  function test_i32_to_u8 (line 1080) | fn test_i32_to_u8() {
  function test_i32_to_i16 (line 1092) | fn test_i32_to_i16() {
  function test_i32_to_u16 (line 1104) | fn test_i32_to_u16() {
  function test_i32_to_u32 (line 1120) | fn test_i32_to_u32() {

FILE: native/core/src/parquet/schema_adapter.rs
  type SparkPhysicalExprAdapterFactory (line 41) | pub struct SparkPhysicalExprAdapterFactory {
    method new (line 51) | pub fn new(
  function remap_physical_schema_names (line 66) | fn remap_physical_schema_names(
  function check_column_duplicate (line 99) | fn check_column_duplicate(col_name: &str, physical_schema: &SchemaRef) -...
  method create (line 115) | fn create(
  type SparkPhysicalExprAdapter (line 187) | struct SparkPhysicalExprAdapter {
    method wrap_all_type_mismatches (line 293) | fn wrap_all_type_mismatches(
    method replace_with_spark_cast (line 374) | fn replace_with_spark_cast(
    method replace_missing_with_defaults (line 449) | fn replace_missing_with_defaults(
  method rewrite (line 211) | fn rewrite(&self, expr: Arc<dyn PhysicalExpr>) -> DataFusionResult<Arc<d...
  function parquet_roundtrip_int_as_string (line 546) | async fn parquet_roundtrip_int_as_string() -> Result<(), DataFusionError> {
  function parquet_roundtrip_unsigned_int (line 568) | async fn parquet_roundtrip_unsigned_int() -> Result<(), DataFusionError> {
  function roundtrip (line 583) | async fn roundtrip(
  function parquet_duplicate_fields_case_insensitive (line 620) | async fn parquet_duplicate_fields_case_insensitive() {

FILE: native/core/src/parquet/util/bit_packing.rs
  function unpack32 (line 29) | pub unsafe fn unpack32(mut in_ptr: *const u32, out_ptr: *mut u32, num_bi...
  function nullunpacker32 (line 69) | unsafe fn nullunpacker32(in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack1_32 (line 77) | unsafe fn unpack1_32(in_buf: *const u32, mut out: *mut u32) -> *const u32 {
  function unpack2_32 (line 145) | unsafe fn unpack2_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack3_32 (line 214) | unsafe fn unpack3_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack4_32 (line 288) | unsafe fn unpack4_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack5_32 (line 362) | unsafe fn unpack5_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack6_32 (line 442) | unsafe fn unpack6_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack7_32 (line 524) | unsafe fn unpack7_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack8_32 (line 610) | unsafe fn unpack8_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack9_32 (line 692) | unsafe fn unpack9_32(mut in_buf: *const u32, mut out: *mut u32) -> *cons...
  function unpack10_32 (line 784) | unsafe fn unpack10_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack11_32 (line 878) | unsafe fn unpack11_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack12_32 (line 976) | unsafe fn unpack12_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack13_32 (line 1074) | unsafe fn unpack13_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack14_32 (line 1178) | unsafe fn unpack14_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack15_32 (line 1284) | unsafe fn unpack15_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack16_32 (line 1394) | unsafe fn unpack16_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack17_32 (line 1492) | unsafe fn unpack17_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack18_32 (line 1608) | unsafe fn unpack18_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack19_32 (line 1726) | unsafe fn unpack19_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack20_32 (line 1848) | unsafe fn unpack20_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack21_32 (line 1970) | unsafe fn unpack21_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack22_32 (line 2098) | unsafe fn unpack22_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack23_32 (line 2228) | unsafe fn unpack23_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack24_32 (line 2362) | unsafe fn unpack24_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack25_32 (line 2492) | unsafe fn unpack25_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack26_32 (line 2632) | unsafe fn unpack26_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack27_32 (line 2774) | unsafe fn unpack27_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack28_32 (line 2920) | unsafe fn unpack28_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack29_32 (line 3066) | unsafe fn unpack29_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack30_32 (line 3218) | unsafe fn unpack30_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack31_32 (line 3372) | unsafe fn unpack31_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...
  function unpack32_32 (line 3530) | unsafe fn unpack32_32(mut in_buf: *const u32, mut out: *mut u32) -> *con...

FILE: native/core/src/parquet/util/buffer.rs
  type Buffer (line 21) | pub trait Buffer {
    method len (line 23) | fn len(&self) -> usize;
    method data (line 26) | fn data(&self) -> &[u8];
    method is_empty (line 29) | fn is_empty(&self) -> bool {
    method len (line 35) | fn len(&self) -> usize {
    method data (line 39) | fn data(&self) -> &[u8] {
  type BufferRef (line 44) | pub struct BufferRef {
    method new (line 51) | pub fn new(inner: Arc<dyn Buffer>) -> Self {
    method len (line 62) | pub fn len(&self) -> usize {
    method is_empty (line 67) | pub fn is_empty(&self) -> bool {
    method data (line 72) | pub fn data(&self) -> &[u8] {
    method slice (line 78) | pub fn slice(&self, offset: usize, len: usize) -> BufferRef {
    method start (line 98) | pub fn start(&self, offset: usize) -> BufferRef {
    method as_ref (line 113) | fn as_ref(&self) -> &[u8] {
    type Output (line 123) | type Output = Idx::Output;
    method index (line 125) | fn index(&self, index: Idx) -> &Self::Output {

FILE: native/core/src/parquet/util/jni.rs
  function convert_column_descriptor (line 39) | pub fn convert_column_descriptor(
  function convert_encoding (line 85) | pub fn convert_encoding(ordinal: jint) -> Encoding {
  type TypePromotionInfo (line 101) | pub struct TypePromotionInfo {
    method new_from_jni (line 109) | pub fn new_from_jni(
    method new (line 124) | pub fn new(physical_type: PhysicalType, precision: i32, scale: i32, bi...
  function convert_column_path (line 134) | fn convert_column_path(env: &mut Env, path_array: JObjectArray) -> JNIRe...
  function convert_physical_type (line 145) | fn convert_physical_type(id: jint) -> PhysicalType {
  function convert_logical_type (line 159) | fn convert_logical_type(
  function convert_time_unit (line 186) | fn convert_time_unit(time_unit: jint) -> TimeUnit {
  function fix_type_length (line 197) | fn fix_type_length(t: &PhysicalType, type_length: i32) -> i32 {
  function deserialize_schema (line 206) | pub fn deserialize_schema(ipc_bytes: &[u8]) -> Result<arrow::datatypes::...
  function get_file_path (line 215) | pub fn get_file_path(url_: String) -> Result<(ObjectStoreUrl, Path), Par...
  function test_get_file_path (line 246) | fn test_get_file_path() {

FILE: native/core/src/parquet/util/memory.rs
  type MemTrackerPtr (line 35) | pub type MemTrackerPtr = Arc<MemTracker>;
  type WeakMemTrackerPtr (line 37) | pub type WeakMemTrackerPtr = Weak<MemTracker>;
  type MemTracker (line 41) | pub struct MemTracker {
    method new (line 51) | pub fn new() -> MemTracker {
    method memory_usage (line 59) | pub fn memory_usage(&self) -> i64 {
    method max_memory_usage (line 64) | pub fn max_memory_usage(&self) -> i64 {
    method alloc (line 70) | pub fn alloc(&self, num_bytes: i64) {
  method default (line 81) | fn default() -> Self {
  type ByteBuffer (line 90) | pub type ByteBuffer = Buffer<u8>;
  type ByteBufferPtr (line 92) | pub type ByteBufferPtr = BufferPtr<u8>;
  type Buffer (line 101) | pub struct Buffer<T: Clone> {
  function new (line 109) | pub fn new() -> Self {
  function with_mem_tracker (line 119) | pub fn with_mem_tracker(mut self, mc: MemTrackerPtr) -> Self {
  function data (line 127) | pub fn data(&self) -> &[T] {
  function set_data (line 133) | pub fn set_data(&mut self, new_data: Vec<T>) {
  function resize (line 148) | pub fn resize(&mut self, new_size: usize, init_value: T) {
  function clear (line 159) | pub fn clear(&mut self) {
  function reserve (line 167) | pub fn reserve(&mut self, additional_capacity: usize) {
  function consume (line 181) | pub fn consume(&mut self) -> BufferPtr<T> {
  function push (line 192) | pub fn push(&mut self, value: T) {
  function capacity (line 198) | pub fn capacity(&self) -> usize {
  function size (line 204) | pub fn size(&self) -> usize {
  function is_mem_tracked (line 210) | pub fn is_mem_tracked(&self) -> bool {
  function mem_tracker (line 218) | pub fn mem_tracker(&self) -> &MemTrackerPtr {
  method default (line 224) | fn default() -> Self {
  type Output (line 230) | type Output = T;
  function index (line 232) | fn index(&self, index: usize) -> &T {
  function index_mut (line 238) | fn index_mut(&mut self, index: usize) -> &mut T {
  method write (line 246) | fn write(&mut self, buf: &[u8]) -> IoResult<usize> {
  method flush (line 257) | fn flush(&mut self) -> IoResult<()> {
  function as_ref (line 264) | fn as_ref(&self) -> &[u8] {
  method drop (line 271) | fn drop(&mut self) {
  type BufferPtr (line 286) | pub struct BufferPtr<T> {
  function new (line 296) | pub fn new(v: Vec<T>) -> Self {
  function data (line 308) | pub fn data(&self) -> &[T] {
  function with_range (line 316) | pub fn with_range(mut self, start: usize, len: usize) -> Self {
  function set_range (line 325) | pub fn set_range(&mut self, start: usize, len: usize) {
  function with_mem_tracker (line 332) | pub fn with_mem_tracker(mut self, mc: MemTrackerPtr) -> Self {
  function start (line 339) | pub fn start(&self) -> usize {
  function len (line 345) | pub fn len(&self) -> usize {
  function is_empty (line 351) | pub fn is_empty(&self) -> bool {
  function is_mem_tracked (line 356) | pub fn is_mem_tracked(&self) -> bool {
  function all (line 362) | pub fn all(&self) -> BufferPtr<T> {
  function start_from (line 372) | pub fn start_from(&self, start: usize) -> BufferPtr<T> {
  function range (line 383) | pub fn range(&self, start: usize, len: usize) -> BufferPtr<T> {
  type Output (line 395) | type Output = T;
  function index (line 397) | fn index(&self, index: usize) -> &T {
  method fmt (line 404) | fn fmt(&self, f: &mut Formatter) -> FmtResult {
  method drop (line 410) | fn drop(&mut self) {
  function as_ref (line 421) | fn as_ref(&self) -> &[u8] {
  function test_byte_buffer_mem_tracker (line 431) | fn test_byte_buffer_mem_tracker() {
  function test_byte_ptr_mem_tracker (line 467) | fn test_byte_ptr_mem_tracker() {
  function test_byte_buffer (line 493) | fn test_byte_buffer() {
  function test_byte_ptr (line 532) | fn test_byte_ptr() {

FILE: native/core/src/parquet/util/test_common/page_util.rs
  type DataPageBuilder (line 37) | pub trait DataPageBuilder {
    method add_rep_levels (line 38) | fn add_rep_levels(&mut self, max_level: i16, rep_levels: &[i16]);
    method add_def_levels (line 39) | fn add_def_levels(&mut self, max_level: i16, def_levels: &[i16]);
    method add_values (line 40) | fn add_values<T: DataType>(&mut self, encoding: Encoding, values: &[T:...
    method add_indices (line 41) | fn add_indices(&mut self, indices: Bytes);
    method consume (line 42) | fn consume(self) -> Page;
    method add_rep_levels (line 101) | fn add_rep_levels(&mut self, max_levels: i16, rep_levels: &[i16]) {
    method add_def_levels (line 106) | fn add_def_levels(&mut self, max_levels: i16, def_levels: &[i16]) {
    method add_values (line 115) | fn add_values<T: DataType>(&mut self, encoding: Encoding, values: &[T:...
    method add_indices (line 132) | fn add_indices(&mut self, indices: Bytes) {
    method consume (line 137) | fn consume(self) -> Page {
  type DataPageBuilderImpl (line 51) | pub struct DataPageBuilderImpl {
    method new (line 65) | pub fn new(desc: ColumnDescPtr, num_values: u32, datapage_v2: bool) ->...
    method add_levels (line 78) | fn add_levels(&mut self, max_level: i16, levels: &[i16]) -> u32 {
  type InMemoryPageReader (line 166) | pub struct InMemoryPageReader<P: Iterator<Item = Page>> {
  function new (line 171) | pub fn new(pages: impl IntoIterator<Item = Page, IntoIter = P>) -> Self {
  method get_next_page (line 179) | fn get_next_page(&mut self) -> Result<Option<Page>> {
  method peek_next_page (line 183) | fn peek_next_page(&mut self) -> Result<Option<PageMetadata>> {
  method skip_next_page (line 187) | fn skip_next_page(&mut self) -> Result<()> {
  type Item (line 193) | type Item = Result<Page>;
  method next (line 195) | fn next(&mut self) -> Option<Self::Item> {
  type InMemoryPageIterator (line 202) | pub struct InMemoryPageIterator<I: Iterator<Item = Vec<Page>>> {
  function new (line 207) | pub fn new(pages: impl IntoIterator<Item = Vec<Page>, IntoIter = I>) -> ...
  type Item (line 215) | type Item = Result<Box<dyn PageReader>>;
  method next (line 217) | fn next(&mut self) -> Option<Self::Item> {
  function make_pages (line 227) | pub fn make_pages<T: DataType>(

FILE: native/core/src/parquet/util/test_common/rand_gen.rs
  function random_bytes (line 23) | pub fn random_bytes(n: usize) -> Vec<u8> {
  function random_bools (line 32) | pub fn random_bools(n: usize) -> Vec<bool> {
  function random_numbers (line 41) | pub fn random_numbers<T>(n: usize) -> Vec<T>
  function random_numbers_range (line 49) | pub fn random_numbers_range<T>(n: usize, low: T, high: T, result: &mut V...

FILE: native/fs-hdfs/build.rs
  function main (line 21) | fn main() {
  function build_ffi (line 31) | fn build_ffi(flags: &[String]) {
  function build_hdfs_lib (line 60) | fn build_hdfs_lib(flags: &[String]) {
  function build_minidfs_lib (line 89) | fn build_minidfs_lib(flags: &[String]) {
  function get_build_flags (line 111) | fn get_build_flags() -> Vec<String> {
  function get_java_dependency (line 120) | fn get_java_dependency() -> Vec<String> {
  function get_hdfs_file_path (line 144) | fn get_hdfs_file_path(filename: &'static str) -> String {
  function get_hdfs_file_os_path (line 148) | fn get_hdfs_file_os_path(filename: &'static str) -> String {
  function get_hdfs_source_dir (line 152) | fn get_hdfs_source_dir() -> &'static str {
  function get_minidfs_file_path (line 156) | fn get_minidfs_file_path(filename: &'static str) -> String {

FILE: native/fs-hdfs/c_src/libhdfs/exception.c
  type ExceptionInfo (line 30) | struct ExceptionInfo {
  type ExceptionInfo (line 36) | struct ExceptionInfo
  function getExceptionInfo (line 94) | void getExceptionInfo(const char *excName, int noPrintFlags,
  function printExceptionAndFreeV (line 149) | int printExceptionAndFreeV(JNIEnv *env, jthrowable exc, int noPrintFlags,
  function printExceptionAndFree (line 206) | int printExceptionAndFree(JNIEnv *env, jthrowable exc, int noPrintFlags,
  function printPendingExceptionAndFree (line 218) | int printPendingExceptionAndFree(JNIEnv *env, int noPrintFlags,
  function jthrowable (line 241) | jthrowable getPendingExceptionAndClear(JNIEnv *env)
  function jthrowable (line 250) | jthrowable newRuntimeError(JNIEnv *env, const char *fmt, ...)

FILE: native/fs-hdfs/c_src/libhdfs/hdfs.c
  type hdfsStreamType (line 67) | enum hdfsStreamType
  type hdfsFile_internal (line 77) | struct hdfsFile_internal {
  type hdfsExtendedFileInfo (line 88) | struct hdfsExtendedFileInfo {
  function hdfsFileIsOpenForRead (line 92) | int hdfsFileIsOpenForRead(hdfsFile file)
  function hdfsGetHedgedReadMetrics (line 97) | int hdfsGetHedgedReadMetrics(hdfsFS fs, struct hdfsHedgedReadMetrics **m...
  function hdfsFreeHedgedReadMetrics (line 173) | void hdfsFreeHedgedReadMetrics(struct hdfsHedgedReadMetrics *metrics)
  function hdfsFileGetReadStatistics (line 178) | int hdfsFileGetReadStatistics(hdfsFile file,
  function hdfsReadStatisticsGetRemoteBytesRead (line 263) | int64_t hdfsReadStatisticsGetRemoteBytesRead(
  function hdfsFileClearReadStatistics (line 269) | int hdfsFileClearReadStatistics(hdfsFile file)
  function hdfsFileFreeReadStatistics (line 300) | void hdfsFileFreeReadStatistics(struct hdfsReadStatistics *stats)
  function hdfsFileIsOpenForWrite (line 305) | int hdfsFileIsOpenForWrite(hdfsFile file)
  function hdfsFileUsesDirectRead (line 310) | int hdfsFileUsesDirectRead(hdfsFile file)
  function hdfsFileDisableDirectRead (line 315) | void hdfsFileDisableDirectRead(hdfsFile file)
  function hdfsDisableDomainSocketSecurity (line 320) | int hdfsDisableDomainSocketSecurity(void)
  type hdfsJniEnv (line 343) | typedef struct
  function jthrowable (line 355) | static jthrowable constructNewObjectOfPath(JNIEnv *env, const char *path,
  function jthrowable (line 376) | static jthrowable hadoopConfGetStr(JNIEnv *env, jobject jConfiguration,
  function hdfsConfGetStr (line 399) | int hdfsConfGetStr(const char *key, char **val)
  function hdfsConfStrFree (line 431) | void hdfsConfStrFree(char *val)
  function jthrowable (line 436) | static jthrowable hadoopConfGetInt(JNIEnv *env, jobject jConfiguration,
  function hdfsConfGetInt (line 456) | int hdfsConfGetInt(const char *key, int32_t *val)
  type hdfsBuilderConfOpt (line 488) | struct hdfsBuilderConfOpt {
  type hdfsBuilder (line 494) | struct hdfsBuilder {
  type hdfsBuilder (line 503) | struct hdfsBuilder
  type hdfsBuilder (line 505) | struct hdfsBuilder
  type hdfsBuilder (line 505) | struct hdfsBuilder
  function hdfsBuilderConfSetStr (line 513) | int hdfsBuilderConfSetStr(struct hdfsBuilder *bld, const char *key,
  function hdfsFreeBuilder (line 529) | void hdfsFreeBuilder(struct hdfsBuilder *bld)
  function hdfsBuilderSetForceNewInstance (line 542) | void hdfsBuilderSetForceNewInstance(struct hdfsBuilder *bld)
  function hdfsBuilderSetNameNode (line 547) | void hdfsBuilderSetNameNode(struct hdfsBuilder *bld, const char *nn)
  function hdfsBuilderSetNameNodePort (line 552) | void hdfsBuilderSetNameNodePort(struct hdfsBuilder *bld, tPort port)
  function hdfsBuilderSetUserName (line 557) | void hdfsBuilderSetUserName(struct hdfsBuilder *bld, const char *userName)
  function hdfsBuilderSetKerbTicketCachePath (line 562) | void hdfsBuilderSetKerbTicketCachePath(struct hdfsBuilder *bld,
  function hdfsFS (line 568) | hdfsFS hdfsConnect(const char *host, tPort port)
  function hdfsFS (line 579) | hdfsFS hdfsConnectNewInstance(const char *host, tPort port)
  function hdfsFS (line 590) | hdfsFS hdfsConnectAsUser(const char *host, tPort port, const char *user)
  function hdfsFS (line 602) | hdfsFS hdfsConnectAsUserNewInstance(const char *host, tPort port,
  function calcEffectiveURI (line 631) | static int calcEffectiveURI(struct hdfsBuilder *bld, char ** uri)
  type hdfsBuilder (line 671) | struct hdfsBuilder
  function hdfsFS (line 681) | hdfsFS hdfsBuilderConnect(struct hdfsBuilder *bld)
  function hdfsDisconnect (line 856) | int hdfsDisconnect(hdfsFS fs)
  function jthrowable (line 907) | static jthrowable getDefaultBlockSize(JNIEnv *env, jobject jFS,
  function hdfsFile (line 921) | hdfsFile hdfsOpenFile(hdfsFS fs, const char *path, int flags,
  type hdfsStreamBuilder (line 937) | struct hdfsStreamBuilder {
  type hdfsStreamBuilder (line 946) | struct hdfsStreamBuilder
  type hdfsStreamBuilder (line 950) | struct hdfsStreamBuilder
  type hdfsStreamBuilder (line 954) | struct hdfsStreamBuilder
  function hdfsStreamBuilderFree (line 969) | void hdfsStreamBuilderFree(struct hdfsStreamBuilder *bld)
  function hdfsStreamBuilderSetBufferSize (line 974) | int hdfsStreamBuilderSetBufferSize(struct hdfsStreamBuilder *bld,
  function hdfsStreamBuilderSetReplication (line 985) | int hdfsStreamBuilderSetReplication(struct hdfsStreamBuilder *bld,
  function hdfsStreamBuilderSetDefaultBlockSize (line 996) | int hdfsStreamBuilderSetDefaultBlockSize(struct hdfsStreamBuilder *bld,
  function hdfsFile (line 1007) | static hdfsFile hdfsOpenFileImpl(hdfsFS fs, const char *path, int flags,
  function hdfsFile (line 1208) | hdfsFile hdfsStreamBuilderBuild(struct hdfsStreamBuilder *bld)
  function hdfsTruncateFile (line 1218) | int hdfsTruncateFile(hdfsFS fs, const char* path, tOffset newlength)
  function hdfsUnbufferFile (line 1255) | int hdfsUnbufferFile(hdfsFile file)
  function hdfsCloseFile (line 1283) | int hdfsCloseFile(hdfsFS fs, hdfsFile file)
  function hdfsExists (line 1334) | int hdfsExists(hdfsFS fs, const char *path)
  function readPrepare (line 1375) | static int readPrepare(JNIEnv* env, hdfsFS fs, hdfsFile f,
  function tSize (line 1396) | tSize hdfsRead(hdfsFS fs, hdfsFile f, void* buffer, tSize length)
  function tSize (line 1467) | tSize readDirect(hdfsFS fs, hdfsFile f, void* buffer, tSize length)
  function tSize (line 1508) | tSize hdfsPread(hdfsFS fs, hdfsFile f, tOffset position,
  function tSize (line 1576) | tSize hdfsWrite(hdfsFS fs, hdfsFile f, const void* buffer, tSize length)
  function hdfsSeek (line 1647) | int hdfsSeek(hdfsFS fs, hdfsFile f, tOffset desiredPos)
  function tOffset (line 1682) | tOffset hdfsTell(hdfsFS fs, hdfsFile f)
  function hdfsFlush (line 1721) | int hdfsFlush(hdfsFS fs, hdfsFile f)
  function hdfsHFlush (line 1750) | int hdfsHFlush(hdfsFS fs, hdfsFile f)
  function hdfsHSync (line 1779) | int hdfsHSync(hdfsFS fs, hdfsFile f)
  function hdfsAvailable (line 1808) | int hdfsAvailable(hdfsFS fs, hdfsFile f)
  function hdfsCopyImpl (line 1842) | static int hdfsCopyImpl(hdfsFS srcFS, const char *src, hdfsFS dstFS,
  function hdfsCopy (line 1918) | int hdfsCopy(hdfsFS srcFS, const char *src, hdfsFS dstFS, const char *dst)
  function hdfsMove (line 1923) | int hdfsMove(hdfsFS srcFS, const char *src, hdfsFS dstFS, const char *dst)
  function hdfsDelete (line 1928) | int hdfsDelete(hdfsFS fs, const char *path, int recursive)
  function hdfsRename (line 1973) | int hdfsRename(hdfsFS fs, const char *oldPath, const char *newPath)
  function hdfsRenameOverwrite (line 2029) | int hdfsRenameOverwrite(hdfsFS fs, const char *oldPath, const char *newP...
  function hdfsSetWorkingDirectory (line 2186) | int hdfsSetWorkingDirectory(hdfsFS fs, const char *path)
  function hdfsCreateDirectory (line 2227) | int hdfsCreateDirectory(hdfsFS fs, const char *path)
  function hdfsSetReplication (line 2278) | int hdfsSetReplication(hdfsFS fs, const char *path, int16_t replication)
  function hdfsChown (line 2324) | int hdfsChown(hdfsFS fs, const char *path, const char *owner, const char...
  function hdfsChmod (line 2393) | int hdfsChmod(hdfsFS fs, const char *path, short mode)
  function hdfsUtime (line 2453) | int hdfsUtime(hdfsFS fs, const char *path, tTime mtime, tTime atime)
  type hadoopRzOptions (line 2503) | struct hadoopRzOptions
  type hadoopRzOptions (line 2511) | struct hadoopRzOptions
  type hadoopRzOptions (line 2513) | struct hadoopRzOptions
  type hadoopRzOptions (line 2522) | struct hadoopRzOptions
  function hadoopRzOptionsClearCached (line 2530) | static void hadoopRzOptionsClearCached(JNIEnv *env,
  function hadoopRzOptionsSetSkipChecksum (line 2540) | int hadoopRzOptionsSetSkipChecksum(
  function hadoopRzOptionsSetByteBufferPool (line 2554) | int hadoopRzOptionsSetByteBufferPool(
  function hadoopRzOptionsFree (line 2588) | void hadoopRzOptionsFree(struct hadoopRzOptions *opts)
  type hadoopRzBuffer (line 2603) | struct hadoopRzBuffer
  function jthrowable (line 2611) | static jthrowable hadoopRzOptionsGetEnumSet(JNIEnv *env,
  function hadoopReadZeroExtractBuffer (line 2662) | static int hadoopReadZeroExtractBuffer(JNIEnv *env,
  function translateZCRException (line 2750) | static int translateZCRException(JNIEnv *env, jthrowable exc)
  type hadoopRzBuffer (line 2775) | struct hadoopRzBuffer
  type hadoopRzOptions (line 2776) | struct hadoopRzOptions
  type hadoopRzBuffer (line 2782) | struct hadoopRzBuffer
  type hadoopRzBuffer (line 2795) | struct hadoopRzBuffer
  function hadoopRzBufferLength (line 2848) | int32_t hadoopRzBufferLength(const struct hadoopRzBuffer *buffer)
  type hadoopRzBuffer (line 2853) | struct hadoopRzBuffer
  function hadoopRzBufferFree (line 2858) | void hadoopRzBufferFree(hdfsFile file, struct hadoopRzBuffer *buffer)
  function hdfsFreeHosts (line 3047) | void hdfsFreeHosts(char ***blockHosts)
  function tOffset (line 3060) | tOffset hdfsGetDefaultBlockSize(hdfsFS fs)
  function tOffset (line 3088) | tOffset hdfsGetDefaultBlockSizeAtPath(hdfsFS fs, const char *path)
  function tOffset (line 3122) | tOffset hdfsGetCapacity(hdfsFS fs)
  function tOffset (line 3162) | tOffset hdfsGetUsed(hdfsFS fs)
  function getExtendedFileInfoOffset (line 3219) | static size_t getExtendedFileInfoOffset(const char *str)
  type hdfsExtendedFileInfo (line 3225) | struct hdfsExtendedFileInfo
  type hdfsExtendedFileInfo (line 3228) | struct hdfsExtendedFileInfo
  function jthrowable (line 3232) | static jthrowable
  function jthrowable (line 3382) | static jthrowable
  function hdfsFileInfo (line 3425) | hdfsFileInfo* hdfsListDirectory(hdfsFS fs, const char *path, int *numEnt...
  function hdfsFileInfo (line 3521) | hdfsFileInfo *hdfsGetPathInfo(hdfsFS fs, const char *path)
  function hdfsFreeFileInfoEntry (line 3565) | static void hdfsFreeFileInfoEntry(hdfsFileInfo *hdfsFileInfo)
  function hdfsFreeFileInfo (line 3573) | void hdfsFreeFileInfo(hdfsFileInfo *hdfsFileInfo, int numEntries)
  function hdfsFileIsEncrypted (line 3585) | int hdfsFileIsEncrypted(hdfsFileInfo *fileInfo)

FILE: native/fs-hdfs/c_src/libhdfs/hdfs.h
  type hdfsBuilder (line 74) | struct hdfsBuilder
  type tSize (line 75) | typedef int32_t   tSize;
  type time_t (line 76) | typedef time_t    tTime;
  type tOffset (line 77) | typedef int64_t   tOffset;
  type tPort (line 78) | typedef uint16_t  tPort;
  type tObjectKind (line 79) | typedef enum tObjectKind {
  type hdfsStreamBuilder (line 83) | struct hdfsStreamBuilder
  type hdfs_internal (line 89) | struct hdfs_internal
  type hdfs_internal (line 90) | struct hdfs_internal
  type hdfsFile_internal (line 92) | struct hdfsFile_internal
  type hdfsFile_internal (line 93) | struct hdfsFile_internal
  type hadoopRzOptions (line 95) | struct hadoopRzOptions
  type hadoopRzBuffer (line 97) | struct hadoopRzBuffer
  type hdfsReadStatistics (line 117) | struct hdfsReadStatistics {
  type hdfsReadStatistics (line 139) | struct hdfsReadStatistics
  type hdfsReadStatistics (line 148) | struct hdfsReadStatistics
  type hdfsReadStatistics (line 170) | struct hdfsReadStatistics
  type hdfsHedgedReadMetrics (line 172) | struct hdfsHedgedReadMetrics {
  type hdfsHedgedReadMetrics (line 191) | struct hdfsHedgedReadMetrics
  type hdfsHedgedReadMetrics (line 199) | struct hdfsHedgedReadMetrics
  type hdfsBuilder (line 264) | struct hdfsBuilder
  type hdfsBuilder (line 281) | struct hdfsBuilder
  type hdfsBuilder (line 301) | struct hdfsBuilder
  type hdfsBuilder (line 310) | struct hdfsBuilder
  type hdfsBuilder (line 319) | struct hdfsBuilder
  type hdfsBuilder (line 330) | struct hdfsBuilder
  type hdfsBuilder (line 342) | struct hdfsBuilder
  type hdfsBuilder (line 356) | struct hdfsBuilder
  type hdfsStreamBuilder (line 449) | struct hdfsStreamBuilder
  type hdfsStreamBuilder (line 460) | struct hdfsStreamBuilder
  type hdfsStreamBuilder (line 476) | struct hdfsStreamBuilder
  type hdfsStreamBuilder (line 493) | struct hdfsStreamBuilder
  type hdfsStreamBuilder (line 506) | struct hdfsStreamBuilder
  type hdfsFileInfo (line 774) | typedef struct  {
  type hadoopRzOptions (line 963) | struct hadoopRzOptions
  type hadoopRzOptions (line 981) | struct hadoopRzOptions
  type hadoopRzOptions (line 990) | struct hadoopRzOptions
  type hadoopRzOptions (line 1016) | struct hadoopRzOptions
  type hadoopRzBuffer (line 1025) | struct hadoopRzBuffer
  type hadoopRzBuffer (line 1038) | struct hadoopRzBuffer
  type hadoopRzBuffer (line 1048) | struct hadoopRzBuffer

FILE: native/fs-hdfs/c_src/libhdfs/htable.c
  type htable_pair (line 27) | struct htable_pair {
  type htable (line 35) | struct htable {
  function htable_insert_internal (line 54) | static void htable_insert_internal(struct htable_pair *nelem,
  function htable_realloc (line 74) | static int htable_realloc(struct htable *htable, uint32_t new_capacity)
  function round_up_to_power_of_2 (line 97) | static uint32_t round_up_to_power_of_2(uint32_t i)
  type htable (line 112) | struct htable
  type htable (line 115) | struct htable
  function htable_visit (line 135) | void htable_visit(struct htable *htable, visitor_fn_t fun, void *ctx)
  function htable_free (line 147) | void htable_free(struct htable *htable)
  function htable_put (line 155) | int htable_put(struct htable *htable, void *key, void *val)
  function htable_get_internal (line 185) | static int htable_get_internal(const struct htable *htable,
  type htable (line 213) | struct htable
  function htable_pop (line 223) | void htable_pop(struct htable *htable, const void *key,
  function htable_used (line 260) | uint32_t htable_used(const struct htable *htable)
  function htable_capacity (line 265) | uint32_t htable_capacity(const struct htable *htable)
  function ht_hash_string (line 270) | uint32_t ht_hash_string(const void *str, uint32_t max)
  function ht_compare_string (line 282) | int ht_compare_string(const void *a, const void *b)

FILE: native/fs-hdfs/c_src/libhdfs/htable.h
  type htable (line 28) | struct htable
  type htable (line 59) | struct htable
  type htable (line 71) | struct htable
  type htable (line 81) | struct htable
  type htable (line 96) | struct htable
  type htable (line 106) | struct htable
  type htable (line 118) | struct htable
  type htable (line 128) | struct htable
  type htable (line 137) | struct htable

FILE: native/fs-hdfs/c_src/libhdfs/jni_helper.c
  type htable (line 32) | struct htable
  function destroyLocalReference (line 59) | void destroyLocalReference(JNIEnv *env, jobject jObject)
  function jthrowable (line 65) | static jthrowable validateMethodType(JNIEnv *env, MethType methType)
  function jthrowable (line 74) | jthrowable newJavaStr(JNIEnv *env, const char *str, jstring *out)
  function jthrowable (line 94) | jthrowable newCStr(JNIEnv *env, jstring jstr, char **out)
  function jthrowable (line 111) | jthrowable invokeMethod(JNIEnv *env, jvalue *retval, MethType methType,
  function jthrowable (line 205) | jthrowable constructNewObjectOfClass(JNIEnv *env, jobject *out, const ch...
  function jthrowable (line 231) | jthrowable methodIdFromClass(const char *className, const char *methName,
  function jthrowable (line 260) | jthrowable globalClassReference(const char *className, JNIEnv *env, jcla...
  function jthrowable (line 308) | jthrowable classNameOfObject(jobject jobj, JNIEnv *env, char **name)
  function wildcard_expandPath (line 370) | static ssize_t wildcard_expandPath(const char* path, char* expanded)
  function getClassPath_helper (line 477) | static ssize_t getClassPath_helper(const char *classpath, char* expanded...
  function JNIEnv (line 642) | static JNIEnv* getGlobalJNIEnv(void)
  function JNIEnv (line 774) | JNIEnv* getJNIEnv(void)
  type ThreadLocalState (line 825) | struct ThreadLocalState
  type ThreadLocalState (line 840) | struct ThreadLocalState
  function setTLSExceptionStrings (line 853) | void setTLSExceptionStrings(const char *rootCause, const char *stackTrace)
  function javaObjectIsOfClass (line 872) | int javaObjectIsOfClass(JNIEnv *env, jobject obj, const char *name)
  function jthrowable (line 888) | jthrowable hadoopConfSetStr(JNIEnv *env, jobject jConfiguration,
  function jthrowable (line 912) | jthrowable fetchEnumInstance(JNIEnv *env, const char *className,

FILE: native/fs-hdfs/c_src/libhdfs/jni_helper.h
  type MethType (line 58) | typedef enum {

FILE: native/fs-hdfs/c_src/libhdfs/os/posix/mutexes.c
  function init (line 28) | __attribute__((constructor)) static void init() {
  function mutexLock (line 34) | int mutexLock(mutex *m) {
  function mutexUnlock (line 43) | int mutexUnlock(mutex *m) {

FILE: native/fs-hdfs/c_src/libhdfs/os/posix/platform.h
  type pthread_mutex_t (line 31) | typedef pthread_mutex_t mutex;
  type pthread_t (line 32) | typedef pthread_t threadId;

FILE: native/fs-hdfs/c_src/libhdfs/os/posix/thread.c
  function threadCreate (line 37) | int threadCreate(thread *t) {
  function threadJoin (line 46) | int threadJoin(const thread *t) {

FILE: native/fs-hdfs/c_src/libhdfs/os/posix/thread_local_storage.c
  function hdfsThreadDestructor (line 38) | void hdfsThreadDestructor(void *v)
  type ThreadLocalState (line 65) | struct ThreadLocalState
  type ThreadLocalState (line 67) | struct ThreadLocalState
  type ThreadLocalState (line 68) | struct ThreadLocalState
  type ThreadLocalState (line 68) | struct ThreadLocalState
  function threadLocalStorageGet (line 79) | int threadLocalStorageGet(struct ThreadLocalState **state)
  function threadLocalStorageSet (line 96) | int threadLocalStorageSet(struct ThreadLocalState *state)

FILE: native/fs-hdfs/c_src/libhdfs/os/thread.h
  type thread (line 32) | typedef struct {

FILE: native/fs-hdfs/c_src/libhdfs/os/thread_local_storage.h
  type ThreadLocalState (line 54) | struct ThreadLocalState {
  type ThreadLocalState (line 76) | struct ThreadLocalState
  type ThreadLocalState (line 90) | struct ThreadLocalState
  type ThreadLocalState (line 98) | struct ThreadLocalState

FILE: native/fs-hdfs/c_src/libminidfs/native_mini_dfs.c
  type NativeMiniDfsCluster (line 44) | struct NativeMiniDfsCluster {
  function hdfsDisableDomainSocketSecurity (line 56) | static int hdfsDisableDomainSocketSecurity(void)
  function jthrowable (line 75) | static jthrowable nmdConfigureShortCircuit(JNIEnv *env,
  type NativeMiniDfsCluster (line 106) | struct NativeMiniDfsCluster
  type NativeMiniDfsConf (line 106) | struct NativeMiniDfsConf
  type NativeMiniDfsCluster (line 108) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 119) | struct NativeMiniDfsCluster
  function nmdFree (line 215) | void nmdFree(struct NativeMiniDfsCluster* cl)
  function nmdShutdownInner (line 227) | int nmdShutdownInner(struct NativeMiniDfsCluster* cl, jboolean deleteDfs...
  function nmdShutdown (line 246) | int nmdShutdown(struct NativeMiniDfsCluster *cl) {
  function nmdShutdownClean (line 250) | int nmdShutdownClean(struct NativeMiniDfsCluster *cl) {
  function nmdWaitClusterUp (line 254) | int nmdWaitClusterUp(struct NativeMiniDfsCluster *cl)
  function nmdGetNameNodePort (line 272) | int nmdGetNameNodePort(const struct NativeMiniDfsCluster *cl)
  function nmdGetNameNodeHttpAddress (line 294) | int nmdGetNameNodeHttpAddress(const struct NativeMiniDfsCluster *cl,
  type NativeMiniDfsCluster (line 360) | struct NativeMiniDfsCluster
  function nmdConfigureHdfsBuilder (line 368) | int nmdConfigureHdfsBuilder(struct NativeMiniDfsCluster *cl,

FILE: native/fs-hdfs/c_src/libminidfs/native_mini_dfs.h
  type hdfsBuilder (line 28) | struct hdfsBuilder
  type NativeMiniDfsCluster (line 32) | struct NativeMiniDfsCluster
  type NativeMiniDfsConf (line 39) | struct NativeMiniDfsConf {
  type NativeMiniDfsCluster (line 68) | struct NativeMiniDfsCluster
  type NativeMiniDfsConf (line 68) | struct NativeMiniDfsConf
  type NativeMiniDfsCluster (line 78) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 88) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 98) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 105) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 114) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 128) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 138) | struct NativeMiniDfsCluster
  type NativeMiniDfsCluster (line 147) | struct NativeMiniDfsCluster
  type hdfsBuilder (line 148) | struct hdfsBuilder

FILE: native/fs-hdfs/src/err.rs
  type HdfsErr (line 23) | pub enum HdfsErr {
  method fmt (line 36) | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {

FILE: native/fs-hdfs/src/hdfs.rs
  constant O_RDONLY (line 36) | const O_RDONLY: c_int = 0;
  constant O_WRONLY (line 37) | const O_WRONLY: c_int = 1;
  constant O_APPEND (line 38) | const O_APPEND: c_int = 8;
  function get_hdfs_by_full_path (line 46) | pub fn get_hdfs_by_full_path(path: &str) -> Result<Arc<HdfsFs>, HdfsErr> {
  function get_hdfs (line 51) | pub fn get_hdfs() -> Result<Arc<HdfsFs>, HdfsErr> {
  function unload_hdfs_cache_by_full_path (line 56) | pub fn unload_hdfs_cache_by_full_path(
  function unload_hdfs_cache (line 63) | pub fn unload_hdfs_cache(hdfs: Arc<HdfsFs>) -> Result<Option<Arc<HdfsFs>...
  type HdfsManager (line 69) | struct HdfsManager {
    method new (line 74) | fn new() -> Self {
    method get_hdfs_by_full_path (line 80) | fn get_hdfs_by_full_path(&self, path: &str) -> Result<Arc<HdfsFs>, Hdf...
    method remove_hdfs_by_full_path (line 123) | fn remove_hdfs_by_full_path(
    method remove_hdfs (line 135) | fn remove_hdfs(&self, hdfs: Arc<HdfsFs>) -> Result<Option<Arc<HdfsFs>>...
    method remove_hdfs_inner (line 139) | fn remove_hdfs_inner(&self, hdfs_key: &str) -> Result<Option<Arc<HdfsF...
  type HdfsFs (line 149) | pub struct HdfsFs {
    method url (line 164) | pub fn url(&self) -> &str {
    method raw (line 170) | pub fn raw(&self) -> hdfsFS {
    method new_hdfs_file (line 175) | fn new_hdfs_file(&self, path: &str, file: hdfsFile) -> Result<HdfsFile...
    method open (line 192) | pub fn open(&self, path: &str) -> Result<HdfsFile, HdfsErr> {
    method open_with_buf_size (line 197) | pub fn open_with_buf_size(
    method get_file_status (line 218) | pub fn get_file_status(&self, path: &str) -> Result<FileStatus, HdfsEr...
    method list_status (line 234) | pub fn list_status(&self, path: &str) -> Result<Vec<FileStatus>, HdfsE...
    method default_blocksize (line 258) | pub fn default_blocksize(&self) -> Result<usize, HdfsErr> {
    method block_size (line 271) | pub fn block_size(&self, path: &str) -> Result<usize, HdfsErr> {
    method capacity (line 287) | pub fn capacity(&self) -> Result<usize, HdfsErr> {
    method used (line 298) | pub fn used(&self) -> Result<usize, HdfsErr> {
    method exist (line 309) | pub fn exist(&self, path: &str) -> bool {
    method get_hosts (line 320) | pub fn get_hosts(
    method create (line 346) | pub fn create(&self, path: &str) -> Result<HdfsFile, HdfsErr> {
    method create_with_overwrite (line 351) | pub fn create_with_overwrite(
    method create_with_params (line 359) | pub fn create_with_params(
    method chmod (line 387) | pub fn chmod(&self, path: &str, mode: i16) -> bool {
    method chown (line 394) | pub fn chown(&self, path: &str, owner: &str, group: &str) -> bool {
    method append (line 409) | pub fn append(&self, path: &str) -> Result<HdfsFile, HdfsErr> {
    method mkdir (line 423) | pub fn mkdir(&self, path: &str) -> Result<bool, HdfsErr> {
    method rename (line 438) | pub fn rename(
    method set_replication (line 467) | pub fn set_replication(&self, path: &str, num: i16) -> Result<bool, Hd...
    method delete (line 482) | pub fn delete(&self, path: &str, recursive: bool) -> Result<bool, Hdfs...
  method fmt (line 156) | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  type HdfsFile (line 504) | pub struct HdfsFile {
    method fs (line 523) | pub fn fs(&self) -> &HdfsFs {
    method path (line 529) | pub fn path(&self) -> &str {
    method available (line 533) | pub fn available(&self) -> Result<bool, HdfsErr> {
    method close (line 545) | pub fn close(&self) -> Result<bool, HdfsErr> {
    method flush (line 557) | pub fn flush(&self) -> bool {
    method hflush (line 563) | pub fn hflush(&self) -> bool {
    method hsync (line 570) | pub fn hsync(&self) -> bool {
    method is_readable (line 575) | pub fn is_readable(&self) -> bool {
    method is_writable (line 580) | pub fn is_writable(&self) -> bool {
    method get_file_status (line 585) | pub fn get_file_status(&self) -> Result<FileStatus, HdfsErr> {
    method pos (line 590) | pub fn pos(&self) -> Result<u64, HdfsErr> {
    method read (line 604) | pub fn read(&self, buf: &mut [u8]) -> Result<i32, HdfsErr> {
    method read_with_pos (line 628) | pub fn read_with_pos(&self, pos: i64, buf: &mut [u8]) -> Result<i32, H...
    method seek (line 653) | pub fn seek(&self, offset: u64) -> bool {
    method write (line 658) | pub fn write(&self, buf: &[u8]) -> Result<i32, HdfsErr> {
  method fmt (line 512) | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  type FileStatus (line 689) | pub struct FileStatus {
    method new (line 698) | fn new(ptr: *const hdfsFileInfo) -> FileStatus {
    method from_array (line 709) | fn from_array(raw: Arc<HdfsFileInfoPtr>, idx: u32) -> FileStatus {
    method ptr (line 719) | fn ptr(&self) -> *const hdfsFileInfo {
    method name (line 725) | pub fn name(&self) -> &str {
    method is_file (line 732) | pub fn is_file(&self) -> bool {
    method is_directory (line 741) | pub fn is_directory(&self) -> bool {
    method owner (line 750) | pub fn owner(&self) -> &str {
    method group (line 757) | pub fn group(&self) -> &str {
    method permission (line 764) | pub fn permission(&self) -> i16 {
    method len (line 771) | pub fn len(&self) -> usize {
    method block_size (line 777) | pub fn block_size(&self) -> usize {
    method replica_count (line 783) | pub fn replica_count(&self) -> i16 {
    method last_modified (line 789) | pub fn last_modified(&self) -> time_t {
    method last_access (line 795) | pub fn last_access(&self) -> time_t {
  type HdfsFileInfoPtr (line 801) | struct HdfsFileInfoPtr {
    method new (line 814) | fn new(ptr: *const hdfsFileInfo) -> HdfsFileInfoPtr {
    method new_array (line 818) | pub fn new_array(ptr: *const hdfsFileInfo, len: i32) -> HdfsFileInfoPtr {
  method drop (line 808) | fn drop(&mut self) {
  type BlockHosts (line 829) | pub struct BlockHosts {
  method drop (line 834) | fn drop(&mut self) {
  constant LOCAL_FS_SCHEME (line 839) | pub const LOCAL_FS_SCHEME: &str = "file";
  constant HDFS_FS_SCHEME (line 840) | pub const HDFS_FS_SCHEME: &str = "hdfs";
  constant VIEW_FS_SCHEME (line 841) | pub const VIEW_FS_SCHEME: &str = "viewfs";
  function get_namenode_uri (line 844) | fn get_namenode_uri(path: &str) -> Result<String, HdfsErr> {
  function get_uri (line 868) | pub fn get_uri(path: &str) -> Result<String, HdfsErr> {
  function test_hdfs_default (line 891) | fn test_hdfs_default() {
  function test_hdfs (line 908) | fn test_hdfs() {
  function test_list_status_with_empty_dir (line 981) | fn test_list_status_with_empty_dir() {
  function test_write_read (line 1021) | fn test_write_read() {

FILE: native/fs-hdfs/src/minidfs.rs
  function get_dfs (line 52) | pub fn get_dfs() -> Arc<MiniDFS> {
  type MiniDFS (line 56) | pub struct MiniDFS {
    method new (line 71) | fn new() -> MiniDFS {
    method start (line 76) | fn start(conf: &MiniDfsConf) -> Option<MiniDFS> {
    method stop (line 83) | fn stop(&self) {
    method wait_for_clusterup (line 93) | fn wait_for_clusterup(&self) -> bool {
    method set_hdfs_builder (line 98) | pub fn set_hdfs_builder(&self, builder: *mut hdfsBuilder) -> bool {
    method namenode_port (line 102) | pub fn namenode_port(&self) -> Option<i32> {
    method namenode_addr (line 109) | pub fn namenode_addr(&self) -> String {
    method namenode_http_addr (line 117) | pub fn namenode_http_addr(&self) -> Option<(&str, i32)> {
    method get_hdfs (line 133) | pub fn get_hdfs(&self) -> Result<Arc<HdfsFs>, HdfsErr> {
  method drop (line 65) | fn drop(&mut self) {
  function new_mini_dfs_conf (line 138) | fn new_mini_dfs_conf() -> MiniDfsConf {

FILE: native/fs-hdfs/src/util.rs
  type HdfsUtil (line 29) | pub struct HdfsUtil;
    method copy_file_to_hdfs (line 33) | pub fn copy_file_to_hdfs(
    method copy_file_from_hdfs (line 47) | pub fn copy_file_from_hdfs(
    method mv_file_to_hdfs (line 61) | pub fn mv_file_to_hdfs(
    method mv_file_from_hdfs (line 75) | pub fn mv_file_from_hdfs(
    method copy (line 95) | pub fn copy(
    method mv (line 128) | pub fn mv(
  function test_from_local (line 164) | fn test_from_local() {
  function test_to_local (line 208) | fn test_to_local() {

FILE: native/fs-hdfs/src/walkdir/mod.rs
  type HdfsWalkDir (line 28) | pub struct HdfsWalkDir {
    method new (line 35) | pub fn new(root: String) -> Result<Self, HdfsErr> {
    method new_with_hdfs (line 40) | pub fn new_with_hdfs(root: String, hdfs: Arc<HdfsFs>) -> Self {
    method min_depth (line 56) | pub fn min_depth(mut self, depth: usize) -> Self {
    method max_depth (line 73) | pub fn max_depth(mut self, depth: usize) -> Self {
  type Item (line 83) | type Item = Result<FileStatus, HdfsErr>;
  type IntoIter (line 84) | type IntoIter = TreeIter<String, FileStatus, HdfsErr>;
  method into_iter (line 86) | fn into_iter(self) -> TreeIter<String, FileStatus, HdfsErr> {
  type HdfsTreeManager (line 97) | struct HdfsTreeManager {
    method to_value (line 102) | fn to_value(&self, v: String) -> Result<FileStatus, HdfsErr> {
    method get_children (line 106) | fn get_children(&self, n: &FileStatus) -> Result<Vec<FileStatus>, Hdfs...
    method is_leaf (line 110) | fn is_leaf(&self, n: &FileStatus) -> bool {
  function test_hdfs_file_list (line 124) | fn test_hdfs_file_list() -> Result<(), HdfsErr> {
  function set_up_hdfs_env (line 181) | fn set_up_hdfs_env() -> Result<Arc<HdfsFs>, HdfsErr> {

FILE: native/fs-hdfs/src/walkdir/tree_iter.rs
  type TreeIter (line 21) | pub struct TreeIter<V, N, E> {
  function new (line 37) | pub fn new(
  function next_item (line 51) | fn next_item(&mut self) -> Result<Option<N>, E> {
  type Item (line 116) | type Item = Result<N, E>;
  method next (line 118) | fn next(&mut self) -> Option<Self::Item> {
  type TreeNode (line 123) | pub struct TreeNode<N> {
  type IterOptions (line 128) | pub struct IterOptions {
    method fmt (line 134) | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
  type TreeManager (line 142) | pub trait TreeManager<V, N, E>: Send + Sync {
    method to_value (line 143) | fn to_value(&self, v: V) -> Result<N, E>;
    method get_children (line 145) | fn get_children(&self, n: &N) -> Result<Vec<N>, E>;
    method is_leaf (line 147) | fn is_leaf(&self, n: &N) -> bool;
  function test_tree_iter (line 158) | fn test_tree_iter() -> Result<(), Error> {
  function create_test_tree_manager (line 213) | fn create_test_tree_manager() -> TestTreeManager {
  type TestTreeManager (line 233) | struct TestTreeManager {
    method to_value (line 238) | fn to_value(&self, v: String) -> Result<String, Error> {
    method get_children (line 242) | fn get_children(&self, n: &String) -> Result<Vec<String>, Error> {
    method is_leaf (line 255) | fn is_leaf(&self, n: &String) -> bool {

FILE: native/hdfs/src/object_store/hdfs.rs
  type HadoopFileSystem (line 46) | pub struct HadoopFileSystem {
    method new (line 60) | pub fn new(full_path: &str) -> Option<Self> {
    method path_to_filesystem (line 67) | fn path_to_filesystem(location: &Path) -> String {
    method get_path_root (line 71) | pub fn get_path_root(&self) -> String {
    method get_path (line 75) | pub fn get_path(&self, full_path: &str) -> Path {
    method get_hdfs_host (line 79) | pub fn get_hdfs_host(&self) -> String {
    method read_range (line 90) | fn read_range(range: &Range<u64>, file: &HdfsFile) -> Result<Bytes> {
  method default (line 51) | fn default() -> Self {
  method fmt (line 124) | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
  method put_opts (line 131) | async fn put_opts(
  method put_multipart_opts (line 140) | async fn put_multipart_opts(
  method get_opts (line 148) | async fn get_opts(&self, location: &Path, options: GetOptions) -> Result...
  method get_ranges (line 197) | async fn get_ranges(&self, location: &Path, ranges: &[Range<u64>]) -> Re...
  method delete_stream (line 214) | fn delete_stream(
  method list (line 235) | fn list(&self, prefix: Option<&Path>) -> BoxStream<'static, Result<Objec...
  method list_with_delimiter (line 291) | async fn list_with_delimiter(&self, prefix: Option<&Path>) -> Result<Lis...
  method copy_opts (line 339) | async fn copy_opts(&self, from: &Path, to: &Path, options: CopyOptions) ...
  function get_path (line 378) | pub fn get_path(full_path: &str, prefix: &str) -> Path {
  function convert_metadata (line 384) | pub fn convert_metadata(file: FileStatus, prefix: &str) -> ObjectMeta {
  function last_modified (line 394) | fn last_modified(file: &FileStatus) -> DateTime<Utc> {
  function check_modified (line 398) | fn check_modified(
  function convert_walkdir_result (line 424) | fn convert_walkdir_result(
  constant HDFS_COALESCE_DEFAULT (line 440) | pub const HDFS_COALESCE_DEFAULT: u64 = 1024 * 1024;
  constant OBJECT_STORE_COALESCE_PARALLEL (line 443) | pub const OBJECT_STORE_COALESCE_PARALLEL: usize = 10;
  function coalesce_ranges (line 447) | pub async fn coalesce_ranges<F, Fut>(
  function maybe_spawn_blocking (line 479) | pub async fn maybe_spawn_blocking<F, T>(f: F) -> Result<T>
  function merge_ranges (line 495) | fn merge_ranges(ranges: &[Range<u64>], coalesce: u64) -> Vec<Range<u64>> {
  function to_error (line 532) | fn to_error(err: HdfsErr) -> Error {
  function test_coalesce_ranges (line 565) | async fn test_coalesce_ranges() {

FILE: native/jni-bridge/src/batch_iterator.rs
  type CometBatchIterator (line 29) | pub struct CometBatchIterator<'a> {
  constant JVM_CLASS (line 42) | pub const JVM_CLASS: &'static str = "org/apache/comet/CometBatchIterator";
  function new (line 44) | pub fn new(env: &mut Env<'a>) -> JniResult<CometBatchIterator<'a>> {

FILE: native/jni-bridge/src/comet_exec.rs
  type CometExec (line 27) | pub struct CometExec<'a> {
  constant JVM_CLASS (line 54) | pub const JVM_CLASS: &'static str = "org/apache/spark/sql/comet/CometSca...
  function new (line 56) | pub fn new(env: &mut Env<'a>) -> JniResult<CometExec<'a>> {

FILE: native/jni-bridge/src/comet_metric_node.rs
  type CometMetricNode (line 29) | pub struct CometMetricNode<'a> {
  constant JVM_CLASS (line 40) | pub const JVM_CLASS: &'static str = "org/apache/spark/sql/comet/CometMet...
  function new (line 42) | pub fn new(env: &mut Env<'a>) -> JniResult<CometMetricNode<'a>> {

FILE: native/jni-bridge/src/comet_task_memory_manager.rs
  type CometTaskMemoryManager (line 30) | pub struct CometTaskMemoryManager<'a> {
  constant JVM_CLASS (line 40) | pub const JVM_CLASS: &'static str = "org/apache/spark/CometTaskMemoryMan...
  function new (line 42) | pub fn new(env: &mut Env<'a>) -> JniResult<CometTaskMemoryManager<'a>> {

FILE: native/jni-bridge/src/errors.rs
  type ExecutionError (line 53) | pub enum ExecutionError {
    method from (line 228) | fn from(value: CometError) -> Self {
    method from (line 258) | fn from(error: prost::DecodeError) -> ExecutionError {
    method from (line 264) | fn from(error: prost::UnknownEnumValue) -> ExecutionError {
    method from (line 270) | fn from(error: ArrowError) -> ExecutionError {
    method from (line 288) | fn from(value: DataFusionError) -> Self {
  type CometError (line 80) | pub enum CometError {
    method from (line 187) | fn from(e: Box<dyn Any + Send>) -> Self {
    method from (line 201) | fn from(value: DataFusionError) -> Self {
    method to_exception (line 306) | fn to_exception(&self) -> Exception {
  function init (line 174) | pub fn init() {
  method from (line 210) | fn from(value: CometError) -> Self {
  method from (line 219) | fn from(value: CometError) -> Self {
  method from (line 282) | fn from(error: ExpressionError) -> ArrowError {
  method from (line 294) | fn from(value: ExecutionError) -> Self {
  method from (line 300) | fn from(value: ExpressionError) -> Self {
  type ExpressionError (line 346) | pub enum ExpressionError {
    method from (line 246) | fn from(error: prost::DecodeError) -> ExpressionError {
    method from (line 252) | fn from(error: prost::UnknownEnumValue) -> ExpressionError {
    method from (line 276) | fn from(error: ArrowError) -> ExpressionError {
  type CometResult (line 365) | pub type CometResult<T> = result::Result<T, CometError>;
  type JNIDefault (line 385) | pub trait JNIDefault {
    method default (line 386) | fn default() -> Self;
    method default (line 390) | fn default() -> jboolean {
    method default (line 396) | fn default() -> jbyte {
    method default (line 402) | fn default() -> jchar {
    method default (line 408) | fn default() -> jdouble {
    method default (line 414) | fn default() -> jfloat {
    method default (line 420) | fn default() -> jint {
    method default (line 426) | fn default() -> jlong {
    method default (line 433) | fn default() -> jobject {
    method default (line 439) | fn default() -> jshort {
    method default (line 445) | fn default() {}
  function unwrap_or_throw_default (line 451) | pub fn unwrap_or_throw_default<T: JNIDefault>(
  function throw_exception (line 468) | fn throw_exception(env: &mut Env, error: &CometError, backtrace: Option<...
  function throw_spark_error_as_json (line 566) | fn throw_spark_error_as_json(env: &mut Env, spark_error: &SparkError) ->...
  function try_convert_duplicate_field_error (line 583) | fn try_convert_duplicate_field_error(error_msg: &str) -> Option<SparkErr...
  type StacktraceError (line 626) | enum StacktraceError {
  function to_stacktrace_string (line 638) | fn to_stacktrace_string(msg: String, backtrace_string: String) -> Result...
  function try_unwrap_or_throw (line 674) | pub fn try_unwrap_or_throw<T, F>(env: &EnvUnowned, f: F) -> T
  function jvm (line 713) | pub fn jvm() -> &'static Arc<JavaVM> {
  function error_from_panic (line 759) | pub fn error_from_panic() {
  function object_result (line 781) | pub fn object_result() {
  function jlong_result (line 803) | pub fn jlong_result() {
  function jlong_panic_exception (line 823) | pub fn jlong_panic_exception() {
  function jlong_result_ok (line 847) | pub fn jlong_result_ok() {
  function jlong_result_err (line 868) | pub fn jlong_result_err() {
  function jint_array_result (line 893) | pub fn jint_array_result() {
  function jint_array_panic_exception (line 918) | pub fn jint_array_panic_exception() {
  function stacktrace_string (line 946) | pub fn stacktrace_string() {
  function read_resource (line 962) | fn read_resource(path: &str) -> Result<String, io::Error> {
  function Java_Errors_hello (line 976) | pub extern "system" fn Java_Errors_hello(
  function Java_Errors_div (line 996) | pub extern "system" fn Java_Errors_div(
  function Java_Errors_div_with_parse (line 1006) | pub extern "system" fn Java_Errors_div_with_parse(
  function Java_Errors_array_div (line 1023) | pub extern "system" fn Java_Errors_array_div(
  function assert_pending_java_exception_detailed (line 1043) | fn assert_pending_java_exception_detailed(
  function assert_exception_type (line 1062) | fn assert_exception_type(env: &mut Env, exception: &JThrowable, expected...
  function assert_exception_message (line 1085) | fn assert_exception_message(env: &mut Env, exception: JThrowable, expect...

FILE: native/jni-bridge/src/lib.rs
  type StringWrapper (line 129) | pub struct StringWrapper<'a> {
  function new (line 134) | pub fn new(value: JObject<'a>) -> StringWrapper<'a> {
  function get (line 138) | pub fn get(&self) -> &JObject<'_> {
  type BinaryWrapper (line 143) | pub struct BinaryWrapper<'a> {
  function new (line 148) | pub fn new(value: JObject<'a>) -> BinaryWrapper<'a> {
  function get (line 152) | pub fn get(&self) -> &JObject<'_> {
  type Error (line 158) | type Error = Error;
  function try_from (line 160) | fn try_from(value: JValueOwned<'a>) -> Result<StringWrapper<'a>, Error> {
  type Error (line 169) | type Error = Error;
  function try_from (line 171) | fn try_from(value: JValueOwned<'a>) -> Result<BinaryWrapper<'a>, Error> {
  type JVMClasses (line 193) | pub struct JVMClasses<'a> {
  function init (line 231) | pub fn init(env: &mut Env) {
  function get (line 294) | pub fn get() -> &'static JVMClasses<'static> {
  function with_env (line 303) | pub fn with_env<T, E, F>(f: F) -> Result<T, E>
  function check_exception (line 324) | pub fn check_exception(env: &mut Env) -> CometResult<Option<CometError>> {
  function get_throwable_class_name (line 342) | fn get_throwable_class_name(
  function get_throwable_message (line 373) | fn get_throwable_message(
  function convert_exception (line 420) | pub fn convert_exception(env: &mut Env, throwable: &JThrowable) -> Comet...

FILE: native/jni-bridge/src/shuffle_block_iterator.rs
  type CometShuffleBlockIterator (line 29) | pub struct CometShuffleBlockIterator<'a> {
  constant JVM_CLASS (line 40) | pub const JVM_CLASS: &'static str = "org/apache/comet/CometShuffleBlockI...
  function new (line 42) | pub fn new(env: &mut Env<'a>) -> JniResult<CometShuffleBlockIterator<'a>> {

FILE: native/proto/build.rs
  function main (line 22) | fn main() -> Result<()> {

FILE: native/shuffle/benches/row_columnar.rs
  constant BATCH_SIZE (line 32) | const BATCH_SIZE: usize = 5000;
  constant INT64_SIZE (line 35) | const INT64_SIZE: usize = 8;
  constant UNSAFE_ROW_POINTER_SIZE (line 40) | const UNSAFE_ROW_POINTER_SIZE: usize = 8;
  constant ARRAY_HEADER_SIZE (line 43) | const ARRAY_HEADER_SIZE: usize = 8;
  function write_pointer (line 48) | fn write_pointer(data: &mut [u8], pos: usize, offset: usize, size: usize) {
  function null_bitset_size (line 54) | fn null_bitset_size(n: usize) -> usize {
  function make_struct_schema (line 66) | fn make_struct_schema(depth: usize, num_leaf_fields: usize) -> ArrowData...
  function make_list_schema (line 77) | fn make_list_schema() -> ArrowDataType {
  function make_map_schema (line 81) | fn make_map_schema() -> ArrowDataType {
  function build_struct_row (line 97) | fn build_struct_row(depth: usize, num_leaf_fields: usize) -> Vec<u8> {
  function build_list_row (line 153) | fn build_list_row(num_elements: usize) -> Vec<u8> {
  function build_map_row (line 178) | fn build_map_row(num_entries: usize) -> Vec<u8> {
  f
Copy disabled (too large) Download .json
Condensed preview — 2133 files, each showing path, character count, and a content snippet. Download the .json file for the full structured content (21,613K chars).
[
  {
    "path": ".asf.yaml",
    "chars": 1759,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".claude/skills/audit-comet-expression/SKILL.md",
    "chars": 12868,
    "preview": "---\nname: audit-comet-expression\ndescription: Audit an existing Comet expression for correctness and test coverage. Stud"
  },
  {
    "path": ".claude/skills/review-comet-pr/SKILL.md",
    "chars": 12509,
    "preview": "---\nname: review-comet-pr\ndescription: Review a DataFusion Comet pull request for Spark compatibility and implementation"
  },
  {
    "path": ".dockerignore",
    "chars": 198,
    "preview": ".git\n.github\n.idea\nbin\nconf\ndocs/build\ndocs/temp\ndocs/venv\nmetastore_db\ntarget\ncommon/target\nspark-integration/target\nfu"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.yml",
    "chars": 1509,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.yml",
    "chars": 1634,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/actions/java-test/action.yaml",
    "chars": 4451,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/actions/rust-test/action.yaml",
    "chars": 2206,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/actions/setup-builder/action.yaml",
    "chars": 2039,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/actions/setup-iceberg-builder/action.yaml",
    "chars": 1408,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/actions/setup-macos-builder/action.yaml",
    "chars": 2977,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/actions/setup-spark-builder/action.yaml",
    "chars": 2397,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/dependabot.yml",
    "chars": 2078,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/pull_request_template.md",
    "chars": 1158,
    "preview": "## Which issue does this PR close?\n\n<!--\nWe generally require a GitHub issue to be filed for all bug fixes and enhanceme"
  },
  {
    "path": ".github/workflows/codeql.yml",
    "chars": 1603,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/docker-publish.yml",
    "chars": 2681,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/docs.yaml",
    "chars": 2553,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/iceberg_spark_test.yml",
    "chars": 9240,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/label_new_issues.yml",
    "chars": 1269,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/miri.yml",
    "chars": 2041,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_benchmark_check.yml",
    "chars": 2700,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_build_linux.yml",
    "chars": 21461,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_build_macos.yml",
    "chars": 10939,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_markdown_format.yml",
    "chars": 1600,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_missing_suites.yml",
    "chars": 1126,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_rat_check.yml",
    "chars": 1450,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/pr_title_check.yml",
    "chars": 1546,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/spark_sql_test.yml",
    "chars": 7781,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/spark_sql_test_native_iceberg_compat.yml",
    "chars": 3308,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/stale.yml",
    "chars": 1536,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/take.yml",
    "chars": 2495,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".github/workflows/validate_workflows.yml",
    "chars": 1342,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".gitignore",
    "chars": 338,
    "preview": "CLAUDE.md\ntarget\n.idea\n*.iml\n.vscode/\n.bloop/\n.metals/\nderby.log\nmetastore_db/\nspark-warehouse/\ndependency-reduced-pom.x"
  },
  {
    "path": ".mvn/wrapper/maven-wrapper.properties",
    "chars": 1019,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": ".scalafix.conf",
    "chars": 978,
    "preview": "// Licensed to the Apache Software Foundation (ASF) under one\n// or more contributor license agreements.  See the NOTICE"
  },
  {
    "path": "CHANGELOG.md",
    "chars": 881,
    "preview": "<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor more contributor license agreements.  See the NOTICE "
  },
  {
    "path": "LICENSE.txt",
    "chars": 11725,
    "preview": "\n                                 Apache License\n                           Version 2.0, January 2004\n                  "
  },
  {
    "path": "Makefile",
    "chars": 5707,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "NOTICE.txt",
    "chars": 638,
    "preview": "Apache DataFusion Comet\nCopyright 2024 The Apache Software Foundation\n\nThis product includes software developed at\nThe A"
  },
  {
    "path": "README.md",
    "chars": 6437,
    "preview": "<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor more contributor license agreements.  See the NOTICE "
  },
  {
    "path": "benchmarks/Dockerfile",
    "chars": 1011,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/README.md",
    "chars": 4198,
    "preview": "<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor more contributor license agreements.  See the NOTICE "
  },
  {
    "path": "benchmarks/pyspark/README.md",
    "chars": 5323,
    "preview": "<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor more contributor license agreements.  See the NOTICE "
  },
  {
    "path": "benchmarks/pyspark/benchmarks/__init__.py",
    "chars": 2291,
    "preview": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreem"
  },
  {
    "path": "benchmarks/pyspark/benchmarks/base.py",
    "chars": 3985,
    "preview": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreem"
  },
  {
    "path": "benchmarks/pyspark/benchmarks/shuffle.py",
    "chars": 4401,
    "preview": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreem"
  },
  {
    "path": "benchmarks/pyspark/generate_data.py",
    "chars": 20734,
    "preview": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreem"
  },
  {
    "path": "benchmarks/pyspark/run_all_benchmarks.sh",
    "chars": 4657,
    "preview": "#!/bin/bash\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  Se"
  },
  {
    "path": "benchmarks/pyspark/run_benchmark.py",
    "chars": 3448,
    "preview": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreem"
  },
  {
    "path": "benchmarks/tpc/.gitignore",
    "chars": 12,
    "preview": "*.json\n*.png"
  },
  {
    "path": "benchmarks/tpc/README.md",
    "chars": 19748,
    "preview": "<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor more contributor license agreements.  See the NOTICE "
  },
  {
    "path": "benchmarks/tpc/create-iceberg-tables.py",
    "chars": 5300,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/drop-caches.sh",
    "chars": 836,
    "preview": "#!/bin/bash\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  Se"
  },
  {
    "path": "benchmarks/tpc/engines/comet-hashjoin.toml",
    "chars": 1324,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/engines/comet-iceberg-hashjoin.toml",
    "chars": 1880,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/engines/comet-iceberg.toml",
    "chars": 1822,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/engines/comet.toml",
    "chars": 1266,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/engines/gluten.toml",
    "chars": 1248,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/engines/spark.toml",
    "chars": 810,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/generate-comparison.py",
    "chars": 11427,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/infra/docker/Dockerfile",
    "chars": 3169,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/infra/docker/Dockerfile.build-comet",
    "chars": 3498,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/infra/docker/docker-compose-laptop.yml",
    "chars": 4001,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/infra/docker/docker-compose.yml",
    "chars": 5219,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q1.sql",
    "chars": 840,
    "preview": "-- CometBench-DS query 1 derived from TPC-DS query 1 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q10.sql",
    "chars": 1969,
    "preview": "-- CometBench-DS query 10 derived from TPC-DS query 10 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q11.sql",
    "chars": 3045,
    "preview": "-- CometBench-DS query 11 derived from TPC-DS query 11 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q12.sql",
    "chars": 975,
    "preview": "-- CometBench-DS query 12 derived from TPC-DS query 12 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q13.sql",
    "chars": 1671,
    "preview": "-- CometBench-DS query 13 derived from TPC-DS query 13 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q14.sql",
    "chars": 7598,
    "preview": "-- CometBench-DS query 14 derived from TPC-DS query 14 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q15.sql",
    "chars": 760,
    "preview": "-- CometBench-DS query 15 derived from TPC-DS query 15 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q16.sql",
    "chars": 1198,
    "preview": "-- CometBench-DS query 16 derived from TPC-DS query 16 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q17.sql",
    "chars": 1852,
    "preview": "-- CometBench-DS query 17 derived from TPC-DS query 17 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q18.sql",
    "chars": 1455,
    "preview": "-- CometBench-DS query 18 derived from TPC-DS query 18 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q19.sql",
    "chars": 881,
    "preview": "-- CometBench-DS query 19 derived from TPC-DS query 19 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q2.sql",
    "chars": 2330,
    "preview": "-- CometBench-DS query 2 derived from TPC-DS query 2 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q20.sql",
    "chars": 993,
    "preview": "-- CometBench-DS query 20 derived from TPC-DS query 20 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q21.sql",
    "chars": 1274,
    "preview": "-- CometBench-DS query 21 derived from TPC-DS query 21 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q22.sql",
    "chars": 765,
    "preview": "-- CometBench-DS query 22 derived from TPC-DS query 22 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q23.sql",
    "chars": 4005,
    "preview": "-- CometBench-DS query 23 derived from TPC-DS query 23 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q24.sql",
    "chars": 2539,
    "preview": "-- CometBench-DS query 24 derived from TPC-DS query 24 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q25.sql",
    "chars": 1220,
    "preview": "-- CometBench-DS query 25 derived from TPC-DS query 25 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q26.sql",
    "chars": 844,
    "preview": "-- CometBench-DS query 26 derived from TPC-DS query 26 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q27.sql",
    "chars": 905,
    "preview": "-- CometBench-DS query 27 derived from TPC-DS query 27 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q28.sql",
    "chars": 2393,
    "preview": "-- CometBench-DS query 28 derived from TPC-DS query 28 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q29.sql",
    "chars": 1466,
    "preview": "-- CometBench-DS query 29 derived from TPC-DS query 29 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q3.sql",
    "chars": 678,
    "preview": "-- CometBench-DS query 3 derived from TPC-DS query 3 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q30.sql",
    "chars": 1426,
    "preview": "-- CometBench-DS query 30 derived from TPC-DS query 30 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q31.sql",
    "chars": 1909,
    "preview": "-- CometBench-DS query 31 derived from TPC-DS query 31 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q32.sql",
    "chars": 923,
    "preview": "-- CometBench-DS query 32 derived from TPC-DS query 32 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q33.sql",
    "chars": 2082,
    "preview": "-- CometBench-DS query 33 derived from TPC-DS query 33 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q34.sql",
    "chars": 1599,
    "preview": "-- CometBench-DS query 34 derived from TPC-DS query 34 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q35.sql",
    "chars": 1865,
    "preview": "-- CometBench-DS query 35 derived from TPC-DS query 35 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q36.sql",
    "chars": 1014,
    "preview": "-- CometBench-DS query 36 derived from TPC-DS query 36 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q37.sql",
    "chars": 736,
    "preview": "-- CometBench-DS query 37 derived from TPC-DS query 37 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q38.sql",
    "chars": 1122,
    "preview": "-- CometBench-DS query 38 derived from TPC-DS query 38 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q39.sql",
    "chars": 2335,
    "preview": "-- CometBench-DS query 39 derived from TPC-DS query 39 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q4.sql",
    "chars": 4433,
    "preview": "-- CometBench-DS query 4 derived from TPC-DS query 4 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q40.sql",
    "chars": 1119,
    "preview": "-- CometBench-DS query 40 derived from TPC-DS query 40 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q41.sql",
    "chars": 2122,
    "preview": "-- CometBench-DS query 41 derived from TPC-DS query 41 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q42.sql",
    "chars": 697,
    "preview": "-- CometBench-DS query 42 derived from TPC-DS query 42 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q43.sql",
    "chars": 1214,
    "preview": "-- CometBench-DS query 43 derived from TPC-DS query 43 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q44.sql",
    "chars": 1838,
    "preview": "-- CometBench-DS query 44 derived from TPC-DS query 44 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q45.sql",
    "chars": 903,
    "preview": "-- CometBench-DS query 45 derived from TPC-DS query 45 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q46.sql",
    "chars": 1488,
    "preview": "-- CometBench-DS query 46 derived from TPC-DS query 46 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q47.sql",
    "chars": 2002,
    "preview": "-- CometBench-DS query 47 derived from TPC-DS query 47 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q48.sql",
    "chars": 1426,
    "preview": "-- CometBench-DS query 48 derived from TPC-DS query 48 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q49.sql",
    "chars": 3964,
    "preview": "-- CometBench-DS query 49 derived from TPC-DS query 49 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q5.sql",
    "chars": 4161,
    "preview": "-- CometBench-DS query 5 derived from TPC-DS query 5 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q50.sql",
    "chars": 1786,
    "preview": "-- CometBench-DS query 50 derived from TPC-DS query 50 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q51.sql",
    "chars": 1865,
    "preview": "-- CometBench-DS query 51 derived from TPC-DS query 51 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q52.sql",
    "chars": 669,
    "preview": "-- CometBench-DS query 52 derived from TPC-DS query 52 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q53.sql",
    "chars": 1283,
    "preview": "-- CometBench-DS query 53 derived from TPC-DS query 53 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q54.sql",
    "chars": 1914,
    "preview": "-- CometBench-DS query 54 derived from TPC-DS query 54 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q55.sql",
    "chars": 541,
    "preview": "-- CometBench-DS query 55 derived from TPC-DS query 55 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q56.sql",
    "chars": 2066,
    "preview": "-- CometBench-DS query 56 derived from TPC-DS query 56 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q57.sql",
    "chars": 1761,
    "preview": "-- CometBench-DS query 57 derived from TPC-DS query 57 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q58.sql",
    "chars": 2548,
    "preview": "-- CometBench-DS query 58 derived from TPC-DS query 58 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q59.sql",
    "chars": 2235,
    "preview": "-- CometBench-DS query 59 derived from TPC-DS query 59 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q6.sql",
    "chars": 888,
    "preview": "-- CometBench-DS query 6 derived from TPC-DS query 6 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q60.sql",
    "chars": 2051,
    "preview": "-- CometBench-DS query 60 derived from TPC-DS query 60 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q61.sql",
    "chars": 1469,
    "preview": "-- CometBench-DS query 61 derived from TPC-DS query 61 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q62.sql",
    "chars": 1378,
    "preview": "-- CometBench-DS query 62 derived from TPC-DS query 62 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q63.sql",
    "chars": 1487,
    "preview": "-- CometBench-DS query 63 derived from TPC-DS query 63 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q64.sql",
    "chars": 3738,
    "preview": "-- CometBench-DS query 64 derived from TPC-DS query 64 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q65.sql",
    "chars": 1069,
    "preview": "-- CometBench-DS query 65 derived from TPC-DS query 65 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q66.sql",
    "chars": 7591,
    "preview": "-- CometBench-DS query 66 derived from TPC-DS query 66 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q67.sql",
    "chars": 1388,
    "preview": "-- CometBench-DS query 67 derived from TPC-DS query 67 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q68.sql",
    "chars": 1642,
    "preview": "-- CometBench-DS query 68 derived from TPC-DS query 68 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q69.sql",
    "chars": 1615,
    "preview": "-- CometBench-DS query 69 derived from TPC-DS query 69 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q7.sql",
    "chars": 835,
    "preview": "-- CometBench-DS query 7 derived from TPC-DS query 7 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q70.sql",
    "chars": 1359,
    "preview": "-- CometBench-DS query 70 derived from TPC-DS query 70 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q71.sql",
    "chars": 1764,
    "preview": "-- CometBench-DS query 71 derived from TPC-DS query 71 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q72.sql",
    "chars": 1373,
    "preview": "-- CometBench-DS query 72 derived from TPC-DS query 72 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q73.sql",
    "chars": 1403,
    "preview": "-- CometBench-DS query 73 derived from TPC-DS query 73 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q74.sql",
    "chars": 2252,
    "preview": "-- CometBench-DS query 74 derived from TPC-DS query 74 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q75.sql",
    "chars": 3306,
    "preview": "-- CometBench-DS query 75 derived from TPC-DS query 75 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q76.sql",
    "chars": 1358,
    "preview": "-- CometBench-DS query 76 derived from TPC-DS query 76 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q77.sql",
    "chars": 3384,
    "preview": "-- CometBench-DS query 77 derived from TPC-DS query 77 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q78.sql",
    "chars": 2293,
    "preview": "-- CometBench-DS query 78 derived from TPC-DS query 78 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q79.sql",
    "chars": 1130,
    "preview": "-- CometBench-DS query 79 derived from TPC-DS query 79 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q8.sql",
    "chars": 6331,
    "preview": "-- CometBench-DS query 8 derived from TPC-DS query 8 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q80.sql",
    "chars": 3152,
    "preview": "-- CometBench-DS query 80 derived from TPC-DS query 80 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q81.sql",
    "chars": 1496,
    "preview": "-- CometBench-DS query 81 derived from TPC-DS query 81 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q82.sql",
    "chars": 732,
    "preview": "-- CometBench-DS query 82 derived from TPC-DS query 82 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q83.sql",
    "chars": 1987,
    "preview": "-- CometBench-DS query 83 derived from TPC-DS query 83 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q84.sql",
    "chars": 832,
    "preview": "-- CometBench-DS query 84 derived from TPC-DS query 84 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q85.sql",
    "chars": 2254,
    "preview": "-- CometBench-DS query 85 derived from TPC-DS query 85 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q86.sql",
    "chars": 864,
    "preview": "-- CometBench-DS query 86 derived from TPC-DS query 86 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q87.sql",
    "chars": 1134,
    "preview": "-- CometBench-DS query 87 derived from TPC-DS query 87 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q88.sql",
    "chars": 5364,
    "preview": "-- CometBench-DS query 88 derived from TPC-DS query 88 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q89.sql",
    "chars": 1206,
    "preview": "-- CometBench-DS query 89 derived from TPC-DS query 89 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q9.sql",
    "chars": 2360,
    "preview": "-- CometBench-DS query 9 derived from TPC-DS query 9 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are C"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q90.sql",
    "chars": 1194,
    "preview": "-- CometBench-DS query 90 derived from TPC-DS query 90 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q91.sql",
    "chars": 1303,
    "preview": "-- CometBench-DS query 91 derived from TPC-DS query 91 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q92.sql",
    "chars": 954,
    "preview": "-- CometBench-DS query 92 derived from TPC-DS query 92 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q93.sql",
    "chars": 1013,
    "preview": "-- CometBench-DS query 93 derived from TPC-DS query 93 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q94.sql",
    "chars": 1064,
    "preview": "-- CometBench-DS query 94 derived from TPC-DS query 94 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q95.sql",
    "chars": 1259,
    "preview": "-- CometBench-DS query 95 derived from TPC-DS query 95 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q96.sql",
    "chars": 622,
    "preview": "-- CometBench-DS query 96 derived from TPC-DS query 96 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q97.sql",
    "chars": 1173,
    "preview": "-- CometBench-DS query 97 derived from TPC-DS query 97 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q98.sql",
    "chars": 961,
    "preview": "-- CometBench-DS query 98 derived from TPC-DS query 98 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpcds/q99.sql",
    "chars": 1389,
    "preview": "-- CometBench-DS query 99 derived from TPC-DS query 99 under the terms of the TPC Fair Use Policy.\n-- TPC-DS queries are"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q1.sql",
    "chars": 689,
    "preview": "-- CometBench-H query 1 derived from TPC-H query 1 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q10.sql",
    "chars": 687,
    "preview": "-- CometBench-H query 10 derived from TPC-H query 10 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q11.sql",
    "chars": 664,
    "preview": "-- CometBench-H query 11 derived from TPC-H query 11 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q12.sql",
    "chars": 748,
    "preview": "-- CometBench-H query 12 derived from TPC-H query 12 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q13.sql",
    "chars": 510,
    "preview": "-- CometBench-H query 13 derived from TPC-H query 13 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q14.sql",
    "chars": 499,
    "preview": "-- CometBench-H query 14 derived from TPC-H query 14 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q15.sql",
    "chars": 676,
    "preview": "-- CometBench-H query 15 derived from TPC-H query 15 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q16.sql",
    "chars": 635,
    "preview": "-- CometBench-H query 16 derived from TPC-H query 16 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q17.sql",
    "chars": 446,
    "preview": "-- CometBench-H query 17 derived from TPC-H query 17 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q18.sql",
    "chars": 610,
    "preview": "-- CometBench-H query 18 derived from TPC-H query 18 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q19.sql",
    "chars": 1123,
    "preview": "-- CometBench-H query 19 derived from TPC-H query 19 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q2.sql",
    "chars": 834,
    "preview": "-- CometBench-H query 2 derived from TPC-H query 2 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q20.sql",
    "chars": 747,
    "preview": "-- CometBench-H query 20 derived from TPC-H query 20 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q21.sql",
    "chars": 818,
    "preview": "-- CometBench-H query 21 derived from TPC-H query 21 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q22.sql",
    "chars": 806,
    "preview": "-- CometBench-H query 22 derived from TPC-H query 22 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Co"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q3.sql",
    "chars": 579,
    "preview": "-- CometBench-H query 3 derived from TPC-H query 3 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q4.sql",
    "chars": 515,
    "preview": "-- CometBench-H query 4 derived from TPC-H query 4 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q5.sql",
    "chars": 649,
    "preview": "-- CometBench-H query 5 derived from TPC-H query 5 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q6.sql",
    "chars": 417,
    "preview": "-- CometBench-H query 6 derived from TPC-H query 6 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q7.sql",
    "chars": 955,
    "preview": "-- CometBench-H query 7 derived from TPC-H query 7 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q8.sql",
    "chars": 938,
    "preview": "-- CometBench-H query 8 derived from TPC-H query 8 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/queries/tpch/q9.sql",
    "chars": 757,
    "preview": "-- CometBench-H query 9 derived from TPC-H query 9 under the terms of the TPC Fair Use Policy.\n-- TPC-H queries are Copy"
  },
  {
    "path": "benchmarks/tpc/run.py",
    "chars": 16470,
    "preview": "#!/usr/bin/env python3\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agre"
  },
  {
    "path": "benchmarks/tpc/tpcbench.py",
    "chars": 9172,
    "preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE f"
  },
  {
    "path": "common/pom.xml",
    "chars": 8705,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor more contribu"
  },
  {
    "path": "common/src/main/java/org/apache/arrow/c/AbstractCometSchemaImporter.java",
    "chars": 2670,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/arrow/c/ArrowImporter.java",
    "chars": 2245,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/CometNativeException.java",
    "chars": 1046,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/CometOutOfMemoryError.java",
    "chars": 1019,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/CometRuntimeException.java",
    "chars": 1128,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/CometSchemaImporter.java",
    "chars": 1190,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/IcebergApi.java",
    "chars": 1870,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/NativeBase.java",
    "chars": 10363,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/ParquetRuntimeException.java",
    "chars": 1169,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/exceptions/CometQueryExecutionException.java",
    "chars": 2243,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/parquet/AbstractColumnReader.java",
    "chars": 4517,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  },
  {
    "path": "common/src/main/java/org/apache/comet/parquet/ArrowConstantColumnReader.java",
    "chars": 9547,
    "preview": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOT"
  }
]

// ... and 1933 more files (download for full content)

About this extraction

This page contains the full source code of the apache/arrow-datafusion-comet GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 2133 files (19.8 MB), approximately 5.3M tokens, and a symbol index with 4143 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!