Copy disabled (too large)
Download .txt
Showing preview only (33,671K chars total). Download the full file to get everything.
Repository: airlift/aircompressor
Branch: master
Commit: 38c2a7d5ad6d
Files: 294
Total size: 172.2 MB
Directory structure:
gitextract_x_n9lj4e/
├── .github/
│ ├── dependabot.yml
│ ├── release.yml
│ └── workflows/
│ ├── main.yml
│ ├── release-2x.yml
│ └── release.yml
├── .gitignore
├── .mvn/
│ ├── maven.config
│ ├── settings.xml
│ └── wrapper/
│ └── maven-wrapper.properties
├── README.md
├── bin/
│ └── download.sh
├── license.txt
├── mvnw
├── notice.md
├── pom.xml
├── src/
│ ├── checkstyle/
│ │ └── checks.xml
│ ├── license/
│ │ └── LICENSE-HEADER.txt
│ ├── main/
│ │ └── java/
│ │ └── io/
│ │ └── airlift/
│ │ └── compress/
│ │ └── v3/
│ │ ├── Compressor.java
│ │ ├── Decompressor.java
│ │ ├── IncompatibleJvmException.java
│ │ ├── MalformedInputException.java
│ │ ├── bzip2/
│ │ │ ├── BZip2Codec.java
│ │ │ ├── BZip2Constants.java
│ │ │ ├── BZip2HadoopInputStream.java
│ │ │ ├── BZip2HadoopOutputStream.java
│ │ │ ├── BZip2HadoopStreams.java
│ │ │ ├── CBZip2InputStream.java
│ │ │ ├── CBZip2OutputStream.java
│ │ │ └── Crc32.java
│ │ ├── deflate/
│ │ │ ├── DeflateCompressor.java
│ │ │ ├── DeflateDecompressor.java
│ │ │ ├── DeflateJavaCompressor.java
│ │ │ ├── DeflateJavaDecompressor.java
│ │ │ ├── DeflateNative.java
│ │ │ ├── DeflateNativeCompressor.java
│ │ │ ├── DeflateNativeDecompressor.java
│ │ │ ├── JdkDeflateCodec.java
│ │ │ ├── JdkDeflateHadoopInputStream.java
│ │ │ ├── JdkDeflateHadoopOutputStream.java
│ │ │ └── JdkDeflateHadoopStreams.java
│ │ ├── gzip/
│ │ │ ├── JdkGzipCodec.java
│ │ │ ├── JdkGzipConstants.java
│ │ │ ├── JdkGzipHadoopInputStream.java
│ │ │ ├── JdkGzipHadoopOutputStream.java
│ │ │ └── JdkGzipHadoopStreams.java
│ │ ├── hadoop/
│ │ │ ├── CodecAdapter.java
│ │ │ ├── CompressionInputStreamAdapter.java
│ │ │ ├── CompressionOutputStreamAdapter.java
│ │ │ ├── HadoopInputStream.java
│ │ │ ├── HadoopOutputStream.java
│ │ │ └── HadoopStreams.java
│ │ ├── internal/
│ │ │ ├── NativeLoader.java
│ │ │ └── NativeSignature.java
│ │ ├── lz4/
│ │ │ ├── Lz4Codec.java
│ │ │ ├── Lz4Compressor.java
│ │ │ ├── Lz4Constants.java
│ │ │ ├── Lz4Decompressor.java
│ │ │ ├── Lz4HadoopInputStream.java
│ │ │ ├── Lz4HadoopOutputStream.java
│ │ │ ├── Lz4HadoopStreams.java
│ │ │ ├── Lz4JavaCompressor.java
│ │ │ ├── Lz4JavaDecompressor.java
│ │ │ ├── Lz4Native.java
│ │ │ ├── Lz4NativeCompressor.java
│ │ │ ├── Lz4NativeDecompressor.java
│ │ │ ├── Lz4RawCompressor.java
│ │ │ ├── Lz4RawDecompressor.java
│ │ │ └── UnsafeUtil.java
│ │ ├── lzo/
│ │ │ ├── LzoCodec.java
│ │ │ ├── LzoCompressor.java
│ │ │ ├── LzoConstants.java
│ │ │ ├── LzoDecompressor.java
│ │ │ ├── LzoHadoopInputStream.java
│ │ │ ├── LzoHadoopOutputStream.java
│ │ │ ├── LzoHadoopStreams.java
│ │ │ ├── LzoRawCompressor.java
│ │ │ ├── LzoRawDecompressor.java
│ │ │ ├── LzopCodec.java
│ │ │ ├── LzopHadoopInputStream.java
│ │ │ ├── LzopHadoopOutputStream.java
│ │ │ ├── LzopHadoopStreams.java
│ │ │ └── UnsafeUtil.java
│ │ ├── snappy/
│ │ │ ├── Crc32C.java
│ │ │ ├── SnappyCodec.java
│ │ │ ├── SnappyCompressor.java
│ │ │ ├── SnappyConstants.java
│ │ │ ├── SnappyDecompressor.java
│ │ │ ├── SnappyFramed.java
│ │ │ ├── SnappyFramedInputStream.java
│ │ │ ├── SnappyFramedOutputStream.java
│ │ │ ├── SnappyHadoopInputStream.java
│ │ │ ├── SnappyHadoopOutputStream.java
│ │ │ ├── SnappyHadoopStreams.java
│ │ │ ├── SnappyInternalUtils.java
│ │ │ ├── SnappyJavaCompressor.java
│ │ │ ├── SnappyJavaDecompressor.java
│ │ │ ├── SnappyNative.java
│ │ │ ├── SnappyNativeCompressor.java
│ │ │ ├── SnappyNativeDecompressor.java
│ │ │ ├── SnappyRawCompressor.java
│ │ │ ├── SnappyRawDecompressor.java
│ │ │ └── UnsafeUtil.java
│ │ ├── xxhash/
│ │ │ ├── XxHash128.java
│ │ │ ├── XxHash3Bindings.java
│ │ │ ├── XxHash3Hasher.java
│ │ │ ├── XxHash3Hasher128.java
│ │ │ ├── XxHash3Native.java
│ │ │ ├── XxHash64Bindings.java
│ │ │ ├── XxHash64Hasher.java
│ │ │ ├── XxHash64JavaHasher.java
│ │ │ └── XxHash64NativeHasher.java
│ │ └── zstd/
│ │ ├── BitInputStream.java
│ │ ├── BitOutputStream.java
│ │ ├── BlockCompressionState.java
│ │ ├── BlockCompressor.java
│ │ ├── CompressionContext.java
│ │ ├── CompressionParameters.java
│ │ ├── Constants.java
│ │ ├── DoubleFastBlockCompressor.java
│ │ ├── FiniteStateEntropy.java
│ │ ├── FrameHeader.java
│ │ ├── FseCompressionTable.java
│ │ ├── FseTableReader.java
│ │ ├── Histogram.java
│ │ ├── Huffman.java
│ │ ├── HuffmanCompressionContext.java
│ │ ├── HuffmanCompressionTable.java
│ │ ├── HuffmanCompressionTableWorkspace.java
│ │ ├── HuffmanCompressor.java
│ │ ├── HuffmanTableWriterWorkspace.java
│ │ ├── NodeTable.java
│ │ ├── RepeatedOffsets.java
│ │ ├── SequenceEncoder.java
│ │ ├── SequenceEncodingContext.java
│ │ ├── SequenceStore.java
│ │ ├── UnsafeUtil.java
│ │ ├── Util.java
│ │ ├── XxHash64.java
│ │ ├── ZstdCodec.java
│ │ ├── ZstdCompressor.java
│ │ ├── ZstdDecompressor.java
│ │ ├── ZstdFrameCompressor.java
│ │ ├── ZstdFrameDecompressor.java
│ │ ├── ZstdHadoopInputStream.java
│ │ ├── ZstdHadoopOutputStream.java
│ │ ├── ZstdHadoopStreams.java
│ │ ├── ZstdIncrementalFrameDecompressor.java
│ │ ├── ZstdInputStream.java
│ │ ├── ZstdJavaCompressor.java
│ │ ├── ZstdJavaDecompressor.java
│ │ ├── ZstdNative.java
│ │ ├── ZstdNativeCompressor.java
│ │ ├── ZstdNativeDecompressor.java
│ │ └── ZstdOutputStream.java
│ └── test/
│ ├── java/
│ │ └── io/
│ │ └── airlift/
│ │ └── compress/
│ │ └── v3/
│ │ ├── AbstractTestCompression.java
│ │ ├── HadoopCodecCompressor.java
│ │ ├── HadoopCodecDecompressor.java
│ │ ├── HadoopCodecDecompressorByteAtATime.java
│ │ ├── HadoopNative.java
│ │ ├── TestingData.java
│ │ ├── Util.java
│ │ ├── benchmark/
│ │ │ ├── Algorithm.java
│ │ │ ├── BytesCounter.java
│ │ │ ├── CompressionBenchmark.java
│ │ │ ├── DataSet.java
│ │ │ └── HashBenchmark.java
│ │ ├── bzip2/
│ │ │ ├── TestBZip2Codec.java
│ │ │ └── TestBZip2CodecByteAtATime.java
│ │ ├── deflate/
│ │ │ ├── MockJdkDeflateCompressor.java
│ │ │ ├── TestDeflate.java
│ │ │ ├── TestDeflateNative.java
│ │ │ ├── TestJdkDeflateCodec.java
│ │ │ └── TestJdkDeflateCodecByteAtATime.java
│ │ ├── gzip/
│ │ │ ├── MockJdkGzipCompressor.java
│ │ │ ├── TestJdkGzipCodec.java
│ │ │ ├── TestJdkGzipCodecByteAtATime.java
│ │ │ └── TestJdkGzipHadoopInputStream.java
│ │ ├── internal/
│ │ │ └── TestNativeLoader.java
│ │ ├── lz4/
│ │ │ ├── AbstractTestLz4.java
│ │ │ ├── BenchmarkCount.java
│ │ │ ├── TestLz4.java
│ │ │ ├── TestLz4Codec.java
│ │ │ ├── TestLz4CodecByteAtATime.java
│ │ │ ├── TestLz4Native.java
│ │ │ └── TestLz4NativeFastest.java
│ │ ├── lzo/
│ │ │ ├── TestLzo.java
│ │ │ ├── TestLzoCodec.java
│ │ │ ├── TestLzoCodecByteAtATime.java
│ │ │ ├── TestLzopCodec.java
│ │ │ └── TestLzopCodecByteAtATime.java
│ │ ├── snappy/
│ │ │ ├── AbstractTestSnappy.java
│ │ │ ├── ByteArrayOutputStream.java
│ │ │ ├── RandomGenerator.java
│ │ │ ├── TestSnappyCodec.java
│ │ │ ├── TestSnappyCodecByteAtATime.java
│ │ │ ├── TestSnappyJava.java
│ │ │ ├── TestSnappyNative.java
│ │ │ └── TestSnappyStream.java
│ │ ├── thirdparty/
│ │ │ ├── HadoopLzoCompressor.java
│ │ │ ├── HadoopLzoDecompressor.java
│ │ │ ├── JPountzLz4Compressor.java
│ │ │ ├── JPountzLz4Decompressor.java
│ │ │ ├── JdkDeflateCompressor.java
│ │ │ ├── JdkInflateDecompressor.java
│ │ │ ├── XerialSnappyCompressor.java
│ │ │ ├── XerialSnappyDecompressor.java
│ │ │ ├── ZstdJniCompressor.java
│ │ │ └── ZstdJniDecompressor.java
│ │ ├── xxhash/
│ │ │ ├── AbstractTestXxHash64.java
│ │ │ ├── TestXxHash3.java
│ │ │ ├── TestXxHash64.java
│ │ │ └── TestXxHash64Java.java
│ │ └── zstd/
│ │ ├── AbstractTestZstd.java
│ │ ├── TestCompressor.java
│ │ ├── TestUtil.java
│ │ ├── TestXxHash64.java
│ │ ├── TestZstd.java
│ │ ├── TestZstdCodec.java
│ │ ├── TestZstdCodecByteAtATime.java
│ │ ├── TestZstdFast.java
│ │ ├── TestZstdHigh.java
│ │ ├── TestZstdNative.java
│ │ ├── TestZstdPartial.java
│ │ ├── TestZstdStream.java
│ │ ├── ZstdPartialDecompressor.java
│ │ ├── ZstdStreamCompressor.java
│ │ └── ZstdStreamDecompressor.java
│ └── resources/
│ └── data/
│ ├── lzo/
│ │ ├── test
│ │ ├── test-adler32-both.lzo
│ │ ├── test-adler32.lzo
│ │ ├── test-crc32-both.lzo
│ │ ├── test-crc32.lzo
│ │ └── test-no-checksum.lzo
│ └── zstd/
│ ├── bad-second-frame.zst
│ ├── incompressible
│ ├── large-rle
│ ├── multiple-frames
│ ├── multiple-frames.zst
│ ├── offset-before-start.zst
│ ├── small-literals-after-incompressible-literals
│ ├── with-checksum
│ └── with-checksum.zst
└── testdata/
├── artificial/
│ ├── a.txt
│ ├── aaa.txt
│ ├── alphabet.txt
│ └── random.txt
├── calgary/
│ ├── bib
│ ├── book1
│ ├── book2
│ ├── geo
│ ├── news
│ ├── obj1
│ ├── obj2
│ ├── paper1
│ ├── paper2
│ ├── paper3
│ ├── paper4
│ ├── paper5
│ ├── paper6
│ ├── pic
│ ├── progc
│ ├── progl
│ ├── progp
│ └── trans
├── canterbury/
│ ├── alice29.txt
│ ├── asyoulik.txt
│ ├── cp.html
│ ├── fields.c
│ ├── grammar.lsp
│ ├── kennedy.xls
│ ├── lcet10.txt
│ ├── plrabn12.txt
│ ├── ptt5
│ ├── sum
│ └── xargs.1
├── geo.protodata
├── html
├── kppkn.gtb
├── large/
│ ├── E.coli
│ ├── bible.txt
│ └── world192.txt
├── silesia/
│ ├── dickens
│ ├── mozilla
│ ├── mr
│ ├── nci
│ ├── ooffice
│ ├── osdb
│ ├── reymont
│ ├── samba
│ ├── sao
│ ├── webster
│ ├── x-ray
│ └── xml
└── urls.10K
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/dependabot.yml
================================================
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "maven"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 5
================================================
FILE: .github/release.yml
================================================
changelog:
exclude:
labels:
- ignore-for-release
categories:
- title: Breaking Changes 🛠
labels:
- breaking
- title: Bug Fixes 🐛
labels:
- bug
- title: Improvements 🎉
labels:
- improvement
- cleanup
- title: Dependency updates 📦
labels:
- dependency
- title: Security fixes 🔒
labels:
- security
- title: Others
labels:
- "*"
================================================
FILE: .github/workflows/main.yml
================================================
name: ci
on:
- push
- pull_request
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
java-version:
- 25
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: ${{ matrix.java-version }}
- name: Maven Install
run: ./mvnw install -B -V -DskipTests -Dair.check.skip-all
- name: Maven Tests
run: ./mvnw install -B -P ci
================================================
FILE: .github/workflows/release-2x.yml
================================================
name: Release new 2.x version
on:
workflow_dispatch:
jobs:
release:
runs-on: ubuntu-latest
permissions:
contents: write
packages: write
steps:
- name: Check if release is running from master
run: |
if [ "${GITHUB_REF}" != "refs/heads/master" ]; then
echo "Release is only allowed from master"
exit 1
fi
- name: Checkout code for release-2.x
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: release-2.x
- name: Install java
uses: actions/setup-java@v4
with:
java-version: '21'
distribution: 'temurin'
gpg-private-key: ${{ secrets.JRELEASER_GPG_SECRET_KEY }}
gpg-passphrase: MAVEN_GPG_PASSPHRASE
cache: 'maven'
- name: Configure git
run: |
git config user.name "Airlift Release"
git config user.email "airlift-bot@airlift.io"
- name: Lock branch before release
uses: github/lock@v2
id: release-lock
with:
mode: 'lock'
- name: Run ./mvnw release:prepare
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
run: |
./mvnw -B release:prepare
- name: Determine release version
run: |
export VERSION=$(grep 'scm.tag=' release.properties | cut -d'=' -f2)
echo "VERSION=${VERSION}" >> $GITHUB_ENV
echo "Releasing version: ${VERSION}"
- name: Run ./mvnw release:perform to local staging
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
run: |
./mvnw -B release:perform
- name: Display git status and history, checkout release tag
run: |
git status
git log --oneline -n 2
- name: Run njord:validate
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
MAVENCENTRAL_USERNAME: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_USERNAME }}
MAVENCENTRAL_PASSWORD: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_TOKEN }}
run: |
./mvnw njord:list
./mvnw njord:status
./mvnw njord:validate
- name: Run njord:publish
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
MAVENCENTRAL_USERNAME: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_USERNAME }}
MAVENCENTRAL_PASSWORD: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_TOKEN }}
run:
./mvnw njord:publish
- name: Push git changes
run: |
git status
git describe
git push origin release-2.x
git push origin --tags
- name: Unlock branch after a release
uses: github/lock@v2
id: release-unlock
with:
mode: 'unlock'
- name: Create release notes
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
gh release create "${VERSION}" \
--repo="$GITHUB_REPOSITORY" \
--title="${GITHUB_REPOSITORY#*/} ${VERSION}" \
--generate-notes
================================================
FILE: .github/workflows/release.yml
================================================
name: Release new version
on:
workflow_dispatch:
jobs:
release:
runs-on: ubuntu-latest
permissions:
contents: write
packages: write
steps:
- name: Check if release is running from master
run: |
if [ "${GITHUB_REF}" != "refs/heads/master" ]; then
echo "Release is only allowed from master branch"
exit 1
fi
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install java
uses: actions/setup-java@v4
with:
java-version: '25'
distribution: 'temurin'
gpg-private-key: ${{ secrets.JRELEASER_GPG_SECRET_KEY }}
gpg-passphrase: MAVEN_GPG_PASSPHRASE
cache: 'maven'
- name: Configure git
run: |
git config user.name "Airlift Release"
git config user.email "airlift-bot@airlift.io"
- name: Lock branch before release
uses: github/lock@v2
id: release-lock
with:
mode: 'lock'
- name: Run mvn release:prepare
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
run: |
./mvnw -B release:prepare -Poss-release
- name: Determine release version
run: |
export VERSION=$(grep 'scm.tag=' release.properties | cut -d'=' -f2)
echo "VERSION=${VERSION}" >> $GITHUB_ENV
echo "Releasing version: ${VERSION}"
- name: Run mvn release:perform to local staging
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
run: |
./mvnw -B release:perform -Poss-release
- name: Display git status and history
run: |
git status
git log --oneline -n 2
- name: Run njord:validate
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
MAVENCENTRAL_USERNAME: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_USERNAME }}
MAVENCENTRAL_PASSWORD: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_TOKEN }}
run: |
./mvnw njord:list
./mvnw njord:status
./mvnw njord:validate
- name: Run njord:publish
env:
MAVEN_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }}
MAVENCENTRAL_USERNAME: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_USERNAME }}
MAVENCENTRAL_PASSWORD: ${{ secrets.JRELEASER_NEXUS2_MAVEN_CENTRAL_TOKEN }}
run:
./mvnw njord:publish -Poss-release
- name: Push git changes
run: |
git status
git describe
git push origin master
git push origin --tags
- name: Unlock branch after a release
uses: github/lock@v2
id: release-unlock
with:
mode: 'unlock'
- name: Create release notes
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
gh release create "${VERSION}" \
--repo="$GITHUB_REPOSITORY" \
--title="${GITHUB_REPOSITORY#*/} ${VERSION}" \
--generate-notes
================================================
FILE: .gitignore
================================================
target/
/var
pom.xml.versionsBackup
test-output/
/atlassian-ide-plugin.x
.idea
.*.swp
.*.swo
*~
*.swp
.idea
.idea/*
*.iml
*.ipr
*.iws
.DS_Store
.scala_dependencies
.project
.classpath
.settings
eclipse-classes
snappy-cc
================================================
FILE: .mvn/maven.config
================================================
-s
${session.rootDirectory}/.mvn/settings.xml
================================================
FILE: .mvn/settings.xml
================================================
<settings>
<pluginGroups>
<pluginGroup>eu.maveniverse.maven.plugins</pluginGroup>
</pluginGroups>
<servers>
<server>
<id>sonatype-central-portal</id>
<username>${env.MAVENCENTRAL_USERNAME}</username>
<password>${env.MAVENCENTRAL_PASSWORD}</password>
<configuration>
<njord.publisher>sonatype-cp</njord.publisher>
<njord.releaseUrl>njord:template:release-sca</njord.releaseUrl>
</configuration>
</server>
</servers>
</settings>
================================================
FILE: .mvn/wrapper/maven-wrapper.properties
================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.4
distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.12/apache-maven-3.9.12-bin.zip
================================================
FILE: README.md
================================================
# Compression for Java
[](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22io.airlift%22%20AND%20a%3A%22aircompressor-v3%22)
This library provides a set of compression algorithms implemented in pure Java and,
where possible, as native implementations. The Java implementations use `sun.misc.Unsafe`
to provide fast access to memory. The native implementations use `java.lang.foreign`
to interact directly with native libraries without the need for JNI.
# Usage
Each algorithm provides a simple block compression API using the `io.airlift.compress.v3.Compressor`
and `io.airlift.compress.v3.Decompressor` classes. Block compression is the simplest form of
compression: it simply compresses a small block of data provided as a `byte[]`, or more generally a
`java.lang.foreign.MemorySegment`. Each algorithm may have one or more streaming format
which typically produces a sequence of block compressed chunks.
## byte array API
```java
byte[] data = ...
Compressor compressor = new Lz4JavaCompressor();
byte[] compressed = new byte[compressor.maxCompressedLength(data.length)];
int compressedSize = compressor.compress(data, 0, data.length, compressed, 0, compressed.length);
Decompressor decompressor = new Lz4JavaDecompressor();
byte[] uncompressed = new byte[data.length];
int uncompressedSize = decompressor.decompress(compressed, 0, compressedSize, uncompressed, 0, uncompressed.length);
```
## MemorySegment API
```java
Arena arena = ...
MemorySegment data = ...
Compressor compressor = new Lz4JavaCompressor();
MemorySegment compressed = arena.allocate(compressor.maxCompressedLength(toIntExact(data.byteSize())));
int compressedSize = compressor.compress(data, compressed);
compressed = compressed.asSlice(0, compressedSize);
Decompressor decompressor = new Lz4JavaDecompressor();
MemorySegment uncompressed = arena.allocate(data.byteSize());
int uncompressedSize = decompressor.decompress(compressed, uncompressed);
uncompressed = uncompressed.asSlice(0, uncompressedSize);
```
# Algorithms
## [Zstandard (Zstd)](https://facebook.github.io/zstd) **(Recommended)**
Zstandard is the recommended algorithm for most compression. It provides
superior compression and performance at all levels compared to zlib. Zstandard is
an excellent choice for most use cases, especially storage and bandwidth constrained
network transfer.
The native implementation of Zstandard is provided by the `ZstdNativeCompressor` and
`ZstdNativeDecompressor` classes. The Java implementation is provided by the
`ZstdJavaCompressor` and `ZstdJavaDecompressor` classes.
The Zstandard streaming format is supported by `ZstdInputStream` and `ZstdOutputStream`.
## [LZ4](https://www.lz4.org/)
LZ4 is an extremely fast compression algorithm that provides compression ratios comparable
to Snappy and LZO. LZ4 is an excellent choice for applications that require high-performance
compression and decompression.
The native implementation of LZ4 is provided by `Lz4NativeCompressor` and `Lz4NativeDecompressor`.
The Java implementation is provided by `Lz4JavaCompressor` and `Lz4JavaDecompressor`.
## [Snappy](https://google.github.io/snappy/)
Snappy is not as fast as LZ4, but provides a guarantee on memory usage that makes it a good
choice for extremely resource-limited environments (e.g. embedded systems like a network
switch). If your application is not highly resource constrained, LZ4 is a better choice.
The native implementation of Snappy is provided by `SnappyNativeCompressor` and `SnappyNativeDecompressor`.
The Java implementation is provided by `SnappyJavaCompressor` and `SnappyJavaDecompressor`.
The Snappy framed format is supported by `SnappyFramedInputStream` and `SnappyFramedOutputStream`.
## [LZO](https://www.oberhumer.com/opensource/lzo/)
LZO is only provided for compatibility with existing systems that use LZO. We recommend
rewriting LZO data using Zstandard or LZ4.
The Java implementation of LZO is provided by `LzoCompressor` and `LzoDecompressor`.
Due to licensing issues, LZO only has a Java implementation, which is based on LZ4.
## Deflate
Deflate is the block compression algorithm used by the `gzip` and `zlib` libraries. Deflate is
provided for compatibility with existing systems that use Deflate. We recommend rewriting
Deflate data using Zstandard which provides superior compression and performance.
The implementation of Deflate is provided by `DeflateCompressor` and `DeflateDecompressor`.
This is implemented in the built-in Java libraries which internally use the native code.
# Hash Functions
## [XXHash3](https://xxhash.com/) **(Recommended)**
XXHash3 is the latest generation of the XXHash family, providing faster hashing than XXHash64
at all input sizes. It supports both 64-bit and 128-bit hash outputs.
XXHash3 is only available as a native implementation via `XxHash3Native`. There is no Java
implementation available. The 128-bit variant has approximately 12ns of constant overhead due
to Java FFM pulling the 128-bit result back into Java. At small inputs (<512 bytes) this
overhead is noticeable, but at larger sizes (8KB+) it becomes a rounding error as hash
computation dominates (measured on M4 Apple Silicon).
```java
// One-shot hashing (64-bit)
long hash = XxHash3Native.hash(data);
// One-shot hashing (128-bit)
XxHash128 hash = XxHash3Native.hash128(data);
// Streaming hashing (64-bit)
try (XxHash3Hasher hasher = XxHash3Native.newHasher()) {
hasher.update(chunk1);
hasher.update(chunk2);
long hash = hasher.digest();
}
// Streaming hashing (128-bit)
try (XxHash3Hasher128 hasher = XxHash3Native.newHasher128()) {
hasher.update(chunk1);
hasher.update(chunk2);
XxHash128 hash = hasher.digest();
}
```
## [XXHash64](https://xxhash.com/)
XXHash64 is an extremely fast non-cryptographic hash function with excellent distribution properties.
The native implementation is provided by `XxHash64NativeHasher` and the Java implementation
is provided by `XxHash64JavaHasher`. The `XxHash64Hasher` interface provides static methods
that automatically select the best available implementation.
```java
// One-shot hashing
long hash = XxHash64Hasher.hash(data);
long hash = XxHash64Hasher.hash(data, seed);
// Streaming hashing
try (XxHash64Hasher hasher = XxHash64Hasher.create()) {
hasher.update(chunk1);
hasher.update(chunk2);
long hash = hasher.digest();
}
```
# Hadoop Compression
In addition to the raw block encoders, there are implementations of the
Hadoop streams for the above algorithms. In addition, implementations of
gzip and bzip2 are provided so that all standard Hadoop algorithms are available.
The `HadoopStreams` class provides a factory for creating `InputStream` and `OutputStream`
implementations without the need for any Hadoop dependencies. For environments
that have Hadoop dependencies, each algorithm also provides a `CompressionCodec` class.
# Requirements
This library requires a Java 22+ virtual machine containing the `sun.misc.Unsafe` interface running on a little endian platform.
# Configuration
Temporary directory used to unpack and load native libraries can be configured using the `aircompressor.tmpdir` system property,
with a default value of `java.io.tmpdir`. This is useful when the default temporary directory is mounted as `noexec`.
Loading of native libraries can be disabled entirely by setting the `io.airlift.compress.v3.disable-native` system property.
# Users
This library is used in projects such as Trino (https://trino.io), a distributed SQL engine.
================================================
FILE: bin/download.sh
================================================
#!/bin/bash
set -euo pipefail
# Destination directory for the downloaded native libraries, resolved relative
# to this script. "$0" is quoted so a checkout path containing spaces does not
# word-split inside the command substitution.
RESOURCES="$(dirname "$0")/../src/main/resources/aircompressor"
# Extract one shared library from a Debian package and stage it under $RESOURCES.
#   $1 = URL of the .deb package
#   $2 = absolute path of the library inside the package payload
#   $3 = output path relative to $RESOURCES
# Skips the download when the output file already exists.
download_linux()
{
    local dest scratch
    echo "Download $3 ..."
    dest="$RESOURCES/$3"
    if [ -f "$dest" ]; then
        echo "=> skipped"
        return
    fi
    scratch=$(mktemp)
    # First tar pass pulls data.tar.xz out of the .deb; second pass extracts
    # the library itself to stdout (payload paths are prefixed with ".").
    curl -sSL "$1" | tar -xO data.tar.xz | tar -xO ".$2" > "$scratch"
    mv -f "$scratch" "$dest"
    chmod 644 "$dest"
    echo "=> downloaded"
}
# Download one dylib from a Homebrew bottle hosted on ghcr.io and stage it
# under $RESOURCES.
#   $1 = Homebrew formula name   $2 = formula version
#   $3 = dylib file name inside the bottle   $4 = architecture (amd64/arm64)
#   $5 = output path relative to $RESOURCES
# Skips the download when the output file already exists.
download_macos()
{
echo "Download $5 ..."
OUT="$RESOURCES/$5"
if [ -f "$OUT" ]; then
echo "=> skipped"
return
fi
# Query the OCI image index for the formula and pick the bottle digest for the
# requested darwin architecture. "Bearer QQ==" is the anonymous ghcr.io token
# (base64 of "A"). sort_by(os.version) + head -n1 selects the oldest macOS
# build, which presumably maximizes forward compatibility — TODO confirm.
DIGEST=$(curl -sS -L \
-H 'Authorization: Bearer QQ==' \
-H 'Accept: application/vnd.oci.image.index.v1+json' \
"https://ghcr.io/v2/homebrew/core/$1/manifests/$2" \
| jq -r "
.manifests |
sort_by(.platform[\"os.version\"]) |
.[] |
select(.platform.os == \"darwin\") |
select(.platform.architecture == \"$4\") |
.annotations[\"sh.brew.bottle.digest\"]" \
| head -n1)
TEMP=$(mktemp)
# Fetch the bottle blob (a tarball) and extract just the dylib to stdout.
curl -sS -L \
-H 'Authorization: Bearer QQ==' \
"https://ghcr.io/v2/homebrew/core/$1/blobs/sha256:$DIGEST" | \
tar -xO "$1/$2/lib/$3" > "$TEMP"
mv -f "$TEMP" "$OUT"
chmod 644 "$OUT"
echo "=> downloaded"
}
# Download the native libraries for every supported platform.
# Argument conventions:
#   download_linux <deb url> <library path inside the .deb> <output path under resources>
#   download_macos <formula> <version> <dylib name> <arch> <output path under resources>
# Snappy
download_linux \
"https://deb.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.2.1-1+b1_amd64.deb" \
"/usr/lib/x86_64-linux-gnu/libsnappy.so.1.2.1" \
"linux-amd64/libsnappy.so"
download_linux \
"https://deb.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.2.1-1+b1_arm64.deb" \
"/usr/lib/aarch64-linux-gnu/libsnappy.so.1.2.1" \
"linux-aarch64/libsnappy.so"
download_linux \
"https://deb.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.2.1-1+b1_ppc64el.deb" \
"/usr/lib/powerpc64le-linux-gnu/libsnappy.so.1.2.1" \
"linux-ppc64le/libsnappy.so"
download_macos \
snappy 1.1.10 libsnappy.1.1.10.dylib amd64 macos-amd64/libsnappy.dylib
download_macos \
snappy 1.1.10 libsnappy.1.1.10.dylib arm64 macos-aarch64/libsnappy.dylib
# Zstandard
download_linux \
"https://deb.debian.org/debian/pool/main/libz/libzstd/libzstd1_1.5.6+dfsg-1_amd64.deb" \
"/usr/lib/x86_64-linux-gnu/libzstd.so.1.5.6" \
"linux-amd64/libzstd.so"
download_linux \
"https://deb.debian.org/debian/pool/main/libz/libzstd/libzstd1_1.5.6+dfsg-1_arm64.deb" \
"/usr/lib/aarch64-linux-gnu/libzstd.so.1.5.6" \
"linux-aarch64/libzstd.so"
download_linux \
"https://deb.debian.org/debian/pool/main/libz/libzstd/libzstd1_1.5.6+dfsg-1_ppc64el.deb" \
"/usr/lib/powerpc64le-linux-gnu/libzstd.so.1.5.6" \
"linux-ppc64le/libzstd.so"
download_macos \
zstd 1.5.6 libzstd.1.5.6.dylib amd64 macos-amd64/libzstd.dylib
download_macos \
zstd 1.5.6 libzstd.1.5.6.dylib arm64 macos-aarch64/libzstd.dylib
# LZ4
download_linux \
"https://deb.debian.org/debian/pool/main/l/lz4/liblz4-1_1.10.0-1_amd64.deb" \
"/usr/lib/x86_64-linux-gnu/liblz4.so.1.10.0" \
"linux-amd64/liblz4.so"
download_linux \
"https://deb.debian.org/debian/pool/main/l/lz4/liblz4-1_1.10.0-1_arm64.deb" \
"/usr/lib/aarch64-linux-gnu/liblz4.so.1.10.0" \
"linux-aarch64/liblz4.so"
download_linux \
"https://deb.debian.org/debian/pool/main/l/lz4/liblz4-1_1.10.0-1_ppc64el.deb" \
"/usr/lib/powerpc64le-linux-gnu/liblz4.so.1.10.0" \
"linux-ppc64le/liblz4.so"
download_macos \
lz4 1.10.0 liblz4.1.10.0.dylib amd64 macos-amd64/liblz4.dylib
download_macos \
lz4 1.10.0 liblz4.1.10.0.dylib arm64 macos-aarch64/liblz4.dylib
# bzip2 (Linux only; no macOS bottle is downloaded for bzip2)
download_linux \
"https://deb.debian.org/debian/pool/main/b/bzip2/libbz2-1.0_1.0.8-6_amd64.deb" \
"/usr/lib/x86_64-linux-gnu/libbz2.so.1.0.4" \
"linux-amd64/libbz2.so"
download_linux \
"https://deb.debian.org/debian/pool/main/b/bzip2/libbz2-1.0_1.0.8-6_arm64.deb" \
"/usr/lib/aarch64-linux-gnu/libbz2.so.1.0.4" \
"linux-aarch64/libbz2.so"
download_linux \
"https://deb.debian.org/debian/pool/main/b/bzip2/libbz2-1.0_1.0.8-6_ppc64el.deb" \
"/usr/lib/powerpc64le-linux-gnu/libbz2.so.1.0.4" \
"linux-ppc64le/libbz2.so"
# XXHash
download_linux \
"https://deb.debian.org/debian/pool/main/x/xxhash/libxxhash0_0.8.3-2_amd64.deb" \
"/usr/lib/x86_64-linux-gnu/libxxhash.so.0.8.3" \
"linux-amd64/libxxhash.so"
download_linux \
"https://deb.debian.org/debian/pool/main/x/xxhash/libxxhash0_0.8.3-2_arm64.deb" \
"/usr/lib/aarch64-linux-gnu/libxxhash.so.0.8.3" \
"linux-aarch64/libxxhash.so"
download_linux \
"https://deb.debian.org/debian/pool/main/x/xxhash/libxxhash0_0.8.3-2_ppc64el.deb" \
"/usr/lib/powerpc64le-linux-gnu/libxxhash.so.0.8.3" \
"linux-ppc64le/libxxhash.so"
download_macos \
xxhash 0.8.3 libxxhash.0.8.3.dylib amd64 macos-amd64/libxxhash.dylib
download_macos \
xxhash 0.8.3 libxxhash.0.8.3.dylib arm64 macos-aarch64/libxxhash.dylib
# libdeflate
download_linux \
"https://deb.debian.org/debian/pool/main/libd/libdeflate/libdeflate0_1.23-2_amd64.deb" \
"/usr/lib/x86_64-linux-gnu/libdeflate.so.0" \
"linux-amd64/libdeflate.so"
download_linux \
"https://deb.debian.org/debian/pool/main/libd/libdeflate/libdeflate0_1.23-2_arm64.deb" \
"/usr/lib/aarch64-linux-gnu/libdeflate.so.0" \
"linux-aarch64/libdeflate.so"
download_linux \
"https://deb.debian.org/debian/pool/main/libd/libdeflate/libdeflate0_1.23-2+b1_ppc64el.deb" \
"/usr/lib/powerpc64le-linux-gnu/libdeflate.so.0" \
"linux-ppc64le/libdeflate.so"
download_macos \
libdeflate 1.23 libdeflate.0.dylib amd64 macos-amd64/libdeflate.dylib
download_macos \
libdeflate 1.23 libdeflate.0.dylib arm64 macos-aarch64/libdeflate.dylib
================================================
FILE: license.txt
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: mvnw
================================================
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Apache Maven Wrapper startup shell script, version 3.3.2
#
# Optional ENV vars
# -----------------
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
# MVNW_REPOURL - repo url base for downloading maven distribution
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
# ----------------------------------------------------------------------------
# Fail fast: -e abort on any error, -u error on unset variables, -f disable globbing.
set -euf
# MVNW_VERBOSE=debug additionally traces every command executed by this script.
[ "${MVNW_VERBOSE-}" != debug ] || set -x
# OS specific support.
# native_path converts a path for consumption by native tools: the identity
# on plain Unix, rewritten to Windows form on Cygwin/MinGW (redefined below).
native_path() { printf %s\\n "$1"; }
case "$(uname)" in
CYGWIN* | MINGW*)
# Normalize JAVA_HOME to Unix form so the rest of the script can use it.
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
native_path() { cygpath --path --windows "$1"; }
;;
esac
# set JAVACMD and JAVACCMD
# Resolves the java and javac executables into JAVACMD/JAVACCMD, preferring
# JAVA_HOME when set and falling back to a PATH lookup otherwise.
# Returns 1 (after printing a diagnostic to stderr) when no usable JDK is found.
set_java_home() {
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
if [ -n "${JAVA_HOME-}" ]; then
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
JAVACCMD="$JAVA_HOME/jre/sh/javac"
else
JAVACMD="$JAVA_HOME/bin/java"
JAVACCMD="$JAVA_HOME/bin/javac"
# Both executables must exist and be runnable, otherwise JAVA_HOME is bogus.
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
return 1
fi
fi
else
# No JAVA_HOME: look up java/javac on PATH. The quoted 'set'/'unset'/'command'
# defeat aliases or shell functions that might shadow the builtins; the
# trailing "|| :" keeps set -e from aborting when the lookup fails.
JAVACMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v java
)" || :
JAVACCMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v javac
)" || :
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
return 1
fi
fi
}
# hash string like Java String::hashCode
# Folds each character of the argument into a 32-bit rolling hash
# (acc = acc*31 + charCode, modulo 2^32) and prints it as lowercase hex.
hash_string() {
  rest="${1:-}"
  acc=0
  until [ -z "$rest" ]; do
    # First character of the remainder, via two POSIX prefix/suffix strips.
    head_char="${rest%"${rest#?}"}"
    # 'X in printf %d yields the character code of X (POSIX numeric prefix).
    code=$(LC_CTYPE=C printf %d "'$head_char")
    acc=$(((acc * 31 + code) % 4294967296))
    rest="${rest#?}"
  done
  printf '%x\n' "$acc"
}
# verbose is a no-op unless MVNW_VERBOSE=true, in which case it echoes its argument.
verbose() { :; }
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
# die prints a message to stderr and terminates the script with status 1.
die() {
printf %s\\n "$1" >&2
exit 1
}
trim() {
# MWRAPPER-139:
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
# Needed for removing poorly interpreted newline sequences when running in more
# exotic environments such as mingw bash on Windows.
# NOTE: tr -d '[:space:]' removes ALL whitespace, including interior spaces,
# which is acceptable for the single-token property values it is applied to.
printf "%s" "${1}" | tr -d '[:space:]'
}
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do
case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
# Decide the launcher command and repository path pattern from the file name
# at the end of the distribution URL.
case "${distributionUrl##*/}" in
maven-mvnd-*bin.*)
# Platform-independent mvnd URL: substitute the platform-specific archive.
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
*)
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
distributionPlatform=linux-amd64
;;
esac
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
distributionUrlName="${distributionUrl##*/}"
distributionUrlNameMain="${distributionUrlName%.*}"
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
# MAVEN_USER_HOME defaults to ~/.m2; the URL hash keys the install cache so
# different distribution URLs never collide in the same directory.
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
# exec_maven replaces this process with the resolved Maven (or mvnd) launcher,
# scrubbing wrapper-only environment variables first so they do not leak.
exec_maven() {
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
}
# Fast path: the distribution is already installed, run it directly.
if [ -d "$MAVEN_HOME" ]; then
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
exec_maven "$@"
fi
# Sanity-check the final URL shape before attempting any download.
case "${distributionUrl-}" in
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
esac
# prepare tmp dir
# The trap removes the scratch download area on normal exit or interruption.
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
trap clean HUP INT TERM EXIT
else
die "cannot create temp dir"
fi
mkdir -p -- "${MAVEN_HOME%/*}"
# Download and Install Apache Maven
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
verbose "Downloading from: $distributionUrl"
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
# select .zip or .tar.gz
# Without unzip available we fetch the .tar.gz variant and extract with tar.
if ! command -v unzip >/dev/null; then
distributionUrl="${distributionUrl%.zip}.tar.gz"
distributionUrlName="${distributionUrl##*/}"
fi
# verbose opt
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
# normalize http auth
# Credentials are only honored when both username and password are supplied.
case "${MVNW_PASSWORD:+has-password}" in
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
esac
# Download the distribution: prefer wget, then curl (both only when no HTTP
# auth is needed); otherwise fall back to a tiny generated Java downloader,
# which supports MVNW_USERNAME/MVNW_PASSWORD via java.net.Authenticator.
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
verbose "Found wget ... using wget"
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
verbose "Found curl ... using curl"
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
elif set_java_home; then
verbose "Falling back to use Java to download"
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
cat >"$javaSource" <<-END
public class Downloader extends java.net.Authenticator
{
protected java.net.PasswordAuthentication getPasswordAuthentication()
{
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
}
public static void main( String[] args ) throws Exception
{
setDefault( new Downloader() );
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
}
}
END
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
verbose " - Compiling Downloader.java ..."
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
verbose " - Running Downloader.java ..."
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
fi
# If specified, validate the SHA-256 sum of the Maven distribution zip file
if [ -n "${distributionSha256Sum-}" ]; then
distributionSha256Result=false
if [ "$MVN_CMD" = mvnd.sh ]; then
# Checksum validation is unsupported for mvnd distributions; fail loudly
# rather than silently skipping the check.
echo "Checksum validation is not supported for maven-mvnd." >&2
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
elif command -v sha256sum >/dev/null; then
# GNU coreutils sha256sum, common on Linux.
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
elif command -v shasum >/dev/null; then
# shasum fallback, common on macOS/BSD.
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
else
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
fi
if [ $distributionSha256Result = false ]; then
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
exit 1
fi
fi
# unzip and move
if command -v unzip >/dev/null; then
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi
# Record where this installation came from, then move the extracted tree
# into its final cached location.
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
# If the move fails but MAVEN_HOME exists, a concurrent mvnw won the race —
# that installation is equally valid, so proceed.
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || :
exec_maven "$@"
================================================
FILE: notice.md
================================================
Snappy Copyright Notices
=========================
* Copyright 2011 Dain Sundstrom <dain@iq80.com>
* Copyright 2011, Google Inc. <opensource@google.com>
Snappy License
===============
Copyright 2011, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================
FILE: pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>io.airlift</groupId>
<artifactId>airbase</artifactId>
<version>349</version>
</parent>
<groupId>io.airlift</groupId>
<artifactId>aircompressor-v3</artifactId>
<version>3.7-SNAPSHOT</version>
<packaging>jar</packaging>
<name>aircompressor</name>
<description>Compression algorithms</description>
<url>https://github.com/airlift/aircompressor</url>
<inceptionYear>2011</inceptionYear>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>https://www.apache.org/licenses/LICENSE-2.0.html</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:git://github.com/airlift/aircompressor.git</connection>
<developerConnection>scm:git:git@github.com:airlift/aircompressor.git</developerConnection>
<tag>HEAD</tag>
<url>https://github.com/airlift/aircompressor/tree/master</url>
</scm>
<distributionManagement>
<repository>
<id>sonatype-central-portal</id>
<name>Sonatype Central Portal</name>
<url>https://central.sonatype.com/repository/maven/</url>
</repository>
<snapshotRepository>
<id>sonatype-central-portal</id>
<name>Sonatype Central Portal</name>
<url>https://central.sonatype.com/repository/maven-snapshots/</url>
</snapshotRepository>
</distributionManagement>
<properties>
<air.java.version>25</air.java.version>
<project.build.targetJdk>25</project.build.targetJdk>
<air.check.skip-extended>true</air.check.skip-extended>
<air.check.skip-license>false</air.check.skip-license>
<air.check.fail-checkstyle>true</air.check.fail-checkstyle>
<air.check.skip-checkstyle>false</air.check.skip-checkstyle>
<air.test.parallel>methods</air.test.parallel>
<air.test.thread-count>4</air.test.thread-count>
<air.test.jvmsize>2G</air.test.jvmsize>
<!-- do not run tests on release -->
<air.release.preparation-goals>clean verify -DskipTests</air.release.preparation-goals>
</properties>
<dependencies>
<dependency>
<groupId>io.trino.hadoop</groupId>
<artifactId>hadoop-apache</artifactId>
<version>3.3.5-3</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.github.luben</groupId>
<artifactId>zstd-jni</artifactId>
<version>1.5.6-6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
<classifier>classes</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.anarres.lzo</groupId>
<artifactId>lzo-hadoop</artifactId>
<version>1.0.6</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
<version>1.8.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.10.7</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<target>${project.build.targetJdk}</target>
<release combine.self="override" />
<annotationProcessorPaths>
<path>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<version>${dep.jmh.version}</version>
</path>
</annotationProcessorPaths>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
</project>
================================================
FILE: src/checkstyle/checks.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<module name="Checker">
<module name="FileTabCharacter"/>
<module name="NewlineAtEndOfFile">
<property name="lineSeparator" value="lf"/>
</module>
<module name="RegexpMultiline">
<property name="format" value="\r"/>
<property name="message" value="Line contains carriage return"/>
</module>
<module name="RegexpMultiline">
<property name="format" value=" \n"/>
<property name="message" value="Line has trailing whitespace"/>
</module>
<module name="RegexpMultiline">
<property name="format" value="\{\n\n"/>
<property name="message" value="Blank line after opening brace"/>
</module>
<module name="RegexpMultiline">
<property name="format" value="\n\n\}"/>
<property name="message" value="Blank line before closing brace"/>
</module>
<module name="RegexpMultiline">
<property name="format" value="\n\n\n"/>
<property name="message" value="Multiple consecutive blank lines"/>
</module>
<module name="RegexpMultiline">
<property name="format" value="\n\n\Z"/>
<property name="message" value="Blank line before end of file"/>
</module>
<module name="TreeWalker">
<module name="EmptyBlock">
<property name="option" value="text"/>
<property name="tokens" value="
LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY, LITERAL_IF,
LITERAL_FOR, LITERAL_TRY, LITERAL_WHILE, INSTANCE_INIT, STATIC_INIT"/>
</module>
<module name="EmptyStatement"/>
<module name="EmptyForInitializerPad"/>
<module name="EmptyForIteratorPad">
<property name="option" value="space"/>
</module>
<module name="MethodParamPad">
<property name="allowLineBreaks" value="true"/>
<property name="option" value="nospace"/>
</module>
<module name="ParenPad"/>
<module name="TypecastParenPad"/>
<module name="NeedBraces"/>
<module name="LeftCurly">
<property name="option" value="nl"/>
<property name="tokens" value="CLASS_DEF, CTOR_DEF, INTERFACE_DEF, METHOD_DEF"/>
</module>
<module name="LeftCurly">
<property name="option" value="eol"/>
<property name="tokens" value="
LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY, LITERAL_FOR,
LITERAL_IF, LITERAL_SWITCH, LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE"/>
</module>
<module name="RightCurly">
<property name="option" value="alone"/>
</module>
<module name="GenericWhitespace"/>
<module name="WhitespaceAfter"/>
<module name="NoWhitespaceBefore"/>
<module name="UpperEll"/>
<module name="DefaultComesLast"/>
<module name="ArrayTypeStyle"/>
<module name="MultipleVariableDeclarations"/>
<module name="ModifierOrder"/>
<module name="OneStatementPerLine"/>
<module name="StringLiteralEquality"/>
<module name="MutableException"/>
<module name="EqualsHashCode"/>
<!-- LZO and CRC 32 use inner assignments -->
<!--<module name="InnerAssignment"/>-->
<module name="InterfaceIsType"/>
<module name="HideUtilityClassConstructor"/>
<module name="MemberName"/>
<module name="LocalVariableName"/>
<module name="LocalFinalVariableName"/>
<module name="TypeName"/>
<module name="PackageName"/>
<module name="ParameterName"/>
<module name="StaticVariableName"/>
<module name="ClassTypeParameterName">
<property name="format" value="^[A-Z][0-9]?$"/>
</module>
<module name="MethodTypeParameterName">
<property name="format" value="^[A-Z][0-9]?$"/>
</module>
<module name="AvoidStarImport"/>
<module name="RedundantImport"/>
<module name="UnusedImports"/>
<module name="WhitespaceAround">
<property name="allowEmptyConstructors" value="true"/>
<property name="allowEmptyMethods" value="true"/>
<property name="ignoreEnhancedForColon" value="false"/>
<property name="tokens" value="
ASSIGN, BAND, BAND_ASSIGN, BOR, BOR_ASSIGN, BSR, BSR_ASSIGN,
BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN, EQUAL, GE, GT, LAND, LE,
LITERAL_ASSERT, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE,
LOR, LT, MINUS, MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL,
PLUS, PLUS_ASSIGN, QUESTION, SL, SLIST, SL_ASSIGN, SR, SR_ASSIGN,
STAR, STAR_ASSIGN, TYPE_EXTENSION_AND"/>
</module>
</module>
</module>
================================================
FILE: src/license/LICENSE-HEADER.txt
================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: src/main/java/io/airlift/compress/v3/Compressor.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3;
import java.lang.foreign.MemorySegment;
/**
 * One-shot block compressor. Implementations compress a complete input
 * buffer into a caller-supplied output buffer in a single call, with both
 * byte-array and {@link MemorySegment} entry points.
 */
public interface Compressor
{
/**
 * Upper bound on the compressed size for an input of the given length;
 * callers use this to size the output buffer passed to {@code compress}.
 *
 * @param uncompressedSize length of the data to be compressed, in bytes
 * @return the maximum number of bytes the compressed form may occupy
 */
int maxCompressedLength(int uncompressedSize);
/**
 * Compresses {@code inputLength} bytes of {@code input} starting at
 * {@code inputOffset} into {@code output} starting at {@code outputOffset},
 * writing at most {@code maxOutputLength} bytes.
 *
 * @return number of bytes written to the output
 */
int compress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength);
/**
 * Compresses the entire {@code input} segment into {@code output}.
 *
 * @return number of bytes written to the output
 */
int compress(MemorySegment input, MemorySegment output);
/**
 * Additional memory retained by this compressor for an input of the given
 * length; the default implementation returns 0.
 * NOTE(review): exact semantics (e.g. internal scratch buffers) inferred
 * from the name — confirm against implementations that override this.
 *
 * @param inputLength length of the input to be compressed, in bytes
 * @return retained size in bytes (0 unless overridden)
 */
default int getRetainedSizeInBytes(int inputLength)
{
return 0;
}
}
================================================
FILE: src/main/java/io/airlift/compress/v3/Decompressor.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3;
import java.lang.foreign.MemorySegment;
/**
 * A one-shot decompressor: each call decompresses a complete compressed buffer.
 */
public interface Decompressor
{
    /**
     * Decompresses {@code inputLength} bytes starting at {@code input[inputOffset]}
     * into {@code output} starting at {@code outputOffset}, writing at most
     * {@code maxOutputLength} bytes.
     *
     * @return number of bytes written to the output
     * @throws MalformedInputException if the input is not valid compressed data
     */
    int decompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
            throws MalformedInputException;

    /**
     * Decompresses the entire {@code input} segment into the {@code output} segment.
     *
     * @return number of bytes written to the output
     * @throws MalformedInputException if the input is not valid compressed data
     */
    int decompress(MemorySegment input, MemorySegment output)
            throws MalformedInputException;
}
================================================
FILE: src/main/java/io/airlift/compress/v3/IncompatibleJvmException.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3;
/**
 * Thrown when the running JVM does not provide a capability required by a
 * particular compressor or decompressor implementation.
 */
public class IncompatibleJvmException
        extends RuntimeException
{
    public IncompatibleJvmException(String message)
    {
        super(message);
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/MalformedInputException.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3;
/**
 * Signals that compressed input could not be decoded. The offset of the
 * failure within the input is carried both in the message and as a field.
 */
public class MalformedInputException
        extends RuntimeException
{
    private final long offset;

    public MalformedInputException(long offset)
    {
        this(offset, "Malformed input");
    }

    public MalformedInputException(long offset, String reason)
    {
        super("%s: offset=%d".formatted(reason, offset));
        this.offset = offset;
    }

    /**
     * Offset within the input at which the malformed data was detected.
     */
    public long getOffset()
    {
        return offset;
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2Codec.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.bzip2;
import io.airlift.compress.v3.hadoop.CodecAdapter;
/**
 * Hadoop codec adapter for the BZip2 format. Always supplies
 * {@link BZip2HadoopStreams}; the Hadoop configuration is ignored.
 */
public class BZip2Codec
        extends CodecAdapter
{
    public BZip2Codec()
    {
        super(configuration -> new BZip2HadoopStreams());
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2Constants.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This package is based on the work done by Keiron Liddle, Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
package io.airlift.compress.v3.bzip2;
/**
 * Constants shared by the BZip2 compressor and decompressor.
 */
// forked from Apache Hadoop
final class BZip2Constants
{
    // Uncompressed block size unit: the block-size header digit ('1'..'9')
    // is multiplied by this value.
    public static final int BASE_BLOCK_SIZE = 100000;

    // Maximum Huffman alphabet size: 256 byte values plus two run-length symbols.
    public static final int MAX_ALPHA_SIZE = 258;

    // Run-length (RLE2/MTF stage) symbols encoding run lengths in a bijective base-2 scheme.
    public static final int RUN_A = 0;
    public static final int RUN_B = 1;

    // Maximum number of Huffman coding groups (tables) per block.
    public static final int N_GROUPS = 6;

    // Number of symbols covered by one group selector.
    public static final int G_SIZE = 50;

    // Upper bound on selectors per block: 9 * BASE_BLOCK_SIZE symbols max,
    // one selector per G_SIZE symbols, plus slack.
    public static final int MAX_SELECTORS = (2 + (900000 / G_SIZE));

    private BZip2Constants() {}
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopInputStream.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.bzip2;
import io.airlift.compress.v3.hadoop.HadoopInputStream;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
// forked from Apache Hadoop
class BZip2HadoopInputStream
        extends HadoopInputStream
{
    private final BufferedInputStream source;
    private CBZip2InputStream decompressor;

    public BZip2HadoopInputStream(InputStream in)
    {
        source = new BufferedInputStream(in);
    }

    @Override
    public int read(byte[] buffer, int offset, int length)
            throws IOException
    {
        if (length == 0) {
            return 0;
        }
        openDecompressorIfNecessary();
        int count = decompressor.read(buffer, offset, length);
        // an end-of-block marker means nothing was read; pull one byte from the
        // next block so callers observe forward progress
        if (count == CBZip2InputStream.END_OF_BLOCK) {
            count = decompressor.read(buffer, offset, 1);
        }
        return count;
    }

    @Override
    public int read()
            throws IOException
    {
        byte[] single = new byte[1];
        int count = read(single, 0, 1);
        return (count < 0) ? count : (single[0] & 0xff);
    }

    @Override
    public void resetState()
    {
        // drop the current decompressor; a new one is created on the next read
        decompressor = null;
    }

    @Override
    public void close()
            throws IOException
    {
        decompressor = null;
        source.close();
    }

    /**
     * Lazily creates the decompressor, consuming a leading "BZ" magic if present
     * (CBZip2InputStream expects the stream to start after the magic).
     */
    private void openDecompressorIfNecessary()
            throws IOException
    {
        if (decompressor == null) {
            source.mark(2);
            boolean hasMagic = source.read() == 'B' && source.read() == 'Z';
            if (!hasMagic) {
                source.reset();
            }
            decompressor = new CBZip2InputStream(source);
        }
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopOutputStream.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.bzip2;
import io.airlift.compress.v3.hadoop.HadoopOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import static java.util.Objects.requireNonNull;
// forked from Apache Hadoop
class BZip2HadoopOutputStream
        extends HadoopOutputStream
{
    private final OutputStream sink;
    private boolean headerWritten;
    private CBZip2OutputStream compressor;

    public BZip2HadoopOutputStream(OutputStream out)
    {
        this.sink = requireNonNull(out, "out is null");
    }

    @Override
    public void write(int b)
            throws IOException
    {
        openStreamIfNecessary();
        compressor.write(b);
    }

    @Override
    public void write(byte[] b, int off, int len)
            throws IOException
    {
        openStreamIfNecessary();
        compressor.write(b, off, len);
    }

    @Override
    public void finish()
            throws IOException
    {
        if (compressor == null) {
            return;
        }
        compressor.finish();
        compressor = null;
    }

    @Override
    public void flush()
            throws IOException
    {
        sink.flush();
    }

    @Override
    public void close()
            throws IOException
    {
        try {
            // if the stream was never written to, still emit a valid (empty) bzip2 stream
            if (!headerWritten) {
                openStreamIfNecessary();
            }
            finish();
        }
        finally {
            sink.close();
        }
    }

    /**
     * Writes the "BZ" stream magic and opens a new compression block on first use.
     */
    private void openStreamIfNecessary()
            throws IOException
    {
        if (compressor == null) {
            headerWritten = true;
            sink.write(new byte[] {'B', 'Z'});
            compressor = new CBZip2OutputStream(sink);
        }
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopStreams.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.bzip2;
import io.airlift.compress.v3.hadoop.HadoopInputStream;
import io.airlift.compress.v3.hadoop.HadoopOutputStream;
import io.airlift.compress.v3.hadoop.HadoopStreams;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import static java.util.Collections.singletonList;
/**
 * {@link HadoopStreams} for the BZip2 format: creates BZip2 compressed input
 * and output streams and reports the Hadoop codec name it substitutes for.
 */
public class BZip2HadoopStreams
        implements HadoopStreams
{
    @Override
    public String getDefaultFileExtension()
    {
        return ".bz2";
    }

    @Override
    public List<String> getHadoopCodecName()
    {
        return singletonList("org.apache.hadoop.io.compress.BZip2Codec");
    }

    @Override
    public HadoopInputStream createInputStream(InputStream in)
    {
        return new BZip2HadoopInputStream(in);
    }

    @Override
    public HadoopOutputStream createOutputStream(OutputStream out)
    {
        return new BZip2HadoopOutputStream(out);
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/CBZip2InputStream.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This package is based on the work done by Keiron Liddle, Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
package io.airlift.compress.v3.bzip2;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import static io.airlift.compress.v3.bzip2.BZip2Constants.G_SIZE;
import static io.airlift.compress.v3.bzip2.BZip2Constants.MAX_ALPHA_SIZE;
import static io.airlift.compress.v3.bzip2.BZip2Constants.MAX_SELECTORS;
import static io.airlift.compress.v3.bzip2.BZip2Constants.N_GROUPS;
import static io.airlift.compress.v3.bzip2.BZip2Constants.RUN_A;
import static io.airlift.compress.v3.bzip2.BZip2Constants.RUN_B;
/**
* An input stream that decompresses from the BZip2 format (without the file
* header chars) to be read as any other stream.
*
* <p>
* The decompression requires large amounts of memory. Thus you should call the
* {@link #close() close()} method as soon as possible, to force
* <tt>CBZip2InputStream</tt> to release the allocated memory. See
* {@link CBZip2OutputStream CBZip2OutputStream} for information about memory
* usage.
* </p>
*
* <p>
* <tt>CBZip2InputStream</tt> reads bytes from the compressed source stream via
* the single byte {@link InputStream#read() read()} method exclusively.
* Thus you should consider to use a buffered source stream.
* </p>
*
* <p>
* This Ant code was enhanced so that it can de-compress blocks of bzip2 data.
* Current position in the stream is an important statistic for Hadoop. For
* example in LineRecordReader, we solely depend on the current position in the
* stream to know about the progress. The notion of position becomes complicated
* for compressed files. The Hadoop splitting is done in terms of compressed
* file. But a compressed file deflates to a large amount of data. So we have
* handled this problem in the following way.
* <p>
* On object creation time, we find the next block start delimiter. Once such a
* marker is found, the stream stops there (we discard any read compressed data
* in this process) and the position is reported as the beginning of the block
* start delimiter. At this point we are ready for actual reading
* (i.e. decompression) of data.
* <p>
* The subsequent read calls give out data. The position is updated when the
* caller of this class has read off the current block + 1 bytes. In between the
* block reading, position is not updated. (We can only update the position on
* block boundaries).
* </p>
*
* <p>
* Instances of this class are not thread safe.
* </p>
*/
@SuppressWarnings({ "AssignmentToForLoopParameter", "SpellCheckingInspection"})
class CBZip2InputStream
extends InputStream
{
    // 48-bit block-start delimiter: the BCD digits of pi (0x314159265359)
    private static final long BLOCK_DELIMITER = 0X314159265359L;
    private static final int MAX_CODE_LEN = 23;

    /**
     * End of a BZip2 block
     */
    public static final int END_OF_BLOCK = -2;

    /**
     * End of BZip2 stream.
     */
    private static final int END_OF_STREAM = -1;

    private static final int DELIMITER_BIT_LENGTH = 48;

    // The variable records the current advertised position of the stream
    // (only updated on block boundaries).
    private long reportedBytesReadFromCompressedStream;

    // The following variable keeps record of raw compressed bytes read.
    private long bytesReadFromCompressedStream;

    // whether the stream header has been consumed (see init())
    private boolean initialized;

    // scratch buffer for the single-byte read() path
    private final byte[] array = new byte[1];

    /**
     * Index of the last char in the block, so the block size == last + 1.
     */
    private int last;

    /**
     * Index in zptr[] of original string after sorting.
     */
    private int origPtr;

    /**
     * always: in the range 0 .. 9. The current block size is 100000 * this
     * number.
     */
    private int blockSize100k;

    private boolean blockRandomised;

    // bit buffer: bsLive is the number of valid (unconsumed) bits in bsBuff
    private long bsBuff;
    private long bsLive;
    private final Crc32 crc32 = new Crc32();
    private int nInUse;
    private BufferedInputStream in;
    private int currentChar = -1;

    /**
     * A state machine to keep track of current state of the de-coder
     */
    public enum STATE
    {
        EOF, START_BLOCK_STATE, RAND_PART_A_STATE, RAND_PART_B_STATE, RAND_PART_C_STATE, NO_RAND_PART_A_STATE, NO_RAND_PART_B_STATE, NO_RAND_PART_C_STATE, NO_PROCESS_STATE
    }

    private STATE currentState = STATE.START_BLOCK_STATE;

    // per-block and combined CRCs as stored in the stream vs computed while decoding
    private int storedBlockCRC;
    private int storedCombinedCRC;
    private int computedCombinedCRC;

    // used by skipToNextMarker
    private boolean skipResult;

    // Variables used by setup* methods exclusively
    private int suCount;
    private int suCh2;
    private int suChPrev;
    private int suI2;
    private int suJ2;
    private int suRNToGo;
    private int suRTPos;
    private int suTPos;
    private char suZ;

    /**
     * All memory intensive stuff. This field is initialized by initBlock().
     */
    private Data data;
    /**
     * Constructs a new CBZip2InputStream which decompresses bytes read from the
     * specified stream.
     *
     * <p>
     * Although BZip2 headers are marked with the magic <tt>"Bz"</tt> this
     * constructor expects the next byte in the stream to be the first one after
     * the magic. Thus callers have to skip the first two bytes. Otherwise this
     * constructor will throw an exception.
     * </p>
     *
     * @throws IOException if the stream content is malformed or an I/O error occurs.
     * @throws NullPointerException if <tt>in == null</tt>
     */
    public CBZip2InputStream(final InputStream in)
    {
        // default to the maximum block size ('9'); the actual value is read
        // from the stream header in init()
        int blockSize = 0X39; // i.e 9
        this.blockSize100k = blockSize - (int) '0';
        this.in = new BufferedInputStream(in, 1024 * 9); // 9 KiB buffer
    }
    /**
     * This method reports the processed bytes so far. Please note that this
     * statistic is only updated on block boundaries (it is set when a block
     * delimiter is located by skipToNextMarker), so it lags the raw byte
     * count in between blocks.
     */
    public long getProcessedByteCount()
    {
        return reportedBytesReadFromCompressedStream;
    }
    /**
     * This method keeps track of raw processed compressed
     * bytes.
     *
     * @param count count is the number of bytes to be
     * added to raw processed bytes
     */
    private void updateProcessedByteCount(int count)
    {
        this.bytesReadFromCompressedStream += count;
    }
    /**
     * This method reads a Byte from the compressed stream. Whenever we need to
     * read from the underlying compressed stream, this method should be called
     * instead of directly calling the read method of the underlying compressed
     * stream. This method does important record keeping to have the statistic
     * that how many bytes have been read off the compressed stream.
     * End of stream (a negative return) is not counted.
     */
    private int readAByte(InputStream inStream)
            throws IOException
    {
        int read = inStream.read();
        if (read >= 0) {
            this.updateProcessedByteCount(1);
        }
        return read;
    }
    /**
     * This method tries to find the marker (passed to it as the first parameter)
     * in the stream. It can find bit patterns of length <= 63 bits. Specifically
     * this method is used in CBZip2InputStream to find the end of block (EOB)
     * delimiter in the stream, starting from the current position of the stream.
     * If marker is found, the stream position will be at the byte containing
     * the starting bit of the marker.
     *
     * <p>Any IOException from the underlying stream is swallowed and reported
     * as "marker not found" (returns false) with the position updated to the
     * raw byte count.
     *
     * @param marker The bit pattern to be found in the stream
     * @param markerBitLength No of bits in the marker
     * @return true if the marker was found otherwise false
     * @throws IllegalArgumentException if markerBitLength is greater than 63
     */
    private boolean skipToNextMarker(long marker, int markerBitLength)
            throws IllegalArgumentException
    {
        try {
            if (markerBitLength > 63) {
                throw new IllegalArgumentException(
                        "skipToNextMarker can not find patterns greater than 63 bits");
            }
            // pick next markerBitLength bits in the stream
            long bytes;
            bytes = this.bsR(markerBitLength);
            if (bytes == -1) {
                this.reportedBytesReadFromCompressedStream =
                        this.bytesReadFromCompressedStream;
                return false;
            }
            // slide a markerBitLength-bit window over the stream one bit at a time
            while (true) {
                if (bytes == marker) {
                    // Report the byte position where the marker starts
                    long markerBytesRead = (markerBitLength + this.bsLive + 7) / 8;
                    this.reportedBytesReadFromCompressedStream =
                            this.bytesReadFromCompressedStream - markerBytesRead;
                    return true;
                }
                else {
                    bytes = bytes << 1;
                    bytes = bytes & ((1L << markerBitLength) - 1);
                    int oneBit = (int) this.bsR(1);
                    if (oneBit != -1) {
                        bytes = bytes | oneBit;
                    }
                    else {
                        this.reportedBytesReadFromCompressedStream =
                                this.bytesReadFromCompressedStream;
                        return false;
                    }
                }
            }
        }
        catch (IOException ex) {
            this.reportedBytesReadFromCompressedStream =
                    this.bytesReadFromCompressedStream;
            return false;
        }
    }
private void makeMaps()
{
final boolean[] inUse = this.data.inUse;
final byte[] seqToUnseq = this.data.seqToUnseq;
int nInUseShadow = 0;
for (int i = 0; i < 256; i++) {
if (inUse[i]) {
seqToUnseq[nInUseShadow++] = (byte) i;
}
}
this.nInUse = nInUseShadow;
}
private void changeStateToProcessABlock()
throws IOException
{
if (skipResult) {
initBlock();
setupBlock();
}
else {
this.currentState = STATE.EOF;
}
}
@Override
public int read()
throws IOException
{
if (this.in != null) {
int result = this.read(array, 0, 1);
int value = 0XFF & array[0];
return (result > 0 ? value : result);
}
else {
throw new IOException("stream closed");
}
}
    /**
     * In CONTINUOUS reading mode, this read method starts from the
     * start of the compressed stream and end at the end of file by
     * emitting un-compressed data. In this mode stream positioning
     * is not announced and should be ignored.
     * <p>
     * In BYBLOCK reading mode, this read method informs about the end
     * of a BZip2 block by returning EOB. At this event, the compressed
     * stream position is also announced. This announcement tells that
     * how much of the compressed stream has been de-compressed and read
     * out of this class. In between EOB events, the stream position is
     * not updated.
     *
     * @return int The return value greater than 0 are the bytes read. A value
     * of -1 means end of stream while -2 represents end of block
     * @throws IOException if the stream content is malformed or an I/O error occurs.
     */
    @Override
    public int read(final byte[] dest, final int offs, final int len)
            throws IOException
    {
        if (offs < 0) {
            throw new IndexOutOfBoundsException("offs(" + offs + ") < 0.");
        }
        if (len < 0) {
            throw new IndexOutOfBoundsException("len(" + len + ") < 0.");
        }
        if (offs + len > dest.length) {
            throw new IndexOutOfBoundsException("offs(" + offs + ") + len("
                    + len + ") > dest.length(" + dest.length + ").");
        }
        if (this.in == null) {
            throw new IOException("stream closed");
        }
        // lazily consume the stream header on the first read
        if (!initialized) {
            this.init();
            this.initialized = true;
        }
        final int hi = offs + len;
        int destOffs = offs;
        int b = 0;
        while (((destOffs < hi) && ((b = read0())) >= 0)) {
            dest[destOffs++] = (byte) b;
        }
        int result = destOffs - offs;
        if (result == 0) {
            //report 'end of block' or 'end of stream'
            result = b;
            // position at the next block delimiter (updates the advertised
            // stream position) and prepare the decoder for the next block
            skipResult = this.skipToNextMarker(BLOCK_DELIMITER, DELIMITER_BIT_LENGTH);
            changeStateToProcessABlock();
        }
        return result;
    }
private int read0()
throws IOException
{
final int retChar = this.currentChar;
switch (this.currentState) {
case EOF:
return END_OF_STREAM; // return -1
case NO_PROCESS_STATE:
return END_OF_BLOCK; // return -2
case START_BLOCK_STATE:
throw new IllegalStateException();
case RAND_PART_A_STATE:
throw new IllegalStateException();
case RAND_PART_B_STATE:
setupRandPartB();
break;
case RAND_PART_C_STATE:
setupRandPartC();
break;
case NO_RAND_PART_A_STATE:
throw new IllegalStateException();
case NO_RAND_PART_B_STATE:
setupNoRandPartB();
break;
case NO_RAND_PART_C_STATE:
setupNoRandPartC();
break;
default:
throw new IllegalStateException();
}
return retChar;
}
    /**
     * Reads the stream header — the 'h' (Huffman) marker and the block-size
     * digit '1'..'9' — and decodes the first block. The leading "BZ" magic must
     * already have been consumed by the caller (see the constructor contract).
     */
    private void init()
            throws IOException
    {
        int magic2 = this.readAByte(in);
        if (magic2 != 'h') {
            throw new IOException("Stream is not BZip2 formatted: expected 'h'"
                    + " as first byte but got '" + (char) magic2 + "'");
        }
        int blockSize = this.readAByte(in);
        if ((blockSize < '1') || (blockSize > '9')) {
            throw new IOException("Stream is not BZip2 formatted: illegal "
                    + "blocksize " + (char) blockSize);
        }
        this.blockSize100k = blockSize - (int) '0';
        initBlock();
        setupBlock();
    }
private void initBlock()
throws IOException
{
char magic0 = bsGetUByte();
char magic1 = bsGetUByte();
char magic2 = bsGetUByte();
char magic3 = bsGetUByte();
char magic4 = bsGetUByte();
char magic5 = bsGetUByte();
if (magic0 == 0x17 && magic1 == 0x72 && magic2 == 0x45
&& magic3 == 0x38 && magic4 == 0x50 && magic5 == 0x90) {
complete(); // end of file
}
else if (magic0 != 0x31 || // '1'
magic1 != 0x41 || // ')'
magic2 != 0x59 || // 'Y'
magic3 != 0x26 || // '&'
magic4 != 0x53 || // 'S'
magic5 != 0x59 /* 'Y' */) {
this.currentState = STATE.EOF;
throw new IOException("bad block header");
}
else {
this.storedBlockCRC = bsGetInt();
this.blockRandomised = bsR(1) == 1;
// Allocate data here instead in constructor, so we do not allocate
// it if the input file is empty.
if (this.data == null) {
this.data = new Data(this.blockSize100k);
}
// currBlockNo++;
getAndMoveToFrontDecode();
this.crc32.initialiseCRC();
this.currentState = STATE.START_BLOCK_STATE;
}
}
    /**
     * Verifies the CRC of the block just decoded against the value stored in
     * the stream, and folds it into the running combined stream CRC.
     *
     * @throws IOException if the computed block CRC does not match the stored one
     */
    private void endBlock()
            throws IOException
    {
        int computedBlockCRC = this.crc32.getFinalCRC();
        // A bad CRC is considered a fatal error.
        if (this.storedBlockCRC != computedBlockCRC) {
            // make next blocks readable without error
            // (repair feature, not yet documented, not tested)
            this.computedCombinedCRC = (this.storedCombinedCRC << 1)
                    | (this.storedCombinedCRC >>> 31);
            this.computedCombinedCRC ^= this.storedBlockCRC;
            throw new IOException("crc error");
        }
        // combined CRC = rotate-left-1 of previous combined CRC, XOR block CRC
        this.computedCombinedCRC = (this.computedCombinedCRC << 1)
                | (this.computedCombinedCRC >>> 31);
        this.computedCombinedCRC ^= computedBlockCRC;
    }
    /**
     * Finishes the stream after the end-of-stream marker: reads the stored
     * combined CRC, releases the block memory, marks EOF, and verifies the
     * combined CRC accumulated over all blocks.
     *
     * @throws IOException if the stored combined CRC does not match the computed one
     */
    private void complete()
            throws IOException
    {
        this.storedCombinedCRC = bsGetInt();
        this.currentState = STATE.EOF;
        this.data = null;
        if (this.storedCombinedCRC != this.computedCombinedCRC) {
            throw new IOException("crc error");
        }
    }
@Override
public void close()
throws IOException
{
InputStream inShadow = this.in;
if (inShadow != null) {
try {
if (inShadow != System.in) {
inShadow.close();
}
}
finally {
this.data = null;
this.in = null;
}
}
}
    /**
     * Reads the next {@code n} bits (most significant first) from the bit
     * buffer, refilling it from the underlying stream one byte at a time as
     * needed.
     *
     * @throws IOException if the stream ends before {@code n} bits are available
     */
    private long bsR(final long n)
            throws IOException
    {
        long bsLiveShadow = this.bsLive;
        long bsBuffShadow = this.bsBuff;
        if (bsLiveShadow < n) {
            final InputStream inShadow = this.in;
            do {
                int thech = readAByte(inShadow);
                if (thech < 0) {
                    throw new IOException("unexpected end of stream");
                }
                bsBuffShadow = (bsBuffShadow << 8) | thech;
                bsLiveShadow += 8;
            } while (bsLiveShadow < n);
            this.bsBuff = bsBuffShadow;
        }
        this.bsLive = bsLiveShadow - n;
        // top n unconsumed bits of the buffer
        return (bsBuffShadow >> (bsLiveShadow - n)) & ((1L << n) - 1);
    }
    /**
     * Reads a single bit from the bit buffer, refilling it from the underlying
     * stream if it is empty.
     *
     * @throws IOException if the stream ends before a bit is available
     */
    private boolean bsGetBit()
            throws IOException
    {
        long bsLiveShadow = this.bsLive;
        long bsBuffShadow = this.bsBuff;
        if (bsLiveShadow < 1) {
            int thech = this.readAByte(in);
            if (thech < 0) {
                throw new IOException("unexpected end of stream");
            }
            bsBuffShadow = (bsBuffShadow << 8) | thech;
            bsLiveShadow += 8;
            this.bsBuff = bsBuffShadow;
        }
        this.bsLive = bsLiveShadow - 1;
        return ((bsBuffShadow >> (bsLiveShadow - 1)) & 1) != 0;
    }
    /**
     * Reads 8 bits as an unsigned byte value (returned in a {@code char}).
     */
    private char bsGetUByte()
            throws IOException
    {
        return (char) bsR(8);
    }
private int bsGetInt()
throws IOException
{
return (int) ((((((bsR(8) << 8) | bsR(8)) << 8) | bsR(8)) << 8) | bsR(8));
}
    /**
     * Called by createHuffmanDecodingTables() exclusively.
     *
     * Builds the canonical-Huffman decode tables for one coding group:
     * {@code perm} lists the symbols ordered by code length (ties broken by
     * symbol value), {@code base[len]} offsets code values to indexes into
     * {@code perm}, and {@code limit[len]} holds the largest code value of
     * each length.
     */
    private static void hbCreateDecodeTables(final int[] limit,
            final int[] base, final int[] perm, final char[] length,
            final int minLen, final int maxLen, final int alphaSize)
    {
        // perm: symbols sorted by (code length, symbol value)
        for (int i = minLen, pp = 0; i <= maxLen; i++) {
            for (int j = 0; j < alphaSize; j++) {
                if (length[j] == i) {
                    perm[pp++] = j;
                }
            }
        }
        for (int i = MAX_CODE_LEN; --i > 0; ) {
            base[i] = 0;
            limit[i] = 0;
        }
        // count codes of each length (offset by one for the prefix sum below)
        for (int i = 0; i < alphaSize; i++) {
            base[(int) length[i] + 1]++;
        }
        // prefix sums: base[i] = number of codes shorter than or equal to i
        for (int i = 1, b = base[0]; i < MAX_CODE_LEN; i++) {
            b += base[i];
            base[i] = b;
        }
        // assign consecutive code values per length; limit[i] is the largest
        // code of length i, and codes of the next length start at (limit+1)<<1
        for (int i = minLen, vec = 0, b = base[i]; i <= maxLen; i++) {
            final int nb = base[i + 1];
            vec += nb - b;
            b = nb;
            limit[i] = vec - 1;
            vec <<= 1;
        }
        for (int i = minLen + 1; i <= maxLen; i++) {
            base[i] = ((limit[i - 1] + 1) << 1) - base[i];
        }
    }
    /**
     * Reads the Huffman coding metadata for the current block: the used-byte
     * bitmap, the group selectors (move-to-front encoded), and the per-group
     * code lengths (delta encoded), then builds the decoding tables.
     */
    private void recvDecodingTables()
            throws IOException
    {
        final Data dataShadow = this.data;
        final boolean[] inUse = dataShadow.inUse;
        final byte[] pos = dataShadow.recvDecodingTablesPos;
        final byte[] selector = dataShadow.selector;
        final byte[] selectorMtf = dataShadow.selectorMtf;
        int inUse16 = 0;
        /* Receive the mapping table: a 16-bit coarse bitmap, then one 16-bit
         * fine bitmap per set coarse bit */
        for (int i = 0; i < 16; i++) {
            if (bsGetBit()) {
                inUse16 |= 1 << i;
            }
        }
        for (int i = 256; --i >= 0; ) {
            inUse[i] = false;
        }
        for (int i = 0; i < 16; i++) {
            if ((inUse16 & (1 << i)) != 0) {
                final int i16 = i << 4;
                for (int j = 0; j < 16; j++) {
                    if (bsGetBit()) {
                        inUse[i16 + j] = true;
                    }
                }
            }
        }
        makeMaps();
        // alphabet: nInUse byte symbols plus RUNA/RUNB... wait — plus two symbols
        final int alphaSize = this.nInUse + 2;
        /* Now the selectors: each is a unary-coded MTF index */
        final int nGroups = (int) bsR(3);
        final int nSelectors = (int) bsR(15);
        for (int i = 0; i < nSelectors; i++) {
            int j = 0;
            while (bsGetBit()) {
                j++;
            }
            selectorMtf[i] = (byte) j;
        }
        /* Undo the MTF values for the selectors. */
        for (int v = nGroups; --v >= 0; ) {
            pos[v] = (byte) v;
        }
        for (int i = 0; i < nSelectors; i++) {
            int v = selectorMtf[i] & 0xff;
            final byte tmp = pos[v];
            while (v > 0) {
                // nearly all times v is zero, 4 in most other cases
                pos[v] = pos[v - 1];
                v--;
            }
            pos[0] = tmp;
            selector[i] = tmp;
        }
        final char[][] len = dataShadow.tempCharArray2D;
        /* Now the coding tables: code lengths are delta-coded from a 5-bit start */
        for (int t = 0; t < nGroups; t++) {
            int curr = (int) bsR(5);
            final char[] lenT = len[t];
            for (int i = 0; i < alphaSize; i++) {
                while (bsGetBit()) {
                    curr += bsGetBit() ? -1 : 1;
                }
                lenT[i] = (char) curr;
            }
        }
        // finally create the Huffman tables
        createHuffmanDecodingTables(alphaSize, nGroups);
    }
    /**
     * Called by recvDecodingTables() exclusively.
     *
     * For each coding group, finds the minimum and maximum code lengths and
     * builds the group's limit/base/perm decode tables via
     * {@link #hbCreateDecodeTables}.
     */
    private void createHuffmanDecodingTables(final int alphaSize,
            final int nGroups)
    {
        final Data dataShadow = this.data;
        final char[][] len = dataShadow.tempCharArray2D;
        final int[] minLens = dataShadow.minLens;
        final int[][] limit = dataShadow.limit;
        final int[][] base = dataShadow.base;
        final int[][] perm = dataShadow.perm;
        for (int t = 0; t < nGroups; t++) {
            int minLen = 32;
            int maxLen = 0;
            final char[] lenT = len[t];
            for (int i = alphaSize; --i >= 0; ) {
                final char lent = lenT[i];
                if (lent > maxLen) {
                    maxLen = lent;
                }
                if (lent < minLen) {
                    minLen = lent;
                }
            }
            hbCreateDecodeTables(limit[t], base[t], perm[t], len[t], minLen,
                    maxLen, alphaSize);
            minLens[t] = minLen;
        }
    }
/**
 * Decodes the Huffman-coded, move-to-front/RLE2-transformed symbol stream of
 * one block into {@code data.ll8}, filling the byte-frequency table
 * {@code unzftab} along the way. Reads bits directly from the underlying
 * stream via a local (bsBuffShadow, bsLiveShadow) copy of the bit buffer,
 * which is written back to the fields on exit.
 *
 * @throws IOException on premature end of stream or a block that exceeds
 *         the declared block size ("block overrun")
 */
private void getAndMoveToFrontDecode()
        throws IOException
{
    this.origPtr = (int) bsR(24);
    recvDecodingTables();
    final InputStream inShadow = this.in;
    final Data dataShadow = this.data;
    final byte[] ll8 = dataShadow.ll8;
    final int[] unzftab = dataShadow.unzftab;
    final byte[] selector = dataShadow.selector;
    final byte[] seqToUnseq = dataShadow.seqToUnseq;
    final char[] yy = dataShadow.getAndMoveToFrontDecodeYy;
    final int[] minLens = dataShadow.minLens;
    final int[][] limit = dataShadow.limit;
    final int[][] base = dataShadow.base;
    final int[][] perm = dataShadow.perm;
    final int limitLast = this.blockSize100k * 100000;
    /*
     * Setting up the unzftab entries here is not strictly necessary, but it
     * does save having to do it later in a separate pass, and so saves a
     * block's worth of cache misses.
     */
    for (int i = 256; --i >= 0; ) {
        yy[i] = (char) i;
        unzftab[i] = 0;
    }
    int groupNo = 0;
    int groupPos = G_SIZE - 1;
    final int eob = this.nInUse + 1;
    int nextSym = getAndMoveToFrontDecode0(0);
    int bsBuffShadow = (int) this.bsBuff;
    int bsLiveShadow = (int) this.bsLive;
    int lastShadow = -1;
    // Cached decoding tables for the currently selected Huffman group;
    // refreshed every G_SIZE symbols from the selector array.
    int zt = selector[groupNo] & 0xff;
    int[] baseZt = base[zt];
    int[] limitZt = limit[zt];
    int[] permZt = perm[zt];
    int minLensZt = minLens[zt];
    while (nextSym != eob) {
        if ((nextSym == RUN_A) || (nextSym == RUN_B)) {
            // RUN_A/RUN_B sequence: accumulate a run length s, with the
            // n-th symbol contributing n (RUN_A) or 2n (RUN_B).
            int s = -1;
            for (int n = 1; true; n <<= 1) {
                if (nextSym == RUN_A) {
                    s += n;
                }
                else if (nextSym == RUN_B) {
                    s += n << 1;
                }
                else {
                    break;
                }
                // Switch to the next group's tables when this group's
                // G_SIZE-symbol quota is used up.
                if (groupPos == 0) {
                    groupPos = G_SIZE - 1;
                    zt = selector[++groupNo] & 0xff;
                    baseZt = base[zt];
                    limitZt = limit[zt];
                    permZt = perm[zt];
                    minLensZt = minLens[zt];
                }
                else {
                    groupPos--;
                }
                // Read the next Huffman code: start at the minimum code
                // length and extend one bit at a time until the value
                // falls within the limit for its length.
                int zn = minLensZt;
                while (bsLiveShadow < zn) {
                    final int thech = readAByte(inShadow);
                    if (thech >= 0) {
                        bsBuffShadow = (bsBuffShadow << 8) | thech;
                        bsLiveShadow += 8;
                    }
                    else {
                        throw new IOException("unexpected end of stream");
                    }
                }
                long zvec = (bsBuffShadow >> (bsLiveShadow - zn)) & ((1L << zn) - 1);
                bsLiveShadow -= zn;
                while (zvec > limitZt[zn]) {
                    zn++;
                    while (bsLiveShadow < 1) {
                        final int thech = readAByte(inShadow);
                        if (thech >= 0) {
                            bsBuffShadow = (bsBuffShadow << 8) | thech;
                            bsLiveShadow += 8;
                        }
                        else {
                            throw new IOException("unexpected end of stream");
                        }
                    }
                    bsLiveShadow--;
                    zvec = (zvec << 1)
                            | ((bsBuffShadow >> bsLiveShadow) & 1);
                }
                nextSym = permZt[(int) (zvec - baseZt[zn])];
            }
            // Expand the run: s + 1 copies of the current MTF front byte.
            final byte ch = seqToUnseq[yy[0]];
            unzftab[ch & 0xff] += s + 1;
            while (s-- >= 0) {
                ll8[++lastShadow] = ch;
            }
            if (lastShadow >= limitLast) {
                throw new IOException("block overrun");
            }
        }
        else {
            if (++lastShadow >= limitLast) {
                throw new IOException("block overrun");
            }
            // Ordinary symbol: emit the byte at MTF position nextSym - 1
            // and move it to the front of the yy list.
            final char tmp = yy[nextSym - 1];
            unzftab[seqToUnseq[tmp] & 0xff]++;
            ll8[lastShadow] = seqToUnseq[tmp];
            /*
             * This loop is hammered during decompression, hence avoid
             * native method call overhead of System.arraycopy for very
             * small ranges to copy.
             */
            if (nextSym <= 16) {
                for (int j = nextSym - 1; j > 0; ) {
                    yy[j] = yy[--j];
                }
            }
            else {
                //noinspection SuspiciousSystemArraycopy
                System.arraycopy(yy, 0, yy, 1, nextSym - 1);
            }
            yy[0] = tmp;
            if (groupPos == 0) {
                groupPos = G_SIZE - 1;
                zt = selector[++groupNo] & 0xff;
                baseZt = base[zt];
                limitZt = limit[zt];
                permZt = perm[zt];
                minLensZt = minLens[zt];
            }
            else {
                groupPos--;
            }
            // Same canonical-Huffman bit-read as above (int-width variant).
            int zn = minLensZt;
            while (bsLiveShadow < zn) {
                final int thech = readAByte(inShadow);
                if (thech >= 0) {
                    bsBuffShadow = (bsBuffShadow << 8) | thech;
                    bsLiveShadow += 8;
                }
                else {
                    throw new IOException("unexpected end of stream");
                }
            }
            int zvec = (bsBuffShadow >> (bsLiveShadow - zn))
                    & ((1 << zn) - 1);
            bsLiveShadow -= zn;
            while (zvec > limitZt[zn]) {
                zn++;
                while (bsLiveShadow < 1) {
                    final int thech = readAByte(inShadow);
                    if (thech >= 0) {
                        bsBuffShadow = (bsBuffShadow << 8) | thech;
                        bsLiveShadow += 8;
                    }
                    else {
                        throw new IOException("unexpected end of stream");
                    }
                }
                bsLiveShadow--;
                zvec = ((zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1));
            }
            nextSym = permZt[zvec - baseZt[zn]];
        }
    }
    // Persist the decode position and bit-buffer state back to the fields.
    this.last = lastShadow;
    this.bsLive = bsLiveShadow;
    this.bsBuff = bsBuffShadow;
}
/**
 * Reads and decodes the first Huffman symbol of a block using the coding
 * table selected for the given group.
 *
 * @param groupNo index into the selector array (the caller in this file
 *        always passes 0)
 * @return the decoded symbol
 * @throws IOException if the underlying stream ends in the middle of a code
 */
private int getAndMoveToFrontDecode0(final int groupNo)
        throws IOException
{
    final InputStream inShadow = this.in;
    final Data dataShadow = this.data;
    final int zt = dataShadow.selector[groupNo] & 0xff;
    final int[] limitZt = dataShadow.limit[zt];
    int zn = dataShadow.minLens[zt];
    int zvec = (int) bsR(zn);
    int bsLiveShadow = (int) this.bsLive;
    int bsBuffShadow = (int) this.bsBuff;
    // Extend the code one bit at a time until it falls within the limit
    // table for its length (canonical Huffman decode).
    while (zvec > limitZt[zn]) {
        zn++;
        while (bsLiveShadow < 1) {
            final int thech = readAByte(inShadow);
            if (thech >= 0) {
                bsBuffShadow = (bsBuffShadow << 8) | thech;
                bsLiveShadow += 8;
            }
            else {
                throw new IOException("unexpected end of stream");
            }
        }
        bsLiveShadow--;
        zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1);
    }
    this.bsLive = bsLiveShadow;
    this.bsBuff = bsBuffShadow;
    return dataShadow.perm[zt][zvec - dataShadow.base[zt][zn]];
}
/**
 * Builds the inverse Burrows-Wheeler transform vector {@code tt} from the
 * decoded block in {@code ll8} and primes the per-byte output state, then
 * dispatches to the randomised or non-randomised emit path.
 *
 * @throws IOException if the recorded original pointer lies outside the block
 */
private void setupBlock()
        throws IOException
{
    if (this.data == null) {
        return;
    }
    final int[] cftab = this.data.cftab;
    final int[] tt = this.data.initTT(this.last + 1);
    final byte[] ll8 = this.data.ll8;
    cftab[0] = 0;
    System.arraycopy(this.data.unzftab, 0, cftab, 1, 256);
    // Turn the per-byte frequencies into cumulative counts.
    for (int i = 1, c = cftab[0]; i <= 256; i++) {
        c += cftab[i];
        cftab[i] = c;
    }
    // Build the transposition vector that chains block positions in
    // original-text order.
    for (int i = 0, lastShadow = this.last; i <= lastShadow; i++) {
        tt[cftab[ll8[i] & 0xff]++] = i;
    }
    if ((this.origPtr < 0) || (this.origPtr >= tt.length)) {
        throw new IOException("stream corrupted");
    }
    this.suTPos = tt[this.origPtr];
    this.suCount = 0;
    this.suI2 = 0;
    this.suCh2 = 256; /* not a char and not EOF */
    if (this.blockRandomised) {
        this.suRNToGo = 0;
        this.suRTPos = 0;
        setupRandPartA();
    }
    else {
        setupNoRandPartA();
    }
}
/**
 * Emits the next byte of a randomised block, undoing the block
 * randomisation by XOR-ing selected bytes with 1 according to the
 * {@link #R_NUMS} schedule, or advances to the next block when this
 * one is exhausted.
 */
@SuppressWarnings("checkstyle:InnerAssignment")
private void setupRandPartA()
        throws IOException
{
    if (this.suI2 <= this.last) {
        this.suChPrev = this.suCh2;
        int suCh2Shadow = this.data.ll8[this.suTPos] & 0xff;
        // Follow the inverse-BWT chain to the next position.
        this.suTPos = this.data.tt[this.suTPos];
        // Reload the countdown from the R_NUMS table when it runs out.
        if (this.suRNToGo == 0) {
            this.suRNToGo = R_NUMS[this.suRTPos] - 1;
            if (++this.suRTPos == 512) {
                this.suRTPos = 0;
            }
        }
        else {
            this.suRNToGo--;
        }
        // XOR with 1 exactly when the countdown is at 1.
        this.suCh2 = suCh2Shadow ^= (this.suRNToGo == 1) ? 1 : 0;
        this.suI2++;
        this.currentChar = suCh2Shadow;
        this.currentState = STATE.RAND_PART_B_STATE;
        this.crc32.updateCRC(suCh2Shadow);
    }
    else {
        endBlock();
        initBlock();
        setupBlock();
    }
}
/**
 * Emits the next byte of a non-randomised block, or advances to the next
 * block when this one is exhausted.
 */
private void setupNoRandPartA()
        throws IOException
{
    if (this.suI2 <= this.last) {
        this.suChPrev = this.suCh2;
        int suCh2Shadow = this.data.ll8[this.suTPos] & 0xff;
        this.suCh2 = suCh2Shadow;
        // Follow the inverse-BWT chain to the next position.
        this.suTPos = this.data.tt[this.suTPos];
        this.suI2++;
        this.currentChar = suCh2Shadow;
        this.currentState = STATE.NO_RAND_PART_B_STATE;
        this.crc32.updateCRC(suCh2Shadow);
    }
    else {
        this.currentState = STATE.NO_RAND_PART_A_STATE;
        endBlock();
        initBlock();
        setupBlock();
    }
}
/**
 * Randomised-block state B: after emitting a byte, either returns to state
 * A for the next byte, or — once four identical bytes have been seen —
 * reads the run-length byte (de-randomised like any other byte) and hands
 * off to state C to expand the run.
 */
private void setupRandPartB()
        throws IOException
{
    if (this.suCh2 != this.suChPrev) {
        this.currentState = STATE.RAND_PART_A_STATE;
        this.suCount = 1;
        setupRandPartA();
    }
    else if (++this.suCount >= 4) {
        // Fourth identical byte: the next block byte is the repeat count.
        this.suZ = (char) (this.data.ll8[this.suTPos] & 0xff);
        this.suTPos = this.data.tt[this.suTPos];
        if (this.suRNToGo == 0) {
            this.suRNToGo = R_NUMS[this.suRTPos] - 1;
            if (++this.suRTPos == 512) {
                this.suRTPos = 0;
            }
        }
        else {
            this.suRNToGo--;
        }
        this.suJ2 = 0;
        this.currentState = STATE.RAND_PART_C_STATE;
        if (this.suRNToGo == 1) {
            this.suZ ^= 1;
        }
        setupRandPartC();
    }
    else {
        this.currentState = STATE.RAND_PART_A_STATE;
        setupRandPartA();
    }
}
/**
 * Randomised-block state C: re-emits {@code suCh2} until {@code suJ2}
 * reaches the run length {@code suZ}, then returns control to state A.
 */
private void setupRandPartC()
        throws IOException
{
    if (this.suJ2 >= this.suZ) {
        // Run complete: advance to the next source byte via state A.
        this.currentState = STATE.RAND_PART_A_STATE;
        this.suI2++;
        this.suCount = 0;
        setupRandPartA();
        return;
    }
    this.currentChar = this.suCh2;
    this.crc32.updateCRC(this.suCh2);
    this.suJ2++;
}
/**
 * Non-randomised state B: after emitting a byte, either returns to state A
 * for the next byte, or — once four identical bytes have been seen — reads
 * the run-length byte and hands off to state C to expand the run.
 */
private void setupNoRandPartB()
        throws IOException
{
    if (this.suCh2 != this.suChPrev) {
        // Byte changed: restart the repeat counter.
        this.suCount = 1;
        setupNoRandPartA();
        return;
    }
    this.suCount++;
    if (this.suCount < 4) {
        setupNoRandPartA();
        return;
    }
    // Fourth identical byte: the next block byte is the repeat count.
    this.suZ = (char) (this.data.ll8[this.suTPos] & 0xff);
    this.suTPos = this.data.tt[this.suTPos];
    this.suJ2 = 0;
    setupNoRandPartC();
}
/**
 * Non-randomised state C: re-emits {@code suCh2} until {@code suJ2} reaches
 * the run length {@code suZ}, then returns control to state A.
 */
private void setupNoRandPartC()
        throws IOException
{
    if (this.suJ2 >= this.suZ) {
        // Run complete: advance to the next source byte via state A.
        this.suI2++;
        this.suCount = 0;
        setupNoRandPartA();
        return;
    }
    final int ch = this.suCh2;
    this.currentChar = ch;
    this.crc32.updateCRC(ch);
    this.suJ2++;
    this.currentState = STATE.NO_RAND_PART_C_STATE;
}
/**
 * Holds all memory-intensive buffers needed to decode a block. Byte counts
 * in the trailing comments assume the maximum 900k block size.
 */
private static final class Data
{
    final boolean[] inUse = new boolean[256]; // 256 byte
    final byte[] seqToUnseq = new byte[256]; // 256 byte
    final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte
    final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte
    /**
     * Freq table collected to save a pass over the data during
     * decompression.
     */
    final int[] unzftab = new int[256]; // 1024 byte
    final int[][] limit = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
    final int[][] base = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
    final int[][] perm = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
    final int[] minLens = new int[N_GROUPS]; // 24 byte
    final int[] cftab = new int[257]; // 1028 byte
    final char[] getAndMoveToFrontDecodeYy = new char[256]; // 512 byte
    final char[][] tempCharArray2D = new char[N_GROUPS][MAX_ALPHA_SIZE]; // 3096 byte
    final byte[] recvDecodingTablesPos = new byte[N_GROUPS]; // 6 byte
    // Inverse-BWT transposition vector; allocated lazily by initTT().
    int[] tt; // 3600000 byte
    // Decoded block bytes.
    byte[] ll8; // 900000 byte

    Data(int blockSize100k)
    {
        this.ll8 = new byte[blockSize100k * BZip2Constants.BASE_BLOCK_SIZE];
    }

    /**
     * Returns the {@link #tt} array, (re)allocating it when it is absent or
     * too small for the requested length. Allocation is deferred until the
     * required size is known, to avoid wasting memory on small inputs.
     */
    int[] initTT(int length)
    {
        int[] existing = this.tt;
        // A too-short array can occur if the compressor mixed small and
        // large blocks; normally only the last block is smaller.
        if (existing == null || existing.length < length) {
            existing = new int[length];
            this.tt = existing;
        }
        return existing;
    }
}
private static final int[] R_NUMS = {
619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214, 733, 859, 335,
708, 621, 574, 73, 654, 730, 472, 419, 436, 278, 496, 867, 210,
399, 680, 480, 51, 878, 465, 811, 169, 869, 675, 611, 697, 867,
561, 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, 150, 238,
59, 379, 684, 877, 625, 169, 643, 105, 170, 607, 520, 932, 727,
476, 693, 425, 174, 647, 73, 122, 335, 530, 442, 853, 695, 249,
445, 515, 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, 641,
801, 220, 162, 819, 984, 589, 513, 495, 799, 161, 604, 958, 533,
221, 400, 386, 867, 600, 782, 382, 596, 414, 171, 516, 375, 682,
485, 911, 276, 98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224, 469, 68, 770,
919, 190, 373, 294, 822, 808, 206, 184, 943, 795, 384, 383, 461,
404, 758, 839, 887, 715, 67, 618, 276, 204, 918, 873, 777, 604,
560, 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, 652, 934, 970,
447, 318, 353, 859, 672, 112, 785, 645, 863, 803, 350, 139, 93,
354, 99, 820, 908, 609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125, 411, 521, 938,
300, 821, 78, 343, 175, 128, 250, 170, 774, 972, 275, 999, 639,
495, 78, 352, 126, 857, 956, 358, 619, 580, 124, 737, 594, 701,
612, 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, 944, 375,
748, 52, 600, 747, 642, 182, 862, 81, 344, 805, 988, 739, 511, 655,
814, 334, 249, 515, 897, 955, 664, 981, 649, 113, 974, 459, 893,
228, 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, 686, 754,
806, 760, 493, 403, 415, 394, 687, 700, 946, 670, 656, 610, 738,
392, 760, 799, 887, 653, 978, 321, 576, 617, 626, 502, 894, 679,
243, 440, 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, 707,
151, 457, 449, 797, 195, 791, 558, 945, 679, 297, 59, 87, 824, 713,
663, 412, 693, 342, 606, 134, 108, 571, 364, 631, 212, 174, 643,
304, 329, 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, 140,
206, 73, 263, 980, 736, 876, 478, 430, 305, 170, 514, 364, 692,
829, 82, 855, 953, 676, 246, 369, 970, 294, 750, 807, 827, 150,
790, 288, 923, 804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56, 661, 821, 976,
991, 658, 869, 905, 758, 745, 193, 768, 550, 608, 933, 378, 286,
215, 979, 792, 961, 61, 688, 793, 644, 986, 403, 106, 366, 905,
644, 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, 780, 773,
635, 389, 707, 100, 626, 958, 165, 504, 920, 176, 193, 713, 857,
265, 203, 50, 668, 108, 645, 990, 626, 197, 510, 357, 358, 850,
858, 364, 936, 638};
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/CBZip2OutputStream.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.bzip2;
import java.io.IOException;
import java.io.OutputStream;
import static io.airlift.compress.v3.bzip2.BZip2Constants.G_SIZE;
import static io.airlift.compress.v3.bzip2.BZip2Constants.MAX_ALPHA_SIZE;
import static io.airlift.compress.v3.bzip2.BZip2Constants.MAX_SELECTORS;
import static io.airlift.compress.v3.bzip2.BZip2Constants.N_GROUPS;
import static io.airlift.compress.v3.bzip2.BZip2Constants.RUN_A;
import static io.airlift.compress.v3.bzip2.BZip2Constants.RUN_B;
/**
* An output stream that compresses into the BZip2 format (without the file
* header chars) into another stream.
*
* <p>
* The compression requires large amounts of memory. Thus you should call the
* {@link #close() close()} method as soon as possible, to force
* <tt>CBZip2OutputStream</tt> to release the allocated memory.
* </p>
*
* <p>
* You can shrink the amount of allocated memory and maybe raise the compression
* speed by choosing a lower blocksize, which in turn may cause a lower
* compression ratio. You can avoid unnecessary memory allocation by avoiding
* using a blocksize which is bigger than the size of the input.
* </p>
*
* <p>
* You can compute the memory usage for compressing by the following formula:
* </p>
*
* <pre>
* <code>400k + (9 * blocksize)</code>.
* </pre>
*
* <p>
* To get the memory required for decompression by {@link CBZip2InputStream
* CBZip2InputStream} use
* </p>
*
* <pre>
* <code>65k + (5 * blocksize)</code>.
* </pre>
*
* <table width="100%" border="1">
* <colgroup> <col width="33%" /> <col width="33%" /> <col width="33%" />
* </colgroup>
* <tr>
* <th colspan="3">Memory usage by blocksize</th>
* </tr>
* <tr>
* <th align="right">Blocksize</th> <th align="right">Compression<br>
* memory usage</th> <th align="right">Decompression<br>
* memory usage</th>
* </tr>
* <tr>
* <td align="right">100k</td>
* <td align="right">1300k</td>
* <td align="right">565k</td>
* </tr>
* <tr>
* <td align="right">200k</td>
* <td align="right">2200k</td>
* <td align="right">1065k</td>
* </tr>
* <tr>
* <td align="right">300k</td>
* <td align="right">3100k</td>
* <td align="right">1565k</td>
* </tr>
* <tr>
* <td align="right">400k</td>
* <td align="right">4000k</td>
* <td align="right">2065k</td>
* </tr>
* <tr>
* <td align="right">500k</td>
* <td align="right">4900k</td>
* <td align="right">2565k</td>
* </tr>
* <tr>
* <td align="right">600k</td>
* <td align="right">5800k</td>
* <td align="right">3065k</td>
* </tr>
* <tr>
* <td align="right">700k</td>
* <td align="right">6700k</td>
* <td align="right">3565k</td>
* </tr>
* <tr>
* <td align="right">800k</td>
* <td align="right">7600k</td>
* <td align="right">4065k</td>
* </tr>
* <tr>
* <td align="right">900k</td>
* <td align="right">8500k</td>
* <td align="right">4565k</td>
* </tr>
* </table>
*
* <p>
* For decompression <tt>CBZip2InputStream</tt> allocates less memory if the
* bzipped input is smaller than one block.
* </p>
*
* <p>
* Instances of this class are not thread safe.
* </p>
*
* <p>
* TODO: Update to BZip2 1.0.1
* </p>
*/
// forked from Apache Hadoop
class CBZip2OutputStream
extends OutputStream
{
/**
 * The maximum supported block size <tt> == 9</tt> (in 100k-byte units).
 */
private static final int MAX_BLOCK_SIZE = 9;
private static final int[] R_NUMS = {619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214, 733, 859, 335,
708, 621, 574, 73, 654, 730, 472, 419, 436, 278, 496, 867, 210,
399, 680, 480, 51, 878, 465, 811, 169, 869, 675, 611, 697, 867,
561, 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, 150, 238,
59, 379, 684, 877, 625, 169, 643, 105, 170, 607, 520, 932, 727,
476, 693, 425, 174, 647, 73, 122, 335, 530, 442, 853, 695, 249,
445, 515, 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, 641,
801, 220, 162, 819, 984, 589, 513, 495, 799, 161, 604, 958, 533,
221, 400, 386, 867, 600, 782, 382, 596, 414, 171, 516, 375, 682,
485, 911, 276, 98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224, 469, 68, 770,
919, 190, 373, 294, 822, 808, 206, 184, 943, 795, 384, 383, 461,
404, 758, 839, 887, 715, 67, 618, 276, 204, 918, 873, 777, 604,
560, 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, 652, 934, 970,
447, 318, 353, 859, 672, 112, 785, 645, 863, 803, 350, 139, 93,
354, 99, 820, 908, 609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125, 411, 521, 938,
300, 821, 78, 343, 175, 128, 250, 170, 774, 972, 275, 999, 639,
495, 78, 352, 126, 857, 956, 358, 619, 580, 124, 737, 594, 701,
612, 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, 944, 375,
748, 52, 600, 747, 642, 182, 862, 81, 344, 805, 988, 739, 511, 655,
814, 334, 249, 515, 897, 955, 664, 981, 649, 113, 974, 459, 893,
228, 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, 686, 754,
806, 760, 493, 403, 415, 394, 687, 700, 946, 670, 656, 610, 738,
392, 760, 799, 887, 653, 978, 321, 576, 617, 626, 502, 894, 679,
243, 440, 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, 707,
151, 457, 449, 797, 195, 791, 558, 945, 679, 297, 59, 87, 824, 713,
663, 412, 693, 342, 606, 134, 108, 571, 364, 631, 212, 174, 643,
304, 329, 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, 140,
206, 73, 263, 980, 736, 876, 478, 430, 305, 170, 514, 364, 692,
829, 82, 855, 953, 676, 246, 369, 970, 294, 750, 807, 827, 150,
790, 288, 923, 804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56, 661, 821, 976,
991, 658, 869, 905, 758, 745, 193, 768, 550, 608, 933, 378, 286,
215, 979, 792, 961, 61, 688, 793, 644, 986, 403, 106, 366, 905,
644, 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, 780, 773,
635, 389, 707, 100, 626, 958, 165, 504, 920, 176, 193, 713, 857,
265, 203, 50, 668, 108, 645, 990, 626, 197, 510, 357, 358, 850,
858, 364, 936, 638};
// Number of passes used to refine the Huffman coding tables (see sendMTFValues1).
private static final int N_ITERS = 4;
private static final int NUM_OVERSHOOT_BYTES = 20;
// NOTE(review): the javadoc previously claimed these constants are
// "accessible by subclasses"; they are all private, so that text was wrong
// and has been removed. Their use sites are in the sorting/coding code.
private static final int SET_MASK = (1 << 21);
private static final int CLEAR_MASK = (~SET_MASK);
private static final int GREATER_ICOST = 15;
private static final int LESSER_ICOST = 0;
private static final int SMALL_THRESH = 20;
private static final int DEPTH_THRESH = 10;
private static final int WORK_FACTOR = 30;
/**
 * If you are ever unlucky/improbable enough to get a stack overflow whilst
 * sorting, increase the following constant and try again. In practice I
 * have never seen the stack go above 27 elems, so the following limit seems
 * very generous.
 */
private static final int QSORT_STACK_SIZE = 1000;
/**
 * Knuth's increments seem to work better than Incerpi-Sedgewick here.
 * Possibly because the number of elems to sort is usually small, typically
 * <= 20.
 */
private static final int[] INCS = {1, 4, 13, 40, 121, 364, 1093, 3280, 9841, 29524, 88573, 265720, 797161, 2391484};
/**
 * Computes Huffman code lengths for the given symbol frequencies, repeating
 * with flattened weights until no code exceeds {@code maxLen} bits.
 *
 * <p>Each entry of {@code weight} packs the frequency total in the upper 24
 * bits and a depth/tie-break value in the low 8 bits (see the combine step
 * below, which masks with 0xffffff00 / 0x000000ff).</p>
 *
 * @param len receives one code length per symbol (len[i] for symbol i)
 * @param freq symbol frequencies; zero frequencies are treated as 1
 * @param dat scratch arrays (heap, weight, parent)
 * @param alphaSize number of symbols in the alphabet
 * @param maxLen maximum permitted code length in bits
 */
private static void hbMakeCodeLengths(final byte[] len, final int[] freq,
        final Data dat, final int alphaSize, final int maxLen)
{
    /*
     * Nodes and heap entries run from 1. Entry 0 for both the heap and
     * nodes is a sentinel.
     */
    final int[] heap = dat.heap;
    final int[] weight = dat.weight;
    final int[] parent = dat.parent;
    for (int i = alphaSize; --i >= 0; ) {
        weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8;
    }
    for (boolean tooLong = true; tooLong; ) {
        tooLong = false;
        int nNodes = alphaSize;
        int nHeap = 0;
        heap[0] = 0;
        weight[0] = 0;
        parent[0] = -2;
        // Build a min-heap of all leaf nodes, sifting each new entry up.
        for (int i = 1; i <= alphaSize; i++) {
            parent[i] = -1;
            nHeap++;
            heap[nHeap] = i;
            int zz = nHeap;
            int tmp = heap[zz];
            while (weight[tmp] < weight[heap[zz >> 1]]) {
                heap[zz] = heap[zz >> 1];
                zz >>= 1;
            }
            heap[zz] = tmp;
        }
        // Repeatedly combine the two lightest nodes into a parent node.
        while (nHeap > 1) {
            // Pop the minimum (n1), then sift the moved tail element down.
            int n1 = heap[1];
            heap[1] = heap[nHeap];
            nHeap--;
            int yy;
            int zz = 1;
            int tmp = heap[1];
            while (true) {
                yy = zz << 1;
                if (yy > nHeap) {
                    break;
                }
                if ((yy < nHeap)
                        && (weight[heap[yy + 1]] < weight[heap[yy]])) {
                    yy++;
                }
                if (weight[tmp] < weight[heap[yy]]) {
                    break;
                }
                heap[zz] = heap[yy];
                zz = yy;
            }
            heap[zz] = tmp;
            // Pop the second minimum (n2) the same way.
            int n2 = heap[1];
            heap[1] = heap[nHeap];
            nHeap--;
            zz = 1;
            tmp = heap[1];
            while (true) {
                yy = zz << 1;
                if (yy > nHeap) {
                    break;
                }
                if ((yy < nHeap)
                        && (weight[heap[yy + 1]] < weight[heap[yy]])) {
                    yy++;
                }
                if (weight[tmp] < weight[heap[yy]]) {
                    break;
                }
                heap[zz] = heap[yy];
                zz = yy;
            }
            heap[zz] = tmp;
            nNodes++;
            parent[n1] = nNodes;
            parent[n2] = nNodes;
            // Combine: sum the packed frequencies (upper bits) and keep
            // max depth + 1 in the low byte.
            final int weightN1 = weight[n1];
            final int weightN2 = weight[n2];
            weight[nNodes] = ((weightN1 & 0xffffff00) + (weightN2 & 0xffffff00))
                    | (1 + (Math.max((weightN1 & 0x000000ff), (weightN2 & 0x000000ff))));
            parent[nNodes] = -1;
            // Push the combined node, sifting it up to its place.
            nHeap++;
            heap[nHeap] = nNodes;
            zz = nHeap;
            tmp = heap[zz];
            final int weightTmp = weight[tmp];
            while (weightTmp < weight[heap[zz >> 1]]) {
                heap[zz] = heap[zz >> 1];
                zz >>= 1;
            }
            heap[zz] = tmp;
        }
        // A symbol's code length is its depth: walk the parent chain.
        for (int i = 1; i <= alphaSize; i++) {
            int j = 0;
            int k = i;
            for (int parentK; (parentK = parent[k]) >= 0; ) {
                k = parentK;
                j++;
            }
            len[i - 1] = (byte) j;
            if (j > maxLen) {
                tooLong = true;
            }
        }
        // Some code exceeded maxLen: flatten the weights and rebuild.
        if (tooLong) {
            for (int i = 1; i < alphaSize; i++) {
                int j = weight[i] >> 8;
                j = 1 + (j >> 1);
                weight[i] = j << 8;
            }
        }
    }
}
/**
 * Index of the last char in the block, so the block size == last + 1.
 */
private int last;
/**
 * Index in fmap[] of original string after sorting.
 */
private int origPtr;
/**
 * Always: in the range 0 .. 9. The current block size is 100000 * this
 * number.
 */
private final int blockSize100k;
private boolean blockRandomised;
// Bit-oriented output buffer: bsLive pending bits are held in the high end
// of bsBuff and drained MSB-first by bsW()/bsFinishedWithStream().
private int bsBuff;
private int bsLive;
private final Crc32 crc32 = new Crc32();
private int nInUse;
private int nMTF;
/*
 * Used when sorting. If too many long comparisons happen, we stop sorting,
 * randomise the block slightly, and try again.
 */
private int workDone;
private int workLimit;
private boolean firstAttempt;
// State of the initial run-length encoder: the byte currently being
// repeated (-1 = none pending) and how many times it has been seen.
private int currentChar = -1;
private int runLength;
// Stream-level CRC, combining each block's CRC (see endBlock()).
private int combinedCRC;
// Block capacity minus a safety margin; checked by writeRun().
private int allowableBlockSize;
/**
 * All memory intensive stuff.
 */
private Data data;
private OutputStream out;
/**
 * Constructs a new <tt>CBZip2OutputStream</tt> with a block size of 900k.
 *
 * <p>
 * <b>Attention: </b>The caller is responsible to write the two BZip2 magic
 * bytes <tt>"BZ"</tt> to the specified stream prior to calling this
 * constructor.
 * </p>
 *
 * @param out the destination stream.
 * @throws IOException if an I/O error occurs in the specified stream.
 * @throws NullPointerException if <code>out == null</code>.
 */
public CBZip2OutputStream(final OutputStream out)
        throws IOException
{
    this(out, MAX_BLOCK_SIZE);
}
/**
 * Constructs a new <tt>CBZip2OutputStream</tt> with specified block size.
 *
 * <p>
 * <b>Attention: </b>The caller is responsible to write the two BZip2 magic
 * bytes <tt>"BZ"</tt> to the specified stream prior to calling this
 * constructor.
 * </p>
 *
 * @param out the destination stream.
 * @param blockSize the blockSize as 100k units.
 * @throws IOException if an I/O error occurs in the specified stream.
 * @throws IllegalArgumentException if <code>(blockSize < 1) || (blockSize > 9)</code>.
 * @throws NullPointerException if <code>out == null</code>.
 * @see #MAX_BLOCK_SIZE
 */
private CBZip2OutputStream(final OutputStream out, final int blockSize)
        throws IOException
{
    if (blockSize < 1) {
        throw new IllegalArgumentException("blockSize(" + blockSize
                + ") < 1");
    }
    // Validate against the declared MAX_BLOCK_SIZE constant instead of a
    // magic literal so the check cannot drift from the declared maximum.
    // (MAX_BLOCK_SIZE == 9, so the thrown message is unchanged.)
    if (blockSize > MAX_BLOCK_SIZE) {
        throw new IllegalArgumentException("blockSize(" + blockSize
                + ") > " + MAX_BLOCK_SIZE);
    }
    this.blockSize100k = blockSize;
    this.out = out;
    // Writes the stream header and allocates the first block's buffers.
    init();
}
/**
 * Writes one byte into the compressor's run-length stage.
 *
 * @throws IOException if this stream has already been closed
 */
@Override
public void write(final int b)
        throws IOException
{
    // Guard clause: reject writes after close()/finish().
    if (this.out == null) {
        throw new IOException("closed");
    }
    write0(b);
}
/**
 * Flushes the pending run ({@code currentChar} repeated {@code runLength}
 * times) into the block buffer using bzip2's initial RLE: runs of 1-3 bytes
 * are stored literally, a run of 4 or more as four literals plus a count
 * byte. If the block is full, it is emitted first and the run retried in a
 * fresh block. Note the block array is written with a +2 offset.
 */
private void writeRun()
        throws IOException
{
    final int lastShadow = this.last;
    if (lastShadow < this.allowableBlockSize) {
        final int currentCharShadow = this.currentChar;
        final Data dataShadow = this.data;
        dataShadow.inUse[currentCharShadow] = true;
        final byte ch = (byte) currentCharShadow;
        int runLengthShadow = this.runLength;
        this.crc32.updateCRC(currentCharShadow, runLengthShadow);
        switch (runLengthShadow) {
            case 1:
                dataShadow.block[lastShadow + 2] = ch;
                this.last = lastShadow + 1;
                break;
            case 2:
                dataShadow.block[lastShadow + 2] = ch;
                dataShadow.block[lastShadow + 3] = ch;
                this.last = lastShadow + 2;
                break;
            case 3: {
                final byte[] block = dataShadow.block;
                block[lastShadow + 2] = ch;
                block[lastShadow + 3] = ch;
                block[lastShadow + 4] = ch;
                this.last = lastShadow + 3;
            }
            break;
            default: {
                // Runs of 4+: four copies of the byte followed by the
                // remaining count; the count byte must be marked in-use too.
                runLengthShadow -= 4;
                dataShadow.inUse[runLengthShadow] = true;
                final byte[] block = dataShadow.block;
                block[lastShadow + 2] = ch;
                block[lastShadow + 3] = ch;
                block[lastShadow + 4] = ch;
                block[lastShadow + 5] = ch;
                block[lastShadow + 6] = (byte) runLengthShadow;
                this.last = lastShadow + 5;
            }
            break;
        }
    }
    else {
        // Block is full: emit it, start a new one, and retry this run.
        endBlock();
        initBlock();
        writeRun();
    }
}
/**
 * Overridden to close the stream.
 *
 * <p>NOTE(review): finalization is not a reliable way to flush compressed
 * data (and {@code finalize()} is deprecated in modern JDKs) — callers
 * should invoke {@link #close()} or {@link #finish()} explicitly.</p>
 */
@Override
protected void finalize()
        throws Throwable
{
    finish();
    super.finalize();
}
/**
 * Flushes any pending run, writes the final block and the end-of-stream
 * trailer, and releases the block buffers. Idempotent: subsequent calls
 * are no-ops. Does not close the underlying stream.
 */
public void finish()
        throws IOException
{
    if (out == null) {
        return;
    }
    try {
        if (this.runLength > 0) {
            writeRun();
        }
        this.currentChar = -1;
        endBlock();
        endCompression();
    }
    finally {
        // Drop references even on failure so the stream reads as closed.
        this.out = null;
        this.data = null;
    }
}
/**
 * Finishes the compressed stream and closes the underlying stream.
 *
 * <p>Bug fix: the previous version unconditionally called
 * {@code outShadow.close()} in the {@code finally} block. On the success
 * path {@code outShadow} had already been set to {@code null}, so every
 * successful close threw a {@link NullPointerException}; on the failure
 * path the second {@code close()} could mask the original exception. The
 * {@code finally} close now runs only when the normal close did not
 * complete.</p>
 */
@Override
public void close()
        throws IOException
{
    if (out != null) {
        OutputStream outShadow = this.out;
        try {
            finish();
            outShadow.close();
            outShadow = null;
        }
        finally {
            // Best-effort close only if the normal path did not finish.
            if (outShadow != null) {
                outShadow.close();
            }
        }
    }
}
/**
 * Flushes the underlying stream, if this stream is still open.
 * Note: bits buffered in {@code bsBuff} are only forced out by
 * {@code finish()}, not by this method.
 */
@Override
public void flush()
        throws IOException
{
    if (this.out != null) {
        this.out.flush();
    }
}
/**
 * Allocates the block buffers and writes the bzip2 stream header: the
 * format byte 'h' (huffmanised) followed by the block-size digit. The
 * leading "BZ" magic is written by the caller who created this stream.
 */
private void init()
        throws IOException
{
    this.data = new Data(this.blockSize100k);
    /*
     * Write `magic' bytes h indicating file-format == huffmanised, followed
     * by a digit indicating blockSize100k.
     */
    bsPutUByte('h');
    bsPutUByte((int) '0' + this.blockSize100k);
    this.combinedCRC = 0;
    initBlock();
}
/**
 * Resets the per-block state: block CRC, write position, the in-use byte
 * map, and the usable block capacity.
 */
private void initBlock()
{
    this.crc32.initialiseCRC();
    this.last = -1;
    final boolean[] inUse = this.data.inUse;
    for (int i = 0; i < 256; i++) {
        inUse[i] = false;
    }
    // The subtracted 20 is just a paranoia safety margin.
    this.allowableBlockSize = (this.blockSize100k * BZip2Constants.BASE_BLOCK_SIZE) - 20;
}
/**
 * Finishes the current block: folds its CRC into the combined stream CRC,
 * then — unless the block is empty — sorts it, writes the block header
 * magic, the block CRC, the randomisation bit, and the coded contents.
 */
private void endBlock()
        throws IOException
{
    int blockCRC = this.crc32.getFinalCRC();
    // Combined CRC: rotate left one bit, then XOR in this block's CRC.
    this.combinedCRC = (this.combinedCRC << 1) | (this.combinedCRC >>> 31);
    this.combinedCRC ^= blockCRC;
    // empty block at end of file
    if (this.last == -1) {
        return;
    }
    /* sort the block and establish posn of original string */
    blockSort();
    /*
     * A 6-byte block header, the value chosen arbitrarily as 0x314159265359
     * :-). A 32 bit value does not really give a strong enough guarantee
     * that the value will not appear by chance in the compressed
     * data stream. Worst-case probability of this event, for a 900k block,
     * is about 2.0e-3 for 32 bits, 1.0e-5 for 40 bits and 4.0e-8 for 48
     * bits. For a compressed file of size 100Gb -- about 100000 blocks --
     * only a 48-bit marker will do. NB: normal compression/ decompression
     * do not rely on these statistical properties. They are only important
     * when trying to recover blocks from damaged files.
     */
    bsPutUByte(0x31);
    bsPutUByte(0x41);
    bsPutUByte(0x59);
    bsPutUByte(0x26);
    bsPutUByte(0x53);
    bsPutUByte(0x59);
    /* Now the block's CRC, so it is in a known place. */
    bsPutInt(blockCRC);
    /* Now a single bit indicating randomisation. */
    if (this.blockRandomised) {
        bsW(1, 1);
    }
    else {
        bsW(1, 0);
    }
    /* Finally, block's contents proper. */
    moveToFrontCodeAndSend();
}
/**
 * Writes the 48-bit end-of-stream marker and the combined CRC, then flushes
 * any bits still pending in the bit buffer.
 */
private void endCompression()
        throws IOException
{
    /*
     * Now another magic 48-bit number, 0x177245385090, to indicate the end
     * of the last block. (sqrt(pi), if you want to know. I did want to use
     * e, but it contains too much repetition -- 27 18 28 18 28 46 -- for me
     * to feel statistically comfortable. Call me paranoid.)
     */
    bsPutUByte(0x17);
    bsPutUByte(0x72);
    bsPutUByte(0x45);
    bsPutUByte(0x38);
    bsPutUByte(0x50);
    bsPutUByte(0x90);
    bsPutInt(this.combinedCRC);
    bsFinishedWithStream();
}
/**
 * Writes {@code len} bytes from {@code buf} starting at {@code offs} into
 * the compressor's run-length stage.
 *
 * <p>Bug fix: the range check {@code offs + len > buf.length} could
 * overflow {@code int} for huge {@code offs + len}, making the check pass
 * and the write silently do nothing (the loop bound also overflowed). The
 * check now uses overflow-safe subtraction — {@code len >= 0} is already
 * established, so {@code buf.length - len} cannot overflow.</p>
 *
 * @throws IndexOutOfBoundsException if the range is invalid
 * @throws IOException if this stream has been closed
 */
@Override
public void write(final byte[] buf, int offs, final int len)
        throws IOException
{
    if (offs < 0) {
        throw new IndexOutOfBoundsException("offs(" + offs + ") < 0.");
    }
    if (len < 0) {
        throw new IndexOutOfBoundsException("len(" + len + ") < 0.");
    }
    if (offs > buf.length - len) {
        throw new IndexOutOfBoundsException("offs(" + offs + ") + len("
                + len + ") > buf.length(" + buf.length + ").");
    }
    if (this.out == null) {
        throw new IOException("stream closed");
    }
    for (int hi = offs + len; offs < hi; ) {
        write0(buf[offs++]);
    }
}
/**
 * Accepts one byte from the caller, maintaining the run-length state
 * ({@code currentChar}, {@code runLength}). The pending run is flushed via
 * {@link #writeRun()} when the byte value changes or the run reaches the
 * maximum length (the {@code > 254} check caps a run at 255 repeats).
 */
private void write0(int b)
        throws IOException
{
    if (this.currentChar != -1) {
        b &= 0xff;
        if (this.currentChar == b) {
            if (++this.runLength > 254) {
                // Longest encodable run reached: flush and reset.
                writeRun();
                this.currentChar = -1;
                this.runLength = 0;
            }
            // else nothing to do
        }
        else {
            // Byte changed: flush the previous run, start a new one.
            writeRun();
            this.runLength = 1;
            this.currentChar = b;
        }
    }
    else {
        // No run pending (first byte, or first after a maximal run).
        this.currentChar = b & 0xff;
        this.runLength++;
    }
}
/**
 * Assigns canonical Huffman code values: symbols are numbered in order of
 * increasing code length (and, within a length, increasing symbol index),
 * with the running value doubled when moving to the next length.
 *
 * @param code receives the code value for each symbol
 * @param length code length (in bits, read unsigned) of each symbol
 * @param minLen smallest code length present
 * @param maxLen largest code length present
 * @param alphaSize number of symbols
 */
private static void hbAssignCodes(final int[] code, final byte[] length,
        final int minLen, final int maxLen, final int alphaSize)
{
    int nextCode = 0;
    for (int bits = minLen; bits <= maxLen; bits++) {
        for (int symbol = 0; symbol < alphaSize; symbol++) {
            if ((length[symbol] & 0xff) == bits) {
                code[symbol] = nextCode++;
            }
        }
        // One more bit available at the next length: shift the base left.
        nextCode <<= 1;
    }
}
/**
 * Drains any bits remaining in the bit buffer to the underlying stream,
 * one byte at a time; the final byte is zero-padded on the right.
 */
private void bsFinishedWithStream()
        throws IOException
{
    while (this.bsLive > 0) {
        this.out.write(this.bsBuff >> 24); // emit the top 8 bits
        this.bsBuff <<= 8;
        this.bsLive -= 8;
    }
}
/**
 * Writes the n low-order bits of v to the bit-oriented output buffer,
 * first draining full bytes (MSB first) to the underlying stream so that
 * fewer than 8 bits remain pending. The new bits are then spliced just
 * below the pending ones in the high end of {@code bsBuff}.
 * NOTE(review): assumes n is small enough that the shifted value fits
 * (n <= 24 after the drain) — confirm across callers.
 */
private void bsW(final int n, final int v)
        throws IOException
{
    final OutputStream outShadow = this.out;
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;
    // Drain whole bytes so at most 7 bits remain pending.
    while (bsLiveShadow >= 8) {
        outShadow.write(bsBuffShadow >> 24); // write 8-bit
        bsBuffShadow <<= 8;
        bsLiveShadow -= 8;
    }
    this.bsBuff = bsBuffShadow | (v << (32 - bsLiveShadow - n));
    this.bsLive = bsLiveShadow + n;
}
/**
 * Writes a single unsigned byte (8 bits) to the bit stream.
 */
private void bsPutUByte(final int c)
        throws IOException
{
    bsW(8, c);
}
/**
 * Writes a 32-bit integer to the bit stream, most significant byte first.
 */
private void bsPutInt(final int u)
        throws IOException
{
    for (int shift = 24; shift >= 0; shift -= 8) {
        bsW(8, (u >> shift) & 0xff);
    }
}
/**
 * Huffman-codes the MTF/RLE2 output of the current block and writes the
 * coding tables, group selectors and coded data to the bit stream. The
 * number of coding tables (2..6) is chosen from the MTF output size; the
 * individual phases live in the numbered sendMTFValuesN helpers.
 */
private void sendMTFValues()
        throws IOException
{
    final byte[][] len = this.data.sendMTFValuesLen;
    final int alphaSize = this.nInUse + 2;
    // Start every table with uniformly high costs.
    for (int t = N_GROUPS; --t >= 0; ) {
        byte[] lenT = len[t];
        for (int v = alphaSize; --v >= 0; ) {
            lenT[v] = GREATER_ICOST;
        }
    }
    /* Decide how many coding tables to use */
    // assert (this.nMTF > 0) : this.nMTF;
    final int nGroups = (this.nMTF < 200) ? 2 : (this.nMTF < 600) ? 3
            : (this.nMTF < 1200) ? 4 : (this.nMTF < 2400) ? 5 : 6;
    /* Generate an initial set of coding tables */
    sendMTFValues0(nGroups, alphaSize);
    /*
     * Iterate up to N_ITERS times to improve the tables.
     */
    final int nSelectors = sendMTFValues1(nGroups, alphaSize);
    /* Compute MTF values for the selectors. */
    sendMTFValues2(nGroups, nSelectors);
    /* Assign actual codes for the tables. */
    sendMTFValues3(nGroups, alphaSize);
    /* Transmit the mapping table. */
    sendMTFValues4();
    /* Now the selectors. */
    sendMTFValues5(nGroups, nSelectors);
    /* Now the coding tables. */
    sendMTFValues6(nGroups, alphaSize);
    /* And finally, the block data proper */
    sendMTFValues7();
}
/**
 * Builds an initial set of nGroups coding tables: the symbol alphabet is
 * split into contiguous ranges of roughly equal total frequency, and each
 * table is given a low cost (LESSER_ICOST) inside its range and a high
 * cost (GREATER_ICOST) everywhere else.
 */
private void sendMTFValues0(final int nGroups, final int alphaSize)
{
    final byte[][] len = this.data.sendMTFValuesLen;
    final int[] mtfFreq = this.data.mtfFreq;
    int remF = this.nMTF;
    int gs = 0;
    for (int nPart = nGroups; nPart > 0; nPart--) {
        // Target share of the remaining frequency for this group.
        final int tFreq = remF / nPart;
        int ge = gs - 1;
        int aFreq = 0;
        // Grow the range [gs, ge] until it covers about tFreq occurrences.
        for (final int a = alphaSize - 1; (aFreq < tFreq) && (ge < a); ) {
            aFreq += mtfFreq[++ge];
        }
        // On alternating interior groups, give back the last symbol to
        // keep the partition balanced.
        if ((ge > gs) && (nPart != nGroups) && (nPart != 1)
                && (((nGroups - nPart) & 1) != 0)) {
            aFreq -= mtfFreq[ge--];
        }
        final byte[] lenNp = len[nPart - 1];
        for (int v = alphaSize; --v >= 0; ) {
            if ((v >= gs) && (v <= ge)) {
                lenNp[v] = LESSER_ICOST;
            }
            else {
                lenNp[v] = GREATER_ICOST;
            }
        }
        gs = ge + 1;
        remF -= aFreq;
    }
}
/**
 * Refines the coding tables over {@code N_ITERS} passes: assigns each group
 * of G_SIZE symbols to the table that codes it most cheaply, accumulates
 * the per-table symbol frequencies, and rebuilds each table's Huffman code
 * lengths from those frequencies.
 *
 * @return the number of selectors produced (one per symbol group)
 */
private int sendMTFValues1(final int nGroups, final int alphaSize)
{
    final Data dataShadow = this.data;
    final int[][] rfreq = dataShadow.sendMTFValuesRfreq;
    final int[] fave = dataShadow.sendMTFValuesFave;
    final short[] cost = dataShadow.sendMTFValuesCost;
    final char[] sfmap = dataShadow.sfmap;
    final byte[] selector = dataShadow.selector;
    final byte[][] len = dataShadow.sendMTFValuesLen;
    final byte[] len0 = len[0];
    final byte[] len1 = len[1];
    final byte[] len2 = len[2];
    final byte[] len3 = len[3];
    final byte[] len4 = len[4];
    final byte[] len5 = len[5];
    final int nMTFShadow = this.nMTF;
    int nSelectors = 0;
    for (int iter = 0; iter < N_ITERS; iter++) {
        // Reset per-table usage counters and symbol frequencies.
        for (int t = nGroups; --t >= 0; ) {
            fave[t] = 0;
            int[] rfreqt = rfreq[t];
            for (int i = alphaSize; --i >= 0; ) {
                rfreqt[i] = 0;
            }
        }
        nSelectors = 0;
        for (int gs = 0; gs < this.nMTF; ) {
            /* Set group start & end marks. */
            /*
             * Calculate the cost of this group as coded by each of the
             * coding tables.
             */
            final int ge = Math.min(gs + G_SIZE - 1, nMTFShadow - 1);
            if (nGroups == N_GROUPS) {
                // unrolled version of the else-block
                short cost0 = 0;
                short cost1 = 0;
                short cost2 = 0;
                short cost3 = 0;
                short cost4 = 0;
                short cost5 = 0;
                for (int i = gs; i <= ge; i++) {
                    final int icv = sfmap[i];
                    cost0 += len0[icv] & 0xff;
                    cost1 += len1[icv] & 0xff;
                    cost2 += len2[icv] & 0xff;
                    cost3 += len3[icv] & 0xff;
                    cost4 += len4[icv] & 0xff;
                    cost5 += len5[icv] & 0xff;
                }
                cost[0] = cost0;
                cost[1] = cost1;
                cost[2] = cost2;
                cost[3] = cost3;
                cost[4] = cost4;
                cost[5] = cost5;
            }
            else {
                for (int t = nGroups; --t >= 0; ) {
                    cost[t] = 0;
                }
                for (int i = gs; i <= ge; i++) {
                    final int icv = sfmap[i];
                    for (int t = nGroups; --t >= 0; ) {
                        cost[t] += len[t][icv] & 0xff;
                    }
                }
            }
            /*
             * Find the coding table which is best for this group, and
             * record its identity in the selector table.
             */
            int bt = -1;
            for (int t = nGroups, bc = 999999999; --t >= 0; ) {
                final int costT = cost[t];
                if (costT < bc) {
                    bc = costT;
                    bt = t;
                }
            }
            fave[bt]++;
            selector[nSelectors] = (byte) bt;
            nSelectors++;
            /*
             * Increment the symbol frequencies for the selected table.
             */
            final int[] rfreqBt = rfreq[bt];
            for (int i = gs; i <= ge; i++) {
                rfreqBt[sfmap[i]]++;
            }
            gs = ge + 1;
        }
        /*
         * Recompute the tables based on the accumulated frequencies.
         */
        for (int t = 0; t < nGroups; t++) {
            hbMakeCodeLengths(len[t], rfreq[t], this.data, alphaSize, 20);
        }
    }
    return nSelectors;
}
/**
 * Move-to-front encodes the selector list into {@code selectorMtf}, so
 * runs of the same table become runs of small values.
 */
private void sendMTFValues2(final int nGroups, final int nSelectors)
{
    // assert (nGroups < 8) : nGroups;
    final Data dataShadow = this.data;
    final byte[] pos = dataShadow.sendMTFValues2Pos;
    for (int i = 0; i < nGroups; i++) {
        pos[i] = (byte) i;
    }
    for (int i = 0; i < nSelectors; i++) {
        final byte wanted = dataShadow.selector[i];
        byte current = pos[0];
        int rank = 0;
        // Shift entries down until the wanted table surfaces ...
        while (wanted != current) {
            rank++;
            final byte displaced = current;
            current = pos[rank];
            pos[rank] = displaced;
        }
        // ... then move it to the front and record its previous rank.
        pos[0] = current;
        dataShadow.selectorMtf[i] = (byte) rank;
    }
}
/**
 * Converts each table's code lengths into actual canonical Huffman codes.
 */
private void sendMTFValues3(final int nGroups, final int alphaSize)
{
    final int[][] code = this.data.sendMTFValuesCode;
    final byte[][] len = this.data.sendMTFValuesLen;
    for (int t = 0; t < nGroups; t++) {
        final byte[] lenT = len[t];
        // Find the shortest and longest code length used by this table.
        int minLen = 32;
        int maxLen = 0;
        for (int i = 0; i < alphaSize; i++) {
            final int l = lenT[i] & 0xff;
            maxLen = Math.max(maxLen, l);
            minLen = Math.min(minLen, l);
        }
        // assert (maxLen <= 20) : maxLen;
        // assert (minLen >= 1) : minLen;
        hbAssignCodes(code[t], lenT, minLen, maxLen, alphaSize);
    }
}
/**
 * Transmits the symbol usage map: 16 bits marking which 16-symbol ranges
 * are in use, followed by a detailed 16-bit map for each used range.
 * The inner bit writes are hand-inlined copies of {@code bsW(1, ...)}.
 */
private void sendMTFValues4()
        throws IOException
{
    final boolean[] inUse = this.data.inUse;
    final boolean[] inUse16 = this.data.sentMTFValues4InUse16;
    // A coarse entry is set when any of its 16 symbols occurs in the block.
    for (int i = 16; --i >= 0; ) {
        inUse16[i] = false;
        final int i16 = i * 16;
        for (int j = 16; --j >= 0; ) {
            if (inUse[i16 + j]) {
                inUse16[i] = true;
                break;
            }
        }
    }
    for (int i = 0; i < 16; i++) {
        bsW(1, inUse16[i] ? 1 : 0);
    }
    final OutputStream outShadow = this.out;
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;
    for (int i = 0; i < 16; i++) {
        if (inUse16[i]) {
            final int i16 = i * 16;
            for (int j = 0; j < 16; j++) {
                // inlined: bsW(1, inUse[i16 + j] ? 1 : 0);
                while (bsLiveShadow >= 8) {
                    outShadow.write(bsBuffShadow >> 24); // write 8-bit
                    bsBuffShadow <<= 8;
                    bsLiveShadow -= 8;
                }
                if (inUse[i16 + j]) {
                    bsBuffShadow |= 1 << (32 - bsLiveShadow - 1);
                }
                bsLiveShadow++;
            }
        }
    }
    this.bsBuff = bsBuffShadow;
    this.bsLive = bsLiveShadow;
}
/**
 * Writes the table count (3 bits), the selector count (15 bits), and then
 * each MTF-coded selector in unary: {@code value} one-bits followed by a
 * terminating zero bit. The bit writes are hand-inlined {@code bsW} calls.
 */
private void sendMTFValues5(final int nGroups, final int nSelectors)
        throws IOException
{
    bsW(3, nGroups);
    bsW(15, nSelectors);
    final OutputStream outShadow = this.out;
    final byte[] selectorMtf = this.data.selectorMtf;
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;
    for (int i = 0; i < nSelectors; i++) {
        // Emit one 1-bit per unit of the selector's MTF rank.
        for (int j = 0, hj = selectorMtf[i] & 0xff; j < hj; j++) {
            // inlined: bsW(1, 1);
            while (bsLiveShadow >= 8) {
                outShadow.write(bsBuffShadow >> 24);
                bsBuffShadow <<= 8;
                bsLiveShadow -= 8;
            }
            bsBuffShadow |= 1 << (32 - bsLiveShadow - 1);
            bsLiveShadow++;
        }
        // Terminating zero bit for this selector.
        // inlined: bsW(1, 0);
        while (bsLiveShadow >= 8) {
            outShadow.write(bsBuffShadow >> 24);
            bsBuffShadow <<= 8;
            bsLiveShadow -= 8;
        }
        // bsBuffShadow |= 0 << (32 - bsLiveShadow - 1);
        bsLiveShadow++;
    }
    this.bsBuff = bsBuffShadow;
    this.bsLive = bsLiveShadow;
}
/**
 * Writes the {@code nGroups} code-length tables as deltas: a 5-bit initial
 * length, then per symbol a sequence of "10" (increment) or "11"
 * (decrement) bit pairs and a terminating 0 bit. All bit writes are
 * hand-inlined {@code bsW} calls.
 */
private void sendMTFValues6(final int nGroups, final int alphaSize)
        throws IOException
{
    final byte[][] len = this.data.sendMTFValuesLen;
    final OutputStream outShadow = this.out;
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;
    for (int t = 0; t < nGroups; t++) {
        byte[] lenT = len[t];
        int curr = lenT[0] & 0xff;
        // inlined: bsW(5, curr);
        while (bsLiveShadow >= 8) {
            outShadow.write(bsBuffShadow >> 24); // write 8-bit
            bsBuffShadow <<= 8;
            bsLiveShadow -= 8;
        }
        bsBuffShadow |= curr << (32 - bsLiveShadow - 5);
        bsLiveShadow += 5;
        for (int i = 0; i < alphaSize; i++) {
            int lti = lenT[i] & 0xff;
            // "10" pairs until curr reaches the target length from below.
            while (curr < lti) {
                // inlined: bsW(2, 2);
                while (bsLiveShadow >= 8) {
                    outShadow.write(bsBuffShadow >> 24); // write 8-bit
                    bsBuffShadow <<= 8;
                    bsLiveShadow -= 8;
                }
                bsBuffShadow |= 2 << (32 - bsLiveShadow - 2);
                bsLiveShadow += 2;
                curr++; /* 10 */
            }
            // "11" pairs until curr reaches the target length from above.
            while (curr > lti) {
                // inlined: bsW(2, 3);
                while (bsLiveShadow >= 8) {
                    outShadow.write(bsBuffShadow >> 24); // write 8-bit
                    bsBuffShadow <<= 8;
                    bsLiveShadow -= 8;
                }
                bsBuffShadow |= 3 << (32 - bsLiveShadow - 2);
                bsLiveShadow += 2;
                curr--; /* 11 */
            }
            // Terminating 0 bit: length for this symbol is complete.
            // inlined: bsW(1, 0);
            while (bsLiveShadow >= 8) {
                outShadow.write(bsBuffShadow >> 24); // write 8-bit
                bsBuffShadow <<= 8;
                bsLiveShadow -= 8;
            }
            // bsBuffShadow |= 0 << (32 - bsLiveShadow - 1);
            bsLiveShadow++;
        }
    }
    this.bsBuff = bsBuffShadow;
    this.bsLive = bsLiveShadow;
}
/**
 * Writes the block payload: each G_SIZE group of MTF symbols is emitted
 * using the Huffman table named by that group's selector. The bit writes
 * are hand-inlined {@code bsW} calls.
 */
private void sendMTFValues7()
        throws IOException
{
    final Data dataShadow = this.data;
    final byte[][] len = dataShadow.sendMTFValuesLen;
    final int[][] code = dataShadow.sendMTFValuesCode;
    final OutputStream outShadow = this.out;
    final byte[] selector = dataShadow.selector;
    final char[] sfmap = dataShadow.sfmap;
    final int nMTFShadow = this.nMTF;
    int selCtr = 0;
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;
    for (int gs = 0; gs < nMTFShadow; ) {
        final int ge = Math.min(gs + G_SIZE - 1, nMTFShadow - 1);
        // Table for this group, chosen during sendMTFValues1.
        final int selectorSelCtr = selector[selCtr] & 0xff;
        final int[] codeSelCtr = code[selectorSelCtr];
        final byte[] lenSelCtr = len[selectorSelCtr];
        while (gs <= ge) {
            final int sfmapI = sfmap[gs];
            // inlined: bsW(lenSelCtr[sfmapI] & 0xff,
            // codeSelCtr[sfmapI]);
            while (bsLiveShadow >= 8) {
                outShadow.write(bsBuffShadow >> 24);
                bsBuffShadow <<= 8;
                bsLiveShadow -= 8;
            }
            final int n = lenSelCtr[sfmapI] & 0xFF;
            bsBuffShadow |= codeSelCtr[sfmapI] << (32 - bsLiveShadow - n);
            bsLiveShadow += n;
            gs++;
        }
        gs = ge + 1;
        selCtr++;
    }
    this.bsBuff = bsBuffShadow;
    this.bsLive = bsLiveShadow;
}
/**
 * Emits the sorted block: the 24-bit original pointer, then the MTF/RLE2
 * symbol stream produced by {@link #generateMTFValues()} and encoded by
 * {@link #sendMTFValues()}.
 */
private void moveToFrontCodeAndSend()
        throws IOException
{
    bsW(24, this.origPtr);
    generateMTFValues();
    sendMTFValues();
}
/**
 * Shell sort over {@code fmap[lo..hi]} comparing rotations starting at
 * offset {@code d}, with the greater-than comparison ({@code mainGtU})
 * hand-inlined and unrolled.
 *
 * <p>
 * This is the most hammered method of this class. The unrolling has shown
 * a noticeable performance improvement on JRE 1.4.2 (Linux i586 / HotSpot
 * Client); it depends on the JIT compiler of the VM.
 * </p>
 *
 * @return whether a first-attempt sort has exceeded its work budget
 *         (signalling the caller to randomise the block and retry)
 */
@SuppressWarnings("checkstyle:InnerAssignment")
private boolean mainSimpleSort(final Data dataShadow, final int lo,
        final int hi, final int d)
{
    final int bigN = hi - lo + 1;
    if (bigN < 2) {
        return this.firstAttempt && (this.workDone > this.workLimit);
    }
    // Pick the largest shell-sort increment smaller than the range.
    int hp = 0;
    while (INCS[hp] < bigN) {
        hp++;
    }
    final int[] fmap = dataShadow.fmap;
    final char[] quadrant = dataShadow.quadrant;
    final byte[] block = dataShadow.block;
    final int lastShadow = this.last;
    final int lastPlus1 = lastShadow + 1;
    final boolean firstAttemptShadow = this.firstAttempt;
    final int workLimitShadow = this.workLimit;
    int workDoneShadow = this.workDone;
    // Following block contains unrolled code which could be shortened by
    // coding it in additional loops.
    HP:
    while (--hp >= 0) {
        final int h = INCS[hp];
        final int mj = lo + h - 1;
        for (int i = lo + h; i <= hi; ) {
            // Insertion step for up to 3 elements before re-checking the
            // work budget.
            for (int k = 3; (i <= hi) && (--k >= 0); i++) {
                final int v = fmap[i];
                final int vd = v + d;
                int j = i;
                // Equivalent rolled form:
                // for (int a;
                // (j > mj) && mainGtU((a = fmap[j - h]) + d, vd,
                // block, quadrant, lastShadow);
                // j -= h) {
                // fmap[j] = a;
                // }
                //
                // One HAMMER iteration = one inlined mainGtU comparison of
                // the rotation at fmap[j - h] against v. "continue HAMMER"
                // means "greater, keep sifting"; "break HAMMER" means
                // "not greater, insertion point found".
                boolean onceRunned = false;
                int a = 0;
                HAMMER:
                while (true) {
                    if (onceRunned) {
                        fmap[j] = a;
                        if ((j -= h) <= mj) {
                            break;
                        }
                    }
                    else {
                        onceRunned = true;
                    }
                    a = fmap[j - h];
                    int i1 = a + d;
                    int i2 = vd;
                    // First compare 6 leading bytes directly; the byte
                    // comparisons could be done in a loop, but are
                    // unrolled for performance:
                    if (block[i1 + 1] == block[i2 + 1]) {
                        if (block[i1 + 2] == block[i2 + 2]) {
                            if (block[i1 + 3] == block[i2 + 3]) {
                                if (block[i1 + 4] == block[i2 + 4]) {
                                    if (block[i1 + 5] == block[i2 + 5]) {
                                        if (block[(i1 += 6)] == block[(i2 += 6)]) {
                                            // Tie after 6 bytes: continue comparing
                                            // 4 bytes + cached quadrant ranks per
                                            // iteration, up to the block length.
                                            int x = lastShadow;
                                            while (x > 0) {
                                                x -= 4;
                                                if (block[i1 + 1] == block[i2 + 1]) {
                                                    if (quadrant[i1] == quadrant[i2]) {
                                                        if (block[i1 + 2] == block[i2 + 2]) {
                                                            if (quadrant[i1 + 1] == quadrant[i2 + 1]) {
                                                                if (block[i1 + 3] == block[i2 + 3]) {
                                                                    if (quadrant[i1 + 2] == quadrant[i2 + 2]) {
                                                                        if (block[i1 + 4] == block[i2 + 4]) {
                                                                            if (quadrant[i1 + 3] == quadrant[i2 + 3]) {
                                                                                // Still tied: advance both cursors
                                                                                // (with wrap-around) and loop.
                                                                                if ((i1 += 4) >= lastPlus1) {
                                                                                    i1 -= lastPlus1;
                                                                                }
                                                                                if ((i2 += 4) >= lastPlus1) {
                                                                                    i2 -= lastPlus1;
                                                                                }
                                                                                workDoneShadow++;
                                                                            }
                                                                            else if ((quadrant[i1 + 3] > quadrant[i2 + 3])) {
                                                                                continue HAMMER;
                                                                            }
                                                                            else {
                                                                                break HAMMER;
                                                                            }
                                                                        }
                                                                        else if ((block[i1 + 4] & 0xff) > (block[i2 + 4] & 0xff)) {
                                                                            continue HAMMER;
                                                                        }
                                                                        else {
                                                                            break HAMMER;
                                                                        }
                                                                    }
                                                                    else if ((quadrant[i1 + 2] > quadrant[i2 + 2])) {
                                                                        continue HAMMER;
                                                                    }
                                                                    else {
                                                                        break HAMMER;
                                                                    }
                                                                }
                                                                else if ((block[i1 + 3] & 0xff) > (block[i2 + 3] & 0xff)) {
                                                                    continue HAMMER;
                                                                }
                                                                else {
                                                                    break HAMMER;
                                                                }
                                                            }
                                                            else if ((quadrant[i1 + 1] > quadrant[i2 + 1])) {
                                                                continue HAMMER;
                                                            }
                                                            else {
                                                                break HAMMER;
                                                            }
                                                        }
                                                        else if ((block[i1 + 2] & 0xff) > (block[i2 + 2] & 0xff)) {
                                                            continue HAMMER;
                                                        }
                                                        else {
                                                            break HAMMER;
                                                        }
                                                    }
                                                    else if ((quadrant[i1] > quadrant[i2])) {
                                                        continue HAMMER;
                                                    }
                                                    else {
                                                        break HAMMER;
                                                    }
                                                }
                                                else if ((block[i1 + 1] & 0xff) > (block[i2 + 1] & 0xff)) {
                                                    continue HAMMER;
                                                }
                                                else {
                                                    break HAMMER;
                                                }
                                            } // while x > 0
                                            // Rotations compared equal over the full
                                            // depth: "not greater", stop sifting.
                                            break;
                                        }
                                        else {
                                            if ((block[i1] & 0xff) <= (block[i2] & 0xff)) {
                                                break;
                                            }
                                        }
                                    }
                                    else if ((block[i1 + 5] & 0xff) > (block[i2 + 5] & 0xff)) {
                                        // greater: fall through to the next HAMMER pass
                                    }
                                    else {
                                        break;
                                    }
                                }
                                else if ((block[i1 + 4] & 0xff) > (block[i2 + 4] & 0xff)) {
                                    // greater: fall through to the next HAMMER pass
                                }
                                else {
                                    break;
                                }
                            }
                            else if ((block[i1 + 3] & 0xff) > (block[i2 + 3] & 0xff)) {
                                // greater: fall through to the next HAMMER pass
                            }
                            else {
                                break;
                            }
                        }
                        else if ((block[i1 + 2] & 0xff) > (block[i2 + 2] & 0xff)) {
                            // greater: fall through to the next HAMMER pass
                        }
                        else {
                            break;
                        }
                    }
                    else if ((block[i1 + 1] & 0xff) > (block[i2 + 1] & 0xff)) {
                        // greater: fall through to the next HAMMER pass
                    }
                    else {
                        break;
                    }
                }
                fmap[j] = v;
            }
            // Abort the whole sort if a first attempt ran out of budget.
            if (firstAttemptShadow && (i <= hi)
                    && (workDoneShadow > workLimitShadow)) {
                break HP;
            }
        }
    }
    this.workDone = workDoneShadow;
    return firstAttemptShadow && (workDoneShadow > workLimitShadow);
}
/**
 * Exchanges the {@code n} elements starting at {@code p1} with the
 * {@code n} elements starting at {@code p2}, element by element.
 */
private static void vswap(int[] fmap, int p1, int p2, int n)
{
    for (int i = 0; i < n; i++) {
        final int tmp = fmap[p1 + i];
        fmap[p1 + i] = fmap[p2 + i];
        fmap[p2 + i] = tmp;
    }
}
/**
 * Returns the median of the three values, branching on the order of
 * {@code a} and {@code b} first.
 */
private static byte med3(byte a, byte b, byte c)
{
    if (a < b) {
        return b < c ? b : (a < c ? c : a);
    }
    return b > c ? b : (a > c ? c : a);
}
/**
 * Runs the Burrows-Wheeler block sort, retrying once on a randomised
 * block if the first attempt exceeds its work budget, then records the
 * position of the original rotation in {@code origPtr}.
 */
private void blockSort()
{
    this.workLimit = WORK_FACTOR * this.last;
    this.workDone = 0;
    this.blockRandomised = false;
    this.firstAttempt = true;
    mainSort();
    // First pass blew its budget: scramble the block and sort again
    // without a limit.
    if (this.firstAttempt && (this.workDone > this.workLimit)) {
        randomiseBlock();
        this.workLimit = 0;
        this.workDone = 0;
        this.firstAttempt = false;
        mainSort();
    }
    // Locate where the untransformed string ended up in sorted order.
    this.origPtr = -1;
    final int[] fmap = this.data.fmap;
    final int lastShadow = this.last;
    for (int i = 0; i <= lastShadow; i++) {
        if (fmap[i] == 0) {
            this.origPtr = i;
            break;
        }
    }
}
/**
 * Method "mainQSort3", file "blocksort.c", BZip2 1.0.2
 *
 * <p>Three-way quicksort over {@code fmap} driven by an explicit stack;
 * small or deep partitions are finished by {@link #mainSimpleSort}, whose
 * {@code true} return (first-attempt budget exceeded) aborts the sort.</p>
 */
private void mainQSort3(final Data dataShadow, final int loSt, final int hiSt, final int dSt)
{
    final int[] stackLl = dataShadow.stackLl;
    final int[] stackHh = dataShadow.stackHh;
    final int[] stackDd = dataShadow.stackDd;
    final int[] fmap = dataShadow.fmap;
    final byte[] block = dataShadow.block;
    stackLl[0] = loSt;
    stackHh[0] = hiSt;
    stackDd[0] = dSt;
    for (int sp = 1; --sp >= 0; ) {
        final int lo = stackLl[sp];
        final int hi = stackHh[sp];
        final int d = stackDd[sp];
        // Small or deep partitions: hand over to shell sort.
        if ((hi - lo < SMALL_THRESH) || (d > DEPTH_THRESH)) {
            if (mainSimpleSort(dataShadow, lo, hi, d)) {
                return;
            }
        }
        else {
            final int d1 = d + 1;
            // Median-of-three pivot on the byte at depth d1.
            final int med = med3(block[fmap[lo] + d1], block[fmap[hi] + d1], block[fmap[(lo + hi) >>> 1] + d1]) & 0xff;
            int unLo = lo;
            int unHi = hi;
            int ltLo = lo;
            int gtHi = hi;
            // Three-way partition: keys equal to the pivot are parked at
            // both ends and swapped back to the middle afterwards.
            while (true) {
                while (unLo <= unHi) {
                    final int n = ((int) block[fmap[unLo] + d1] & 0xff) - med;
                    if (n == 0) {
                        final int temp = fmap[unLo];
                        fmap[unLo++] = fmap[ltLo];
                        fmap[ltLo++] = temp;
                    }
                    else if (n < 0) {
                        unLo++;
                    }
                    else {
                        break;
                    }
                }
                while (unLo <= unHi) {
                    final int n = ((int) block[fmap[unHi] + d1] & 0xff) - med;
                    if (n == 0) {
                        final int temp = fmap[unHi];
                        fmap[unHi--] = fmap[gtHi];
                        fmap[gtHi--] = temp;
                    }
                    else if (n > 0) {
                        unHi--;
                    }
                    else {
                        break;
                    }
                }
                if (unLo <= unHi) {
                    final int temp = fmap[unLo];
                    fmap[unLo++] = fmap[unHi];
                    fmap[unHi--] = temp;
                }
                else {
                    break;
                }
            }
            if (gtHi < ltLo) {
                // Every key equalled the pivot: just recurse one byte deeper.
                stackLl[sp] = lo;
                stackHh[sp] = hi;
                stackDd[sp] = d1;
                sp++;
            }
            else {
                // Move the equal-to-pivot regions back to the middle ...
                int n = Math.min((ltLo - lo), (unLo - ltLo));
                vswap(fmap, lo, unLo - n, n);
                int m = Math.min((hi - gtHi), (gtHi - unHi));
                vswap(fmap, unLo, hi - m + 1, m);
                // ... then push the less-than, equal (deeper), and
                // greater-than sub-ranges onto the stack.
                n = lo + unLo - ltLo - 1;
                m = hi - (gtHi - unHi) + 1;
                stackLl[sp] = lo;
                stackHh[sp] = n;
                stackDd[sp] = d;
                sp++;
                stackLl[sp] = n + 1;
                stackHh[sp] = m - 1;
                stackDd[sp] = d1;
                sp++;
                stackLl[sp] = m;
                stackHh[sp] = hi;
                stackDd[sp] = d;
                sp++;
            }
        }
    }
}
/**
 * Top-level Burrows-Wheeler sort ("mainSort" from blocksort.c): an initial
 * two-byte radix sort, then per-big-bucket quicksorts ordered smallest
 * bucket first, synthesising already-sorted small buckets where possible
 * and maintaining the quadrant cache used by {@link #mainSimpleSort}.
 * Returns early if a first attempt exceeds its work budget.
 */
private void mainSort()
{
    final Data dataShadow = this.data;
    final int[] runningOrder = dataShadow.mainSortRunningOrder;
    final int[] copy = dataShadow.mainSortCopy;
    final boolean[] bigDone = dataShadow.mainSortBigDone;
    final int[] ftab = dataShadow.ftab;
    final byte[] block = dataShadow.block;
    final int[] fmap = dataShadow.fmap;
    final char[] quadrant = dataShadow.quadrant;
    final int lastShadow = this.last;
    final int workLimitShadow = this.workLimit;
    final boolean firstAttemptShadow = this.firstAttempt;
    // Set up the 2-byte frequency table
    for (int i = 65537; --i >= 0; ) {
        ftab[i] = 0;
    }
    /*
     * In the various block-sized structures, live data runs from 0 to
     * last+NUM_OVERSHOOT_BYTES inclusive. First, set up the overshoot area
     * for block.
     */
    for (int i = 0; i < NUM_OVERSHOOT_BYTES; i++) {
        block[lastShadow + i + 2] = block[(i % (lastShadow + 1)) + 1];
    }
    for (int i = lastShadow + NUM_OVERSHOOT_BYTES + 1; --i >= 0; ) {
        quadrant[i] = 0;
    }
    block[0] = block[lastShadow + 1];
    // Complete the initial radix sort: count every 2-byte pair ...
    int c1 = block[0] & 0xff;
    for (int i = 0; i <= lastShadow; i++) {
        final int c2 = block[i + 1] & 0xff;
        ftab[(c1 << 8) + c2]++;
        c1 = c2;
    }
    // ... turn counts into cumulative bucket end positions ...
    for (int i = 1; i <= 65536; i++) {
        ftab[i] += ftab[i - 1];
    }
    // ... and scatter the rotation start positions into fmap.
    c1 = block[1] & 0xff;
    for (int i = 0; i < lastShadow; i++) {
        final int c2 = block[i + 2] & 0xff;
        fmap[--ftab[(c1 << 8) + c2]] = i;
        c1 = c2;
    }
    fmap[--ftab[((block[lastShadow + 1] & 0xff) << 8) + (block[1] & 0xff)]] = lastShadow;
    /*
     * Now ftab contains the first loc of every small bucket. Calculate the
     * running order, from smallest to largest big bucket.
     */
    for (int i = 256; --i >= 0; ) {
        bigDone[i] = false;
        runningOrder[i] = i;
    }
    // Shell sort of the 256 big buckets by size (increments 121, 40, 13, 4, 1).
    for (int h = 364; h != 1; ) {
        h /= 3;
        for (int i = h; i <= 255; i++) {
            final int vv = runningOrder[i];
            final int a = ftab[(vv + 1) << 8] - ftab[vv << 8];
            final int b = h - 1;
            int j = i;
            for (int ro = runningOrder[j - h]; (ftab[(ro + 1) << 8] - ftab[ro << 8]) > a; ro = runningOrder[j - h]) {
                runningOrder[j] = ro;
                j -= h;
                if (j <= b) {
                    break;
                }
            }
            runningOrder[j] = vv;
        }
    }
    /*
     * The main sorting loop.
     */
    for (int i = 0; i <= 255; i++) {
        /*
         * Process big buckets, starting with the least full.
         */
        final int ss = runningOrder[i];
        // Step 1:
        /*
         * Complete the big bucket [ss] by quick sorting any unsorted small
         * buckets [ss, j]. Hopefully previous pointer-scanning phases have
         * already completed many of the small buckets [ss, j], so we don't
         * have to sort them at all.
         */
        for (int j = 0; j <= 255; j++) {
            final int sb = (ss << 8) + j;
            final int ftabSb = ftab[sb];
            // SET_MASK in a ftab entry marks the small bucket as done.
            if ((ftabSb & SET_MASK) != SET_MASK) {
                final int lo = ftabSb & CLEAR_MASK;
                final int hi = (ftab[sb + 1] & CLEAR_MASK) - 1;
                if (hi > lo) {
                    mainQSort3(dataShadow, lo, hi, 2);
                    if (firstAttemptShadow
                            && (this.workDone > workLimitShadow)) {
                        return;
                    }
                }
                ftab[sb] = ftabSb | SET_MASK;
            }
        }
        // Step 2:
        // Now scan this big bucket to synthesise the
        // sorted order for small buckets [t, ss] for all t != ss.
        for (int j = 0; j <= 255; j++) {
            copy[j] = ftab[(j << 8) + ss] & CLEAR_MASK;
        }
        for (int j = ftab[ss << 8] & CLEAR_MASK, hj = (ftab[(ss + 1) << 8] & CLEAR_MASK); j < hj; j++) {
            final int fmapJ = fmap[j];
            c1 = block[fmapJ] & 0xff;
            if (!bigDone[c1]) {
                fmap[copy[c1]] = (fmapJ == 0) ? lastShadow : (fmapJ - 1);
                copy[c1]++;
            }
        }
        for (int j = 256; --j >= 0; ) {
            ftab[(j << 8) + ss] |= SET_MASK;
        }
        // Step 3:
        /*
         * The ss big bucket is now done. Record this fact, and update the
         * quadrant descriptors. Remember to update quadrants in the
         * overshoot area too, if necessary. The "if (i < 255)" test merely
         * skips this updating for the last bucket processed, since updating
         * for the last bucket is pointless.
         */
        bigDone[ss] = true;
        if (i < 255) {
            final int bbStart = ftab[ss << 8] & CLEAR_MASK;
            final int bbSize = (ftab[(ss + 1) << 8] & CLEAR_MASK) - bbStart;
            // Scale the rank so it fits in a char (quadrant is a char[]).
            int shifts = 0;
            while ((bbSize >> shifts) > 65534) {
                shifts++;
            }
            for (int j = 0; j < bbSize; j++) {
                final int a2update = fmap[bbStart + j];
                final char qVal = (char) (j >> shifts);
                quadrant[a2update] = qVal;
                if (a2update < NUM_OVERSHOOT_BYTES) {
                    quadrant[a2update + lastShadow + 1] = qVal;
                }
            }
        }
    }
}
/**
 * XORs a sparse pseudo-random bit pattern (driven by the shared R_NUMS
 * table) into the block, so that a pathological input no longer defeats
 * the sorter, and rebuilds the in-use map for the modified bytes.
 */
private void randomiseBlock()
{
    final boolean[] inUse = this.data.inUse;
    final byte[] block = this.data.block;
    final int lastShadow = this.last;
    for (int i = 256; --i >= 0; ) {
        inUse[i] = false;
    }
    int rNToGo = 0;
    int rTPos = 0;
    // block[] is 1-based here: j = i + 1 indexes the byte for position i.
    for (int i = 0, j = 1; i <= lastShadow; i = j, j++) {
        if (rNToGo == 0) {
            rNToGo = (char) R_NUMS[rTPos];
            if (++rTPos == 512) {
                rTPos = 0;
            }
        }
        rNToGo--;
        // Flip the low bit only on the last step of each run.
        block[j] ^= ((rNToGo == 1) ? 1 : 0);
        // handle 16 bit signed numbers
        inUse[block[j] & 0xff] = true;
    }
    this.blockRandomised = true;
}
/**
 * Performs the move-to-front transform over the BWT output and run-length
 * encodes zero runs using the two symbols RUN_A / RUN_B, filling
 * {@code sfmap} and the symbol frequency table used by the Huffman coder.
 * Sets {@code nInUse} and {@code nMTF}.
 */
private void generateMTFValues()
{
    final int lastShadow = this.last;
    final Data dataShadow = this.data;
    final boolean[] inUse = dataShadow.inUse;
    final byte[] block = dataShadow.block;
    final int[] fmap = dataShadow.fmap;
    final char[] sfmap = dataShadow.sfmap;
    final int[] mtfFreq = dataShadow.mtfFreq;
    final byte[] unseqToSeq = dataShadow.unseqToSeq;
    final byte[] yy = dataShadow.generateMTFValuesYy;
    // Map each used byte value to a dense sequence number.
    int nInUseShadow = 0;
    for (int i = 0; i < 256; i++) {
        if (inUse[i]) {
            unseqToSeq[i] = (byte) nInUseShadow;
            nInUseShadow++;
        }
    }
    this.nInUse = nInUseShadow;
    // End-of-block symbol: one past the largest MTF value.
    final int eob = nInUseShadow + 1;
    for (int i = eob; i >= 0; i--) {
        mtfFreq[i] = 0;
    }
    for (int i = nInUseShadow; --i >= 0; ) {
        yy[i] = (byte) i;
    }
    int wr = 0;
    int zPend = 0;
    for (int i = 0; i <= lastShadow; i++) {
        final byte llI = unseqToSeq[block[fmap[i]] & 0xff];
        // Move-to-front: locate llI in yy, shifting entries down as we go.
        byte tmp = yy[0];
        int j = 0;
        while (llI != tmp) {
            j++;
            byte tmp2 = tmp;
            tmp = yy[j];
            yy[j] = tmp2;
        }
        yy[0] = tmp;
        if (j == 0) {
            // Rank 0: extend the pending zero run.
            zPend++;
        }
        else {
            if (zPend > 0) {
                // Flush the zero run as a bijective base-2 number built
                // from RUN_A (digit 0) and RUN_B (digit 1) symbols.
                zPend--;
                while (true) {
                    if ((zPend & 1) == 0) {
                        sfmap[wr] = RUN_A;
                        wr++;
                        mtfFreq[RUN_A]++;
                    }
                    else {
                        sfmap[wr] = RUN_B;
                        wr++;
                        mtfFreq[RUN_B]++;
                    }
                    if (zPend >= 2) {
                        zPend = (zPend - 2) >> 1;
                    }
                    else {
                        break;
                    }
                }
                zPend = 0;
            }
            // Non-zero ranks are shifted up by one to make room for
            // RUN_A / RUN_B.
            sfmap[wr] = (char) (j + 1);
            wr++;
            mtfFreq[j + 1]++;
        }
    }
    // Flush a zero run left pending at the end of the block.
    if (zPend > 0) {
        zPend--;
        while (true) {
            if ((zPend & 1) == 0) {
                sfmap[wr] = RUN_A;
                wr++;
                mtfFreq[RUN_A]++;
            }
            else {
                sfmap[wr] = RUN_B;
                wr++;
                mtfFreq[RUN_B]++;
            }
            if (zPend >= 2) {
                zPend = (zPend - 2) >> 1;
            }
            else {
                break;
            }
        }
    }
    // Terminate the symbol stream with the end-of-block symbol.
    sfmap[wr] = (char) eob;
    mtfFreq[eob]++;
    this.nMTF = wr + 1;
}
/**
 * Pre-allocated working storage for one compressor instance; everything
 * is sized up front from the block size so the compression loops perform
 * no further allocation.
 */
private static final class Data
{
    // Sizes in the trailing comments assume the maximum blockSize of 900k.
    final boolean[] inUse = new boolean[256]; // 256 byte
    final byte[] unseqToSeq = new byte[256]; // 256 byte
    final int[] mtfFreq = new int[MAX_ALPHA_SIZE]; // 1032 byte
    final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte
    final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte
    final byte[] generateMTFValuesYy = new byte[256]; // 256 byte
    final byte[][] sendMTFValuesLen = new byte[N_GROUPS][MAX_ALPHA_SIZE]; // 1548 byte
    final int[][] sendMTFValuesRfreq = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
    final int[] sendMTFValuesFave = new int[N_GROUPS]; // 24 byte
    final short[] sendMTFValuesCost = new short[N_GROUPS]; // 12 byte
    final int[][] sendMTFValuesCode = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
    final byte[] sendMTFValues2Pos = new byte[N_GROUPS]; // 6 byte
    final boolean[] sentMTFValues4InUse16 = new boolean[16]; // 16 byte
    final int[] stackLl = new int[QSORT_STACK_SIZE]; // 4000 byte
    final int[] stackHh = new int[QSORT_STACK_SIZE]; // 4000 byte
    final int[] stackDd = new int[QSORT_STACK_SIZE]; // 4000 byte
    final int[] mainSortRunningOrder = new int[256]; // 1024 byte
    final int[] mainSortCopy = new int[256]; // 1024 byte
    final boolean[] mainSortBigDone = new boolean[256]; // 256 byte
    final int[] heap = new int[MAX_ALPHA_SIZE + 2]; // 1040 byte
    final int[] weight = new int[MAX_ALPHA_SIZE * 2]; // 2064 byte
    final int[] parent = new int[MAX_ALPHA_SIZE * 2]; // 2064 byte
    final int[] ftab = new int[65537]; // 262148 byte
    // ------------
    // 333408 byte
    final byte[] block; // 900021 byte
    final int[] fmap; // 3600000 byte
    final char[] sfmap; // 3600000 byte
    // ------------
    // 8433529 byte
    // ============
    /**
     * Array instance identical to sfmap, both are used only temporarily and
     * independently, so we do not need to allocate additional memory.
     */
    final char[] quadrant;

    Data(int blockSize100k)
    {
        final int n = blockSize100k * BZip2Constants.BASE_BLOCK_SIZE;
        this.block = new byte[(n + 1 + NUM_OVERSHOOT_BYTES)];
        this.fmap = new int[n];
        this.sfmap = new char[2 * n];
        // quadrant deliberately aliases sfmap (see field javadoc above).
        this.quadrant = this.sfmap;
    }
}
}
================================================
FILE: src/main/java/io/airlift/compress/v3/bzip2/Crc32.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.bzip2;
// forked from Apache Hadoop
/**
 * CRC-32 as used by BZip2 (polynomial 0x04c11db7, initial value
 * 0xffffffff, non-reflected, final complement). Forked from Apache Hadoop.
 */
final class Crc32
{
    /** Generator polynomial of the non-reflected BZip2 CRC-32. */
    private static final int POLYNOMIAL = 0x04c11db7;

    /** Lookup table: CRC contribution of every possible top byte. */
    private static final int[] CRC_32_TABLE = new int[256];

    static {
        // Build the table instead of spelling out 256 literals: each entry
        // is the byte value shifted into the top of the register and run
        // through eight steps of the bitwise CRC recurrence.
        for (int i = 0; i < 256; i++) {
            int entry = i << 24;
            for (int bit = 0; bit < 8; bit++) {
                // XOR in the polynomial whenever the top bit falls out.
                entry = (entry << 1) ^ ((entry < 0) ? POLYNOMIAL : 0);
            }
            CRC_32_TABLE[i] = entry;
        }
    }

    private int globalCrc;

    Crc32()
    {
        initialiseCRC();
    }

    /** Resets the register so a new checksum can be computed. */
    void initialiseCRC()
    {
        globalCrc = 0xffffffff;
    }

    /** Returns the finished checksum (register complemented). */
    int getFinalCRC()
    {
        return ~globalCrc;
    }

    /** Folds one byte value into the running checksum. */
    void updateCRC(int value)
    {
        int index = (globalCrc >> 24) ^ value;
        if (index < 0) {
            index += 256;
        }
        globalCrc = (globalCrc << 8) ^ CRC_32_TABLE[index];
    }

    /** Folds {@code repeat} copies of the same byte value into the checksum. */
    void updateCRC(int value, int repeat)
    {
        int crc = this.globalCrc;
        for (int i = 0; i < repeat; i++) {
            final int index = (crc >> 24) ^ value;
            crc = (crc << 8) ^ CRC_32_TABLE[(index >= 0) ? index : (index + 256)];
        }
        this.globalCrc = crc;
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateCompressor.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.deflate;
import io.airlift.compress.v3.Compressor;
/**
 * A {@link Compressor} producing raw deflate output.
 *
 * <p>The static factories prefer the native implementation when it is
 * enabled on this JVM and fall back to the pure-Java one otherwise.
 */
public interface DeflateCompressor
        extends Compressor
{
    /**
     * Creates a deflate compressor using the implementation's default
     * compression level.
     */
    static DeflateCompressor create()
    {
        if (DeflateNativeCompressor.isEnabled()) {
            return new DeflateNativeCompressor();
        }
        return new DeflateJavaCompressor();
    }

    /**
     * Creates a deflate compressor with the given compression level.
     *
     * @param compressionLevel deflate level passed through to the selected
     *        implementation's constructor
     */
    static DeflateCompressor create(int compressionLevel)
    {
        if (DeflateNativeCompressor.isEnabled()) {
            return new DeflateNativeCompressor(compressionLevel);
        }
        return new DeflateJavaCompressor(compressionLevel);
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateDecompressor.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.deflate;
import io.airlift.compress.v3.Decompressor;
/**
 * A {@link Decompressor} for raw deflate data.
 *
 * <p>The static factory prefers the native implementation when it is
 * enabled on this JVM and falls back to the pure-Java one otherwise.
 */
public interface DeflateDecompressor
        extends Decompressor
{
    /**
     * Creates a deflate decompressor, native when available.
     */
    static DeflateDecompressor create()
    {
        if (DeflateNativeDecompressor.isEnabled()) {
            return new DeflateNativeDecompressor();
        }
        return new DeflateJavaDecompressor();
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateJavaCompressor.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.deflate;
import java.lang.foreign.MemorySegment;
import java.util.zip.Deflater;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
import static java.util.zip.Deflater.FULL_FLUSH;
/**
 * Deflate compressor backed by {@link java.util.zip.Deflater}, producing
 * raw deflate output (the {@code nowrap} flag suppresses the zlib header
 * and trailer).
 */
public class DeflateJavaCompressor
        implements DeflateCompressor
{
    // Extra slack on top of Adler's worst-case deflate bound, kept for safety.
    private static final int EXTRA_COMPRESSION_SPACE = 16;
    // Default deflate level used by the no-arg constructor.
    private static final int COMPRESSION_LEVEL = 4;

    private final int compressionLevel;

    /**
     * Creates a compressor with the default compression level.
     */
    public DeflateJavaCompressor()
    {
        this(COMPRESSION_LEVEL);
    }

    /**
     * Creates a compressor with the given deflate level.
     *
     * @param compressionLevel deflate level, 0 (store) through 9 (best compression)
     * @throws IllegalArgumentException if the level is outside 0-9
     */
    public DeflateJavaCompressor(int compressionLevel)
    {
        if (compressionLevel < 0 || compressionLevel > 9) {
            throw new IllegalArgumentException("Invalid compression level: %d (must be 0-9)".formatted(compressionLevel));
        }
        this.compressionLevel = compressionLevel;
    }

    @Override
    public int maxCompressedLength(int uncompressedSize)
    {
        // From Mark Adler's post http://stackoverflow.com/questions/1207877/java-size-of-compression-output-bytearray
        return uncompressedSize + ((uncompressedSize + 7) >> 3) + ((uncompressedSize + 63) >> 6) + 5 + EXTRA_COMPRESSION_SPACE;
    }

    @Override
    public int compress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
    {
        verifyRange(input, inputOffset, inputLength);
        verifyRange(output, outputOffset, maxOutputLength);
        try (Deflater deflater = new Deflater(compressionLevel, true)) {
            deflater.setInput(input, inputOffset, inputLength);
            deflater.finish();
            int compressedDataLength = deflater.deflate(output, outputOffset, maxOutputLength, FULL_FLUSH);
            // finished() is false when deflate stopped because the output
            // range was exhausted before consuming all input.
            if (!deflater.finished()) {
                throw new IllegalStateException("Output buffer too small");
            }
            return compressedDataLength;
        }
    }

    @Override
    public int compress(MemorySegment input, MemorySegment output)
    {
        int maxCompressedLength = maxCompressedLength(toIntExact(input.byteSize()));
        if (output.byteSize() < maxCompressedLength) {
            throw new IllegalArgumentException("Output buffer must be at least " + maxCompressedLength + " bytes");
        }
        try (Deflater deflater = new Deflater(compressionLevel, true)) {
            deflater.setInput(input.asByteBuffer());
            deflater.finish();
            int compressedDataLength = deflater.deflate(output.asByteBuffer(), FULL_FLUSH);
            if (!deflater.finished()) {
                throw new IllegalStateException("maxCompressedLength formula is incorrect, because deflate produced more data");
            }
            return compressedDataLength;
        }
    }

    /**
     * Validates that {@code [offset, offset + length)} lies within the array.
     *
     * @throws IllegalArgumentException if the range is out of bounds
     */
    private static void verifyRange(byte[] data, int offset, int length)
    {
        requireNonNull(data, "data is null");
        // Written as a subtraction so that a huge offset + length cannot
        // overflow int and slip past the bounds check.
        if (offset < 0 || length < 0 || offset > data.length - length) {
            throw new IllegalArgumentException("Invalid offset or length (%s, %s) in array of length %s".formatted(offset, length, data.length));
        }
    }
}
================================================
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateJavaDecompressor.java
================================================
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.compress.v3.deflate;
import io.airlift.compress.v3.MalformedInputException;
import java.lang.foreign.MemorySegment;
import java.nio.ByteBuffer;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
public class DeflateJavaDecompressor
implements DeflateDecompressor
{
@Override
public int decompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
throws MalformedInputException
{
verifyRange(input, inputOffset, inputLength);
verifyRange(output, outputOffset, maxOutputLength);
try (Inflater inflater = new Inflater(true)) {
inflater.setInput(input, inputOffset, inputLength);
int uncompressedLength = 0;
while (true) {
uncompressedLength += inflater.inflate(output, outputOffset + uncompressedLength, maxOutputLength - uncompressedLength);
if (inflater.finished() || uncompressedLength >= maxOutputLength) {
break;
}
if (inflater.needsInput()) {
throw new MalformedInputException(0, format("Premature end of input stream. Input length = %s, uncompressed length = %d", inputLength, uncompressedLength));
}
}
gitextract_x_n9lj4e/
├── .github/
│ ├── dependabot.yml
│ ├── release.yml
│ └── workflows/
│ ├── main.yml
│ ├── release-2x.yml
│ └── release.yml
├── .gitignore
├── .mvn/
│ ├── maven.config
│ ├── settings.xml
│ └── wrapper/
│ └── maven-wrapper.properties
├── README.md
├── bin/
│ └── download.sh
├── license.txt
├── mvnw
├── notice.md
├── pom.xml
├── src/
│ ├── checkstyle/
│ │ └── checks.xml
│ ├── license/
│ │ └── LICENSE-HEADER.txt
│ ├── main/
│ │ └── java/
│ │ └── io/
│ │ └── airlift/
│ │ └── compress/
│ │ └── v3/
│ │ ├── Compressor.java
│ │ ├── Decompressor.java
│ │ ├── IncompatibleJvmException.java
│ │ ├── MalformedInputException.java
│ │ ├── bzip2/
│ │ │ ├── BZip2Codec.java
│ │ │ ├── BZip2Constants.java
│ │ │ ├── BZip2HadoopInputStream.java
│ │ │ ├── BZip2HadoopOutputStream.java
│ │ │ ├── BZip2HadoopStreams.java
│ │ │ ├── CBZip2InputStream.java
│ │ │ ├── CBZip2OutputStream.java
│ │ │ └── Crc32.java
│ │ ├── deflate/
│ │ │ ├── DeflateCompressor.java
│ │ │ ├── DeflateDecompressor.java
│ │ │ ├── DeflateJavaCompressor.java
│ │ │ ├── DeflateJavaDecompressor.java
│ │ │ ├── DeflateNative.java
│ │ │ ├── DeflateNativeCompressor.java
│ │ │ ├── DeflateNativeDecompressor.java
│ │ │ ├── JdkDeflateCodec.java
│ │ │ ├── JdkDeflateHadoopInputStream.java
│ │ │ ├── JdkDeflateHadoopOutputStream.java
│ │ │ └── JdkDeflateHadoopStreams.java
│ │ ├── gzip/
│ │ │ ├── JdkGzipCodec.java
│ │ │ ├── JdkGzipConstants.java
│ │ │ ├── JdkGzipHadoopInputStream.java
│ │ │ ├── JdkGzipHadoopOutputStream.java
│ │ │ └── JdkGzipHadoopStreams.java
│ │ ├── hadoop/
│ │ │ ├── CodecAdapter.java
│ │ │ ├── CompressionInputStreamAdapter.java
│ │ │ ├── CompressionOutputStreamAdapter.java
│ │ │ ├── HadoopInputStream.java
│ │ │ ├── HadoopOutputStream.java
│ │ │ └── HadoopStreams.java
│ │ ├── internal/
│ │ │ ├── NativeLoader.java
│ │ │ └── NativeSignature.java
│ │ ├── lz4/
│ │ │ ├── Lz4Codec.java
│ │ │ ├── Lz4Compressor.java
│ │ │ ├── Lz4Constants.java
│ │ │ ├── Lz4Decompressor.java
│ │ │ ├── Lz4HadoopInputStream.java
│ │ │ ├── Lz4HadoopOutputStream.java
│ │ │ ├── Lz4HadoopStreams.java
│ │ │ ├── Lz4JavaCompressor.java
│ │ │ ├── Lz4JavaDecompressor.java
│ │ │ ├── Lz4Native.java
│ │ │ ├── Lz4NativeCompressor.java
│ │ │ ├── Lz4NativeDecompressor.java
│ │ │ ├── Lz4RawCompressor.java
│ │ │ ├── Lz4RawDecompressor.java
│ │ │ └── UnsafeUtil.java
│ │ ├── lzo/
│ │ │ ├── LzoCodec.java
│ │ │ ├── LzoCompressor.java
│ │ │ ├── LzoConstants.java
│ │ │ ├── LzoDecompressor.java
│ │ │ ├── LzoHadoopInputStream.java
│ │ │ ├── LzoHadoopOutputStream.java
│ │ │ ├── LzoHadoopStreams.java
│ │ │ ├── LzoRawCompressor.java
│ │ │ ├── LzoRawDecompressor.java
│ │ │ ├── LzopCodec.java
│ │ │ ├── LzopHadoopInputStream.java
│ │ │ ├── LzopHadoopOutputStream.java
│ │ │ ├── LzopHadoopStreams.java
│ │ │ └── UnsafeUtil.java
│ │ ├── snappy/
│ │ │ ├── Crc32C.java
│ │ │ ├── SnappyCodec.java
│ │ │ ├── SnappyCompressor.java
│ │ │ ├── SnappyConstants.java
│ │ │ ├── SnappyDecompressor.java
│ │ │ ├── SnappyFramed.java
│ │ │ ├── SnappyFramedInputStream.java
│ │ │ ├── SnappyFramedOutputStream.java
│ │ │ ├── SnappyHadoopInputStream.java
│ │ │ ├── SnappyHadoopOutputStream.java
│ │ │ ├── SnappyHadoopStreams.java
│ │ │ ├── SnappyInternalUtils.java
│ │ │ ├── SnappyJavaCompressor.java
│ │ │ ├── SnappyJavaDecompressor.java
│ │ │ ├── SnappyNative.java
│ │ │ ├── SnappyNativeCompressor.java
│ │ │ ├── SnappyNativeDecompressor.java
│ │ │ ├── SnappyRawCompressor.java
│ │ │ ├── SnappyRawDecompressor.java
│ │ │ └── UnsafeUtil.java
│ │ ├── xxhash/
│ │ │ ├── XxHash128.java
│ │ │ ├── XxHash3Bindings.java
│ │ │ ├── XxHash3Hasher.java
│ │ │ ├── XxHash3Hasher128.java
│ │ │ ├── XxHash3Native.java
│ │ │ ├── XxHash64Bindings.java
│ │ │ ├── XxHash64Hasher.java
│ │ │ ├── XxHash64JavaHasher.java
│ │ │ └── XxHash64NativeHasher.java
│ │ └── zstd/
│ │ ├── BitInputStream.java
│ │ ├── BitOutputStream.java
│ │ ├── BlockCompressionState.java
│ │ ├── BlockCompressor.java
│ │ ├── CompressionContext.java
│ │ ├── CompressionParameters.java
│ │ ├── Constants.java
│ │ ├── DoubleFastBlockCompressor.java
│ │ ├── FiniteStateEntropy.java
│ │ ├── FrameHeader.java
│ │ ├── FseCompressionTable.java
│ │ ├── FseTableReader.java
│ │ ├── Histogram.java
│ │ ├── Huffman.java
│ │ ├── HuffmanCompressionContext.java
│ │ ├── HuffmanCompressionTable.java
│ │ ├── HuffmanCompressionTableWorkspace.java
│ │ ├── HuffmanCompressor.java
│ │ ├── HuffmanTableWriterWorkspace.java
│ │ ├── NodeTable.java
│ │ ├── RepeatedOffsets.java
│ │ ├── SequenceEncoder.java
│ │ ├── SequenceEncodingContext.java
│ │ ├── SequenceStore.java
│ │ ├── UnsafeUtil.java
│ │ ├── Util.java
│ │ ├── XxHash64.java
│ │ ├── ZstdCodec.java
│ │ ├── ZstdCompressor.java
│ │ ├── ZstdDecompressor.java
│ │ ├── ZstdFrameCompressor.java
│ │ ├── ZstdFrameDecompressor.java
│ │ ├── ZstdHadoopInputStream.java
│ │ ├── ZstdHadoopOutputStream.java
│ │ ├── ZstdHadoopStreams.java
│ │ ├── ZstdIncrementalFrameDecompressor.java
│ │ ├── ZstdInputStream.java
│ │ ├── ZstdJavaCompressor.java
│ │ ├── ZstdJavaDecompressor.java
│ │ ├── ZstdNative.java
│ │ ├── ZstdNativeCompressor.java
│ │ ├── ZstdNativeDecompressor.java
│ │ └── ZstdOutputStream.java
│ └── test/
│ ├── java/
│ │ └── io/
│ │ └── airlift/
│ │ └── compress/
│ │ └── v3/
│ │ ├── AbstractTestCompression.java
│ │ ├── HadoopCodecCompressor.java
│ │ ├── HadoopCodecDecompressor.java
│ │ ├── HadoopCodecDecompressorByteAtATime.java
│ │ ├── HadoopNative.java
│ │ ├── TestingData.java
│ │ ├── Util.java
│ │ ├── benchmark/
│ │ │ ├── Algorithm.java
│ │ │ ├── BytesCounter.java
│ │ │ ├── CompressionBenchmark.java
│ │ │ ├── DataSet.java
│ │ │ └── HashBenchmark.java
│ │ ├── bzip2/
│ │ │ ├── TestBZip2Codec.java
│ │ │ └── TestBZip2CodecByteAtATime.java
│ │ ├── deflate/
│ │ │ ├── MockJdkDeflateCompressor.java
│ │ │ ├── TestDeflate.java
│ │ │ ├── TestDeflateNative.java
│ │ │ ├── TestJdkDeflateCodec.java
│ │ │ └── TestJdkDeflateCodecByteAtATime.java
│ │ ├── gzip/
│ │ │ ├── MockJdkGzipCompressor.java
│ │ │ ├── TestJdkGzipCodec.java
│ │ │ ├── TestJdkGzipCodecByteAtATime.java
│ │ │ └── TestJdkGzipHadoopInputStream.java
│ │ ├── internal/
│ │ │ └── TestNativeLoader.java
│ │ ├── lz4/
│ │ │ ├── AbstractTestLz4.java
│ │ │ ├── BenchmarkCount.java
│ │ │ ├── TestLz4.java
│ │ │ ├── TestLz4Codec.java
│ │ │ ├── TestLz4CodecByteAtATime.java
│ │ │ ├── TestLz4Native.java
│ │ │ └── TestLz4NativeFastest.java
│ │ ├── lzo/
│ │ │ ├── TestLzo.java
│ │ │ ├── TestLzoCodec.java
│ │ │ ├── TestLzoCodecByteAtATime.java
│ │ │ ├── TestLzopCodec.java
│ │ │ └── TestLzopCodecByteAtATime.java
│ │ ├── snappy/
│ │ │ ├── AbstractTestSnappy.java
│ │ │ ├── ByteArrayOutputStream.java
│ │ │ ├── RandomGenerator.java
│ │ │ ├── TestSnappyCodec.java
│ │ │ ├── TestSnappyCodecByteAtATime.java
│ │ │ ├── TestSnappyJava.java
│ │ │ ├── TestSnappyNative.java
│ │ │ └── TestSnappyStream.java
│ │ ├── thirdparty/
│ │ │ ├── HadoopLzoCompressor.java
│ │ │ ├── HadoopLzoDecompressor.java
│ │ │ ├── JPountzLz4Compressor.java
│ │ │ ├── JPountzLz4Decompressor.java
│ │ │ ├── JdkDeflateCompressor.java
│ │ │ ├── JdkInflateDecompressor.java
│ │ │ ├── XerialSnappyCompressor.java
│ │ │ ├── XerialSnappyDecompressor.java
│ │ │ ├── ZstdJniCompressor.java
│ │ │ └── ZstdJniDecompressor.java
│ │ ├── xxhash/
│ │ │ ├── AbstractTestXxHash64.java
│ │ │ ├── TestXxHash3.java
│ │ │ ├── TestXxHash64.java
│ │ │ └── TestXxHash64Java.java
│ │ └── zstd/
│ │ ├── AbstractTestZstd.java
│ │ ├── TestCompressor.java
│ │ ├── TestUtil.java
│ │ ├── TestXxHash64.java
│ │ ├── TestZstd.java
│ │ ├── TestZstdCodec.java
│ │ ├── TestZstdCodecByteAtATime.java
│ │ ├── TestZstdFast.java
│ │ ├── TestZstdHigh.java
│ │ ├── TestZstdNative.java
│ │ ├── TestZstdPartial.java
│ │ ├── TestZstdStream.java
│ │ ├── ZstdPartialDecompressor.java
│ │ ├── ZstdStreamCompressor.java
│ │ └── ZstdStreamDecompressor.java
│ └── resources/
│ └── data/
│ ├── lzo/
│ │ ├── test
│ │ ├── test-adler32-both.lzo
│ │ ├── test-adler32.lzo
│ │ ├── test-crc32-both.lzo
│ │ ├── test-crc32.lzo
│ │ └── test-no-checksum.lzo
│ └── zstd/
│ ├── bad-second-frame.zst
│ ├── incompressible
│ ├── large-rle
│ ├── multiple-frames
│ ├── multiple-frames.zst
│ ├── offset-before-start.zst
│ ├── small-literals-after-incompressible-literals
│ ├── with-checksum
│ └── with-checksum.zst
└── testdata/
├── artificial/
│ ├── a.txt
│ ├── aaa.txt
│ ├── alphabet.txt
│ └── random.txt
├── calgary/
│ ├── bib
│ ├── book1
│ ├── book2
│ ├── geo
│ ├── news
│ ├── obj1
│ ├── obj2
│ ├── paper1
│ ├── paper2
│ ├── paper3
│ ├── paper4
│ ├── paper5
│ ├── paper6
│ ├── pic
│ ├── progc
│ ├── progl
│ ├── progp
│ └── trans
├── canterbury/
│ ├── alice29.txt
│ ├── asyoulik.txt
│ ├── cp.html
│ ├── fields.c
│ ├── grammar.lsp
│ ├── kennedy.xls
│ ├── lcet10.txt
│ ├── plrabn12.txt
│ ├── ptt5
│ ├── sum
│ └── xargs.1
├── geo.protodata
├── html
├── kppkn.gtb
├── large/
│ ├── E.coli
│ ├── bible.txt
│ └── world192.txt
├── silesia/
│ ├── dickens
│ ├── mozilla
│ ├── mr
│ ├── nci
│ ├── ooffice
│ ├── osdb
│ ├── reymont
│ ├── samba
│ ├── sao
│ ├── webster
│ ├── x-ray
│ └── xml
└── urls.10K
SYMBOL INDEX (1650 symbols across 209 files)
FILE: src/main/java/io/airlift/compress/v3/Compressor.java
type Compressor (line 18) | public interface Compressor
method maxCompressedLength (line 20) | int maxCompressedLength(int uncompressedSize);
method compress (line 25) | int compress(byte[] input, int inputOffset, int inputLength, byte[] ou...
method compress (line 30) | int compress(MemorySegment input, MemorySegment output);
method getRetainedSizeInBytes (line 32) | default int getRetainedSizeInBytes(int inputLength)
FILE: src/main/java/io/airlift/compress/v3/Decompressor.java
type Decompressor (line 18) | public interface Decompressor
method decompress (line 23) | int decompress(byte[] input, int inputOffset, int inputLength, byte[] ...
method decompress (line 29) | int decompress(MemorySegment input, MemorySegment output)
FILE: src/main/java/io/airlift/compress/v3/IncompatibleJvmException.java
class IncompatibleJvmException (line 16) | public class IncompatibleJvmException
method IncompatibleJvmException (line 19) | public IncompatibleJvmException(String message)
FILE: src/main/java/io/airlift/compress/v3/MalformedInputException.java
class MalformedInputException (line 16) | public class MalformedInputException
method MalformedInputException (line 21) | public MalformedInputException(long offset)
method MalformedInputException (line 26) | public MalformedInputException(long offset, String reason)
method getOffset (line 32) | public long getOffset()
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2Codec.java
class BZip2Codec (line 18) | public class BZip2Codec
method BZip2Codec (line 21) | public BZip2Codec()
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2Constants.java
class BZip2Constants (line 30) | final class BZip2Constants
method BZip2Constants (line 40) | private BZip2Constants() {}
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopInputStream.java
class BZip2HadoopInputStream (line 23) | class BZip2HadoopInputStream
method BZip2HadoopInputStream (line 29) | public BZip2HadoopInputStream(InputStream in)
method read (line 34) | @Override
method read (line 62) | @Override
method resetState (line 74) | @Override
method close (line 81) | @Override
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopOutputStream.java
class BZip2HadoopOutputStream (line 24) | class BZip2HadoopOutputStream
method BZip2HadoopOutputStream (line 31) | public BZip2HadoopOutputStream(OutputStream out)
method write (line 36) | @Override
method write (line 44) | @Override
method finish (line 52) | @Override
method flush (line 62) | @Override
method close (line 69) | @Override
method openStreamIfNecessary (line 85) | private void openStreamIfNecessary()
FILE: src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopStreams.java
class BZip2HadoopStreams (line 26) | public class BZip2HadoopStreams
method getDefaultFileExtension (line 29) | @Override
method getHadoopCodecName (line 35) | @Override
method createInputStream (line 41) | @Override
method createOutputStream (line 47) | @Override
FILE: src/main/java/io/airlift/compress/v3/bzip2/CBZip2InputStream.java
class CBZip2InputStream (line 75) | @SuppressWarnings({ "AssignmentToForLoopParameter", "SpellCheckingInspec...
type STATE (line 131) | public enum STATE
method CBZip2InputStream (line 176) | public CBZip2InputStream(final InputStream in)
method getProcessedByteCount (line 188) | public long getProcessedByteCount()
method updateProcessedByteCount (line 200) | private void updateProcessedByteCount(int count)
method readAByte (line 212) | private int readAByte(InputStream inStream)
method skipToNextMarker (line 235) | private boolean skipToNextMarker(long marker, int markerBitLength)
method makeMaps (line 281) | private void makeMaps()
method changeStateToProcessABlock (line 297) | private void changeStateToProcessABlock()
method read (line 309) | @Override
method read (line 340) | @Override
method read0 (line 383) | private int read0()
method init (line 427) | private void init()
method initBlock (line 448) | private void initBlock()
method endBlock (line 489) | private void endBlock()
method complete (line 510) | private void complete()
method close (line 522) | @Override
method bsR (line 540) | private long bsR(final long n)
method bsGetBit (line 566) | private boolean bsGetBit()
method bsGetUByte (line 588) | private char bsGetUByte()
method bsGetInt (line 594) | private int bsGetInt()
method hbCreateDecodeTables (line 603) | private static void hbCreateDecodeTables(final int[] limit,
method recvDecodingTables (line 642) | private void recvDecodingTables()
method createHuffmanDecodingTables (line 728) | private void createHuffmanDecodingTables(final int alphaSize,
method getAndMoveToFrontDecode (line 757) | private void getAndMoveToFrontDecode()
method getAndMoveToFrontDecode0 (line 949) | private int getAndMoveToFrontDecode0(final int groupNo)
method setupBlock (line 984) | private void setupBlock()
method setupRandPartA (line 1025) | @SuppressWarnings("checkstyle:InnerAssignment")
method setupNoRandPartA (line 1055) | private void setupNoRandPartA()
method setupRandPartB (line 1076) | private void setupRandPartB()
method setupRandPartC (line 1109) | private void setupRandPartC()
method setupNoRandPartB (line 1125) | private void setupNoRandPartB()
method setupNoRandPartC (line 1143) | private void setupNoRandPartC()
class Data (line 1160) | private static final class Data
method Data (line 1195) | Data(int blockSize100k)
method initTT (line 1207) | int[] initTT(int length)
FILE: src/main/java/io/airlift/compress/v3/bzip2/CBZip2OutputStream.java
class CBZip2OutputStream (line 132) | class CBZip2OutputStream
method hbMakeCodeLengths (line 243) | private static void hbMakeCodeLengths(final byte[] len, final int[] freq,
method CBZip2OutputStream (line 449) | public CBZip2OutputStream(final OutputStream out)
method CBZip2OutputStream (line 471) | private CBZip2OutputStream(final OutputStream out, final int blockSize)
method write (line 488) | @Override
method writeRun (line 500) | private void writeRun()
method finalize (line 559) | @Override
method finish (line 567) | public void finish()
method close (line 586) | @Override
method flush (line 603) | @Override
method init (line 613) | private void init()
method initBlock (line 633) | private void initBlock()
method endBlock (line 649) | private void endBlock()
method endCompression (line 697) | private void endCompression()
method write (line 717) | @Override
method write0 (line 740) | private void write0(int b)
method hbAssignCodes (line 765) | private static void hbAssignCodes(final int[] code, final byte[] length,
method bsFinishedWithStream (line 780) | private void bsFinishedWithStream()
method bsW (line 791) | private void bsW(final int n, final int v)
method bsPutUByte (line 808) | private void bsPutUByte(final int c)
method bsPutInt (line 814) | private void bsPutInt(final int u)
method sendMTFValues (line 823) | private void sendMTFValues()
method sendMTFValues0 (line 868) | private void sendMTFValues0(final int nGroups, final int alphaSize)
method sendMTFValues1 (line 905) | private int sendMTFValues1(final int nGroups, final int alphaSize)
method sendMTFValues2 (line 1024) | private void sendMTFValues2(final int nGroups, final int nSelectors)
method sendMTFValues3 (line 1052) | private void sendMTFValues3(final int nGroups, final int alphaSize)
method sendMTFValues4 (line 1078) | private void sendMTFValues4()
method sendMTFValues5 (line 1125) | private void sendMTFValues5(final int nGroups, final int nSelectors)
method sendMTFValues6 (line 1163) | private void sendMTFValues6(final int nGroups, final int alphaSize)
method sendMTFValues7 (line 1228) | private void sendMTFValues7()
method moveToFrontCodeAndSend (line 1275) | private void moveToFrontCodeAndSend()
method mainSimpleSort (line 1293) | @SuppressWarnings("checkstyle:InnerAssignment")
method vswap (line 1502) | private static void vswap(int[] fmap, int p1, int p2, int n)
method med3 (line 1512) | private static byte med3(byte a, byte b, byte c)
method blockSort (line 1517) | private void blockSort()
method mainQSort3 (line 1546) | private void mainQSort3(final Data dataShadow, final int loSt, final i...
method mainSort (line 1652) | private void mainSort()
method randomiseBlock (line 1818) | private void randomiseBlock()
method generateMTFValues (line 1848) | private void generateMTFValues()
class Data (line 1957) | private static final class Data
method Data (line 2007) | Data(int blockSize100k)
FILE: src/main/java/io/airlift/compress/v3/bzip2/Crc32.java
class Crc32 (line 17) | final class Crc32
method Crc32 (line 74) | Crc32()
method initialiseCRC (line 79) | void initialiseCRC()
method getFinalCRC (line 84) | int getFinalCRC()
method updateCRC (line 89) | void updateCRC(int value)
method updateCRC (line 98) | void updateCRC(int value, int repeat)
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateCompressor.java
type DeflateCompressor (line 18) | public interface DeflateCompressor
method create (line 21) | static DeflateCompressor create()
method create (line 29) | static DeflateCompressor create(int compressionLevel)
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateDecompressor.java
type DeflateDecompressor (line 18) | public interface DeflateDecompressor
method create (line 21) | static DeflateDecompressor create()
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateJavaCompressor.java
class DeflateJavaCompressor (line 23) | public class DeflateJavaCompressor
method DeflateJavaCompressor (line 31) | public DeflateJavaCompressor()
method DeflateJavaCompressor (line 36) | public DeflateJavaCompressor(int compressionLevel)
method maxCompressedLength (line 44) | @Override
method compress (line 51) | @Override
method compress (line 69) | @Override
method verifyRange (line 89) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateJavaDecompressor.java
class DeflateJavaDecompressor (line 26) | public class DeflateJavaDecompressor
method decompress (line 29) | @Override
method decompress (line 61) | @Override
method verifyRange (line 92) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateNative.java
class DeflateNative (line 25) | final class DeflateNative
method DeflateNative (line 43) | private DeflateNative() {}
method isEnabled (line 72) | public static boolean isEnabled()
method verifyEnabled (line 77) | public static void verifyEnabled()
method allocCompressor (line 84) | public static MemorySegment allocCompressor(int compressionLevel)
method freeCompressor (line 101) | public static void freeCompressor(MemorySegment compressor)
method compressBound (line 114) | public static long compressBound(MemorySegment compressor, long inputL...
method compress (line 127) | public static long compress(MemorySegment compressor, MemorySegment in...
method allocDecompressor (line 146) | public static MemorySegment allocDecompressor()
method freeDecompressor (line 163) | public static void freeDecompressor(MemorySegment decompressor)
method decompress (line 176) | public static int decompress(MemorySegment decompressor, MemorySegment...
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateNativeCompressor.java
class DeflateNativeCompressor (line 25) | public class DeflateNativeCompressor
method DeflateNativeCompressor (line 33) | public DeflateNativeCompressor()
method DeflateNativeCompressor (line 38) | public DeflateNativeCompressor(int compressionLevel)
method isEnabled (line 48) | public static boolean isEnabled()
method maxCompressedLength (line 53) | @Override
method compress (line 59) | @Override
method compress (line 67) | @Override
method run (line 76) | @Override
FILE: src/main/java/io/airlift/compress/v3/deflate/DeflateNativeDecompressor.java
class DeflateNativeDecompressor (line 33) | public class DeflateNativeDecompressor
method DeflateNativeDecompressor (line 40) | public DeflateNativeDecompressor()
method isEnabled (line 47) | public static boolean isEnabled()
method decompress (line 52) | @Override
method decompress (line 61) | @Override
method decompress (line 68) | private int decompress(MemorySegment input, long inputLength, MemorySe...
method run (line 86) | @Override
FILE: src/main/java/io/airlift/compress/v3/deflate/JdkDeflateCodec.java
class JdkDeflateCodec (line 18) | public class JdkDeflateCodec
method JdkDeflateCodec (line 21) | public JdkDeflateCodec()
FILE: src/main/java/io/airlift/compress/v3/deflate/JdkDeflateHadoopInputStream.java
class JdkDeflateHadoopInputStream (line 26) | class JdkDeflateHadoopInputStream
method JdkDeflateHadoopInputStream (line 36) | public JdkDeflateHadoopInputStream(InputStream input, int bufferSize)
method read (line 43) | @Override
method read (line 54) | @Override
method resetState (line 111) | @Override
method close (line 117) | @Override
FILE: src/main/java/io/airlift/compress/v3/deflate/JdkDeflateHadoopOutputStream.java
class JdkDeflateHadoopOutputStream (line 24) | class JdkDeflateHadoopOutputStream
method JdkDeflateHadoopOutputStream (line 33) | public JdkDeflateHadoopOutputStream(OutputStream output, int bufferSize)
method write (line 40) | @Override
method write (line 48) | @Override
method finish (line 58) | @Override
method compress (line 71) | private void compress()
method flush (line 78) | @Override
method close (line 85) | @Override
FILE: src/main/java/io/airlift/compress/v3/deflate/JdkDeflateHadoopStreams.java
class JdkDeflateHadoopStreams (line 27) | public class JdkDeflateHadoopStreams
method getDefaultFileExtension (line 32) | @Override
method getHadoopCodecName (line 38) | @Override
method createInputStream (line 44) | @Override
method createOutputStream (line 51) | @Override
FILE: src/main/java/io/airlift/compress/v3/gzip/JdkGzipCodec.java
class JdkGzipCodec (line 18) | public class JdkGzipCodec
method JdkGzipCodec (line 21) | public JdkGzipCodec()
FILE: src/main/java/io/airlift/compress/v3/gzip/JdkGzipConstants.java
class JdkGzipConstants (line 16) | final class JdkGzipConstants
method JdkGzipConstants (line 20) | private JdkGzipConstants() {}
FILE: src/main/java/io/airlift/compress/v3/gzip/JdkGzipHadoopInputStream.java
class JdkGzipHadoopInputStream (line 26) | class JdkGzipHadoopInputStream
method JdkGzipHadoopInputStream (line 32) | public JdkGzipHadoopInputStream(InputStream input, int bufferSize)
method read (line 38) | @Override
method read (line 49) | @Override
method resetState (line 56) | @Override
method close (line 62) | @Override
class GzipBufferedInputStream (line 70) | private static class GzipBufferedInputStream
method GzipBufferedInputStream (line 73) | public GzipBufferedInputStream(InputStream input, int bufferSize)
method available (line 78) | @Override
FILE: src/main/java/io/airlift/compress/v3/gzip/JdkGzipHadoopOutputStream.java
class JdkGzipHadoopOutputStream (line 24) | class JdkGzipHadoopOutputStream
method JdkGzipHadoopOutputStream (line 30) | public JdkGzipHadoopOutputStream(OutputStream output, int bufferSize)
method write (line 36) | @Override
method write (line 44) | @Override
method finish (line 51) | @Override
method flush (line 63) | @Override
method close (line 70) | @Override
class GZIPOutputStreamWrapper (line 82) | private static class GZIPOutputStreamWrapper
method GZIPOutputStreamWrapper (line 85) | GZIPOutputStreamWrapper(OutputStream output, int bufferSize)
method end (line 91) | public void end()
FILE: src/main/java/io/airlift/compress/v3/gzip/JdkGzipHadoopStreams.java
class JdkGzipHadoopStreams (line 27) | public class JdkGzipHadoopStreams
method getDefaultFileExtension (line 32) | @Override
method getHadoopCodecName (line 38) | @Override
method createInputStream (line 44) | @Override
method createOutputStream (line 51) | @Override
FILE: src/main/java/io/airlift/compress/v3/hadoop/CodecAdapter.java
class CodecAdapter (line 35) | public class CodecAdapter
method CodecAdapter (line 42) | public CodecAdapter(Function<Optional<Configuration>, HadoopStreams> s...
method getConf (line 48) | @Override
method setConf (line 54) | @Override
method createOutputStream (line 61) | @Override
method createOutputStream (line 68) | @Override
method getCompressorType (line 78) | @Override
method createCompressor (line 84) | @Override
method createInputStream (line 90) | @Override
method createInputStream (line 97) | @Override
method getPositionSupplier (line 107) | private static PositionSupplier getPositionSupplier(InputStream inputS...
method getDecompressorType (line 115) | @Override
method createDecompressor (line 121) | @Override
method getDefaultExtension (line 127) | @Override
class CompressorAdapter (line 137) | @DoNotPool
method setInput (line 141) | @Override
method needsInput (line 147) | @Override
method setDictionary (line 153) | @Override
method getBytesRead (line 159) | @Override
method getBytesWritten (line 165) | @Override
method finish (line 171) | @Override
method finished (line 177) | @Override
method compress (line 183) | @Override
method reset (line 190) | @Override
method end (line 193) | @Override
method reinit (line 196) | @Override
class DecompressorAdapter (line 204) | @DoNotPool
method setInput (line 208) | @Override
method needsInput (line 214) | @Override
method setDictionary (line 220) | @Override
method needsDictionary (line 226) | @Override
method finished (line 232) | @Override
method decompress (line 238) | @Override
method getRemaining (line 245) | @Override
method reset (line 251) | @Override
method end (line 254) | @Override
FILE: src/main/java/io/airlift/compress/v3/hadoop/CompressionInputStreamAdapter.java
class CompressionInputStreamAdapter (line 23) | final class CompressionInputStreamAdapter
method read (line 28) | @Override
method CompressionInputStreamAdapter (line 38) | public CompressionInputStreamAdapter(HadoopInputStream input, Position...
method read (line 46) | @Override
method read (line 53) | @Override
method getPos (line 60) | @Override
method resetState (line 67) | @Override
method close (line 73) | @Override
type PositionSupplier (line 85) | public interface PositionSupplier
method getPosition (line 87) | long getPosition()
FILE: src/main/java/io/airlift/compress/v3/hadoop/CompressionOutputStreamAdapter.java
class CompressionOutputStreamAdapter (line 21) | final class CompressionOutputStreamAdapter
method write (line 25) | @Override
method CompressionOutputStreamAdapter (line 34) | public CompressionOutputStreamAdapter(HadoopOutputStream output)
method write (line 40) | @Override
method write (line 47) | @Override
method finish (line 54) | @Override
method resetState (line 61) | @Override
method close (line 64) | @Override
FILE: src/main/java/io/airlift/compress/v3/hadoop/HadoopInputStream.java
class HadoopInputStream (line 19) | @SuppressWarnings("AbstractMethodOverridesConcreteMethod")
method resetState (line 23) | public abstract void resetState();
method read (line 25) | @Override
method close (line 29) | @Override
FILE: src/main/java/io/airlift/compress/v3/hadoop/HadoopOutputStream.java
class HadoopOutputStream (line 19) | @SuppressWarnings("AbstractMethodOverridesConcreteMethod")
method finish (line 27) | public abstract void finish()
method write (line 30) | @Override
method flush (line 34) | @Override
method close (line 38) | @Override
FILE: src/main/java/io/airlift/compress/v3/hadoop/HadoopStreams.java
type HadoopStreams (line 25) | public interface HadoopStreams
method getDefaultFileExtension (line 27) | String getDefaultFileExtension();
method getHadoopCodecName (line 29) | List<String> getHadoopCodecName();
method createInputStream (line 31) | HadoopInputStream createInputStream(InputStream in)
method createOutputStream (line 34) | HadoopOutputStream createOutputStream(OutputStream out)
FILE: src/main/java/io/airlift/compress/v3/internal/NativeLoader.java
class NativeLoader (line 48) | public final class NativeLoader
method NativeLoader (line 62) | private NativeLoader() {}
method loadSymbols (line 66) | public static <T> Symbols<T> loadSymbols(String name, Class<T> methodH...
method getFunctionDescriptor (line 119) | private static FunctionDescriptor getFunctionDescriptor(Class<?> retur...
method createErrorMethodHandle (line 132) | private static MethodHandle createErrorMethodHandle(String name, Linka...
method getMemoryLayout (line 138) | private static ValueLayout getMemoryLayout(Class<?> type)
method loadLibrary (line 155) | public static SymbolLookup loadLibrary(String name)
method temporaryFile (line 177) | private static Path temporaryFile(String name, URL url)
method getLibraryPath (line 193) | private static String getLibraryPath(String name)
method getPlatform (line 198) | private static String getPlatform()
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4Codec.java
class Lz4Codec (line 24) | public class Lz4Codec
method Lz4Codec (line 27) | public Lz4Codec()
method getBufferSize (line 32) | private static int getBufferSize(Optional<Configuration> configuration)
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4Compressor.java
type Lz4Compressor (line 22) | public sealed interface Lz4Compressor
method compress (line 26) | int compress(MemorySegment input, MemorySegment output);
method create (line 28) | static Lz4Compressor create()
method create (line 33) | static Lz4Compressor create(int acceleration)
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4Constants.java
class Lz4Constants (line 16) | final class Lz4Constants
method Lz4Constants (line 25) | private Lz4Constants() {}
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4Decompressor.java
type Lz4Decompressor (line 20) | public sealed interface Lz4Decompressor
method decompress (line 24) | int decompress(MemorySegment input, MemorySegment output);
method create (line 26) | static Lz4Decompressor create()
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4HadoopInputStream.java
class Lz4HadoopInputStream (line 25) | class Lz4HadoopInputStream
method Lz4HadoopInputStream (line 38) | public Lz4HadoopInputStream(Lz4Decompressor decompressor, InputStream ...
method read (line 46) | @Override
method read (line 60) | @Override
method resetState (line 85) | @Override
method close (line 93) | @Override
method bufferCompressedData (line 100) | private int bufferCompressedData()
method readInput (line 127) | private void readInput(int length, byte[] buffer)
method readBigEndianInt (line 140) | private int readBigEndianInt()
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4HadoopOutputStream.java
class Lz4HadoopOutputStream (line 24) | class Lz4HadoopOutputStream
method Lz4HadoopOutputStream (line 36) | public Lz4HadoopOutputStream(Lz4Compressor compressor, OutputStream ou...
method write (line 46) | @Override
method write (line 56) | @Override
method finish (line 79) | @Override
method flush (line 88) | @Override
method close (line 95) | @Override
method writeNextChunk (line 107) | private void writeNextChunk(byte[] input, int inputOffset, int inputLe...
method writeBigEndianInt (line 119) | private void writeBigEndianInt(int value)
method compressionOverhead (line 128) | private static int compressionOverhead(int size)
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4HadoopStreams.java
class Lz4HadoopStreams (line 26) | public class Lz4HadoopStreams
method Lz4HadoopStreams (line 33) | public Lz4HadoopStreams()
method Lz4HadoopStreams (line 38) | public Lz4HadoopStreams(boolean useNative, int bufferSize)
method getDefaultFileExtension (line 44) | @Override
method getHadoopCodecName (line 50) | @Override
method createInputStream (line 56) | @Override
method createOutputStream (line 63) | @Override
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4JavaCompressor.java
class Lz4JavaCompressor (line 30) | public final class Lz4JavaCompressor
method maxCompressedLength (line 35) | @Override
method compress (line 41) | @Override
method compress (line 53) | @Override
method getRetainedSizeInBytes (line 72) | @Override
method verifyRange (line 78) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4JavaDecompressor.java
class Lz4JavaDecompressor (line 28) | public final class Lz4JavaDecompressor
method decompress (line 31) | @Override
method decompress (line 46) | @Override
method verifyRange (line 72) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4Native.java
class Lz4Native (line 26) | final class Lz4Native
method Lz4Native (line 28) | private Lz4Native() {}
method isEnabled (line 75) | public static boolean isEnabled()
method verifyEnabled (line 80) | public static void verifyEnabled()
method maxCompressedLength (line 87) | public static int maxCompressedLength(int inputLength)
method compress (line 97) | public static int compress(MemorySegment input, int inputLength, Memor...
method compress (line 114) | public static int compress(MemorySegment input, int inputLength, Memor...
method decompress (line 131) | public static int decompress(MemorySegment compressed, int compressedL...
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4NativeCompressor.java
class Lz4NativeCompressor (line 23) | public final class Lz4NativeCompressor
method Lz4NativeCompressor (line 29) | public Lz4NativeCompressor()
method Lz4NativeCompressor (line 34) | public Lz4NativeCompressor(int acceleration)
method isEnabled (line 43) | public static boolean isEnabled()
method maxCompressedLength (line 48) | @Override
method compress (line 54) | @Override
method compress (line 62) | @Override
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4NativeDecompressor.java
class Lz4NativeDecompressor (line 20) | public final class Lz4NativeDecompressor
method Lz4NativeDecompressor (line 23) | public Lz4NativeDecompressor()
method isEnabled (line 28) | public static boolean isEnabled()
method decompress (line 33) | @Override
method decompress (line 41) | @Override
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4RawCompressor.java
class Lz4RawCompressor (line 25) | final class Lz4RawCompressor
method Lz4RawCompressor (line 48) | private Lz4RawCompressor() {}
method hash (line 50) | private static int hash(long value, int mask)
method maxCompressedLength (line 64) | public static int maxCompressedLength(int sourceLength)
method compress (line 69) | public static int compress(
method emitLiteral (line 194) | private static long emitLiteral(Object inputBase, Object outputBase, l...
method emitMatch (line 209) | private static long emitMatch(Object outputBase, long output, long tok...
method count (line 240) | static int count(Object inputBase, final long inputAddress, final long...
method emitLastLiteral (line 269) | private static long emitLastLiteral(
method encodeRunLength (line 282) | private static long encodeRunLength(
method computeTableSize (line 304) | static int computeTableSize(int inputSize)
FILE: src/main/java/io/airlift/compress/v3/lz4/Lz4RawDecompressor.java
class Lz4RawDecompressor (line 25) | final class Lz4RawDecompressor
method Lz4RawDecompressor (line 33) | private Lz4RawDecompressor() {}
method decompress (line 35) | public static int decompress(
FILE: src/main/java/io/airlift/compress/v3/lz4/UnsafeUtil.java
class UnsafeUtil (line 26) | final class UnsafeUtil
method UnsafeUtil (line 30) | private UnsafeUtil() {}
method getBase (line 48) | public static byte[] getBase(MemorySegment segment)
method getAddress (line 63) | public static long getAddress(MemorySegment segment)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoCodec.java
class LzoCodec (line 24) | public class LzoCodec
method LzoCodec (line 27) | public LzoCodec()
method getBufferSize (line 32) | static int getBufferSize(Optional<Configuration> configuration)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoCompressor.java
class LzoCompressor (line 32) | public class LzoCompressor
method maxCompressedLength (line 37) | @Override
method compress (line 43) | @Override
method compress (line 55) | @Override
method getRetainedSizeInBytes (line 74) | @Override
method verifyRange (line 80) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoConstants.java
class LzoConstants (line 16) | final class LzoConstants
method LzoConstants (line 25) | private LzoConstants() {}
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoDecompressor.java
class LzoDecompressor (line 29) | public class LzoDecompressor
method decompress (line 32) | @Override
method decompress (line 47) | @Override
method verifyRange (line 74) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoHadoopInputStream.java
class LzoHadoopInputStream (line 25) | class LzoHadoopInputStream
method LzoHadoopInputStream (line 38) | public LzoHadoopInputStream(InputStream in, int maxUncompressedLength)
method read (line 45) | @Override
method read (line 59) | @Override
method resetState (line 84) | @Override
method close (line 92) | @Override
method bufferCompressedData (line 99) | private int bufferCompressedData()
method readInput (line 126) | private void readInput(int length, byte[] buffer)
method readBigEndianInt (line 139) | private int readBigEndianInt()
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoHadoopOutputStream.java
class LzoHadoopOutputStream (line 24) | class LzoHadoopOutputStream
method LzoHadoopOutputStream (line 36) | public LzoHadoopOutputStream(OutputStream out, int bufferSize)
method write (line 45) | @Override
method write (line 55) | @Override
method finish (line 78) | @Override
method flush (line 87) | @Override
method close (line 94) | @Override
method writeNextChunk (line 106) | private void writeNextChunk(byte[] input, int inputOffset, int inputLe...
method writeBigEndianInt (line 118) | private void writeBigEndianInt(int value)
method compressionOverhead (line 127) | private static int compressionOverhead(int size)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoHadoopStreams.java
class LzoHadoopStreams (line 24) | public class LzoHadoopStreams
method LzoHadoopStreams (line 32) | public LzoHadoopStreams()
method LzoHadoopStreams (line 37) | public LzoHadoopStreams(int bufferSize)
method getDefaultFileExtension (line 42) | @Override
method getHadoopCodecName (line 48) | @Override
method createInputStream (line 54) | @Override
method createOutputStream (line 60) | @Override
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoRawCompressor.java
class LzoRawCompressor (line 24) | final class LzoRawCompressor
method LzoRawCompressor (line 49) | private LzoRawCompressor() {}
method hash (line 51) | private static int hash(long value, int mask)
method maxCompressedLength (line 65) | public static int maxCompressedLength(int sourceLength)
method compress (line 70) | public static int compress(
method count (line 203) | private static int count(Object inputBase, final long start, long matc...
method emitLastLiteral (line 236) | private static long emitLastLiteral(
method emitLiteral (line 257) | private static long emitLiteral(
method encodeLiteralLength (line 278) | private static long encodeLiteralLength(
method emitCopy (line 311) | private static long emitCopy(Object outputBase, long output, int match...
method encodeOffset (line 354) | private static long encodeOffset(final Object outputBase, final long o...
method encodeMatchLength (line 360) | private static long encodeMatchLength(Object outputBase, long output, ...
method computeTableSize (line 382) | private static int computeTableSize(int inputSize)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzoRawDecompressor.java
class LzoRawDecompressor (line 24) | final class LzoRawDecompressor
method LzoRawDecompressor (line 29) | private LzoRawDecompressor() {}
method decompress (line 31) | @SuppressWarnings("InnerAssignment")
method toBinary (line 354) | private static String toBinary(int command)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzopCodec.java
class LzopCodec (line 20) | public class LzopCodec
method LzopCodec (line 23) | public LzopCodec()
FILE: src/main/java/io/airlift/compress/v3/lzo/LzopHadoopInputStream.java
class LzopHadoopInputStream (line 33) | class LzopHadoopInputStream
method LzopHadoopInputStream (line 64) | public LzopHadoopInputStream(InputStream in, int maxUncompressedLength)
method read (line 159) | @Override
method read (line 178) | @Override
method resetState (line 207) | @Override
method close (line 215) | @Override
method bufferCompressedData (line 222) | private int bufferCompressedData()
method skipChecksums (line 246) | private void skipChecksums(boolean compressed)
method decompress (line 263) | private void decompress(int compressedLength, byte[] output, int outpu...
method readInput (line 282) | private void readInput(byte[] buffer, int offset, int length)
method readBigEndianShort (line 295) | private static int readBigEndianShort(InputStream in)
method readBigEndianInt (line 311) | private static int readBigEndianInt(InputStream in)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzopHadoopOutputStream.java
class LzopHadoopOutputStream (line 29) | class LzopHadoopOutputStream
method LzopHadoopOutputStream (line 46) | public LzopHadoopOutputStream(OutputStream out, int bufferSize)
method write (line 84) | @Override
method write (line 94) | @Override
method finish (line 117) | @Override
method flush (line 126) | @Override
method close (line 133) | @Override
method writeNextChunk (line 146) | private void writeNextChunk(byte[] input, int inputOffset, int inputLe...
method writeBigEndianInt (line 164) | private void writeBigEndianInt(int value)
method compressionOverhead (line 173) | private static int compressionOverhead(int size)
FILE: src/main/java/io/airlift/compress/v3/lzo/LzopHadoopStreams.java
class LzopHadoopStreams (line 27) | public class LzopHadoopStreams
method LzopHadoopStreams (line 33) | public LzopHadoopStreams()
method LzopHadoopStreams (line 38) | public LzopHadoopStreams(int bufferSize)
method getDefaultFileExtension (line 43) | @Override
method getHadoopCodecName (line 49) | @Override
method createInputStream (line 55) | @Override
method createOutputStream (line 62) | @Override
FILE: src/main/java/io/airlift/compress/v3/lzo/UnsafeUtil.java
class UnsafeUtil (line 26) | final class UnsafeUtil
method UnsafeUtil (line 30) | private UnsafeUtil() {}
method getBase (line 48) | public static byte[] getBase(MemorySegment segment)
method getAddress (line 63) | public static long getAddress(MemorySegment segment)
FILE: src/main/java/io/airlift/compress/v3/snappy/Crc32C.java
class Crc32C (line 24) | final class Crc32C
method maskedCrc32c (line 29) | public static int maskedCrc32c(byte[] data)
method maskedCrc32c (line 34) | public static int maskedCrc32c(byte[] data, int offset, int length)
method mask (line 48) | public static int mask(int crc)
method unmask (line 57) | public static int unmask(int maskedCrc)
method Crc32C (line 71) | public Crc32C()
method getMaskedValue (line 76) | public int getMaskedValue()
method getIntValue (line 81) | public int getIntValue()
method getValue (line 86) | @Override
method reset (line 93) | @Override
method update (line 99) | @Override
method update (line 129) | @Override
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyCodec.java
class SnappyCodec (line 24) | public class SnappyCodec
method SnappyCodec (line 27) | public SnappyCodec()
method getBufferSize (line 32) | private static int getBufferSize(Optional<Configuration> configuration)
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyCompressor.java
type SnappyCompressor (line 20) | public sealed interface SnappyCompressor
method compress (line 24) | int compress(MemorySegment input, MemorySegment output);
method create (line 26) | static SnappyCompressor create()
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyConstants.java
class SnappyConstants (line 16) | final class SnappyConstants
method SnappyConstants (line 26) | private SnappyConstants() {}
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyDecompressor.java
type SnappyDecompressor (line 18) | public sealed interface SnappyDecompressor
method getUncompressedLength (line 22) | int getUncompressedLength(byte[] compressed, int compressedOffset);
method create (line 24) | static SnappyDecompressor create()
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyFramed.java
class SnappyFramed (line 19) | final class SnappyFramed
method SnappyFramed (line 33) | private SnappyFramed()
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyFramedInputStream.java
class SnappyFramedInputStream (line 28) | public final class SnappyFramedInputStream
method SnappyFramedInputStream (line 68) | public SnappyFramedInputStream(SnappyDecompressor decompressor, InputS...
method SnappyFramedInputStream (line 74) | public SnappyFramedInputStream(SnappyDecompressor decompressor, InputS...
method read (line 95) | @Override
method read (line 108) | @Override
method available (line 131) | @Override
method close (line 141) | @Override
method ensureBuffer (line 155) | private boolean ensureBuffer()
method allocateBuffersBasedOnSize (line 219) | private void allocateBuffersBasedOnSize(int size)
method getFrameMetaData (line 233) | private static FrameMetaData getFrameMetaData(byte[] frameHeader)
method getFrameData (line 283) | private static FrameData getFrameData(byte[] content)
method readBlockHeader (line 294) | private boolean readBlockHeader()
type FrameAction (line 310) | enum FrameAction
class FrameMetaData (line 315) | public static final class FrameMetaData
method FrameMetaData (line 320) | public FrameMetaData(FrameAction frameAction, int length)
class FrameData (line 327) | public static final class FrameData
method FrameData (line 332) | public FrameData(int checkSum, int offset)
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyFramedOutputStream.java
class SnappyFramedOutputStream (line 24) | public final class SnappyFramedOutputStream
method SnappyFramedOutputStream (line 53) | public SnappyFramedOutputStream(SnappyCompressor compressor, OutputStr...
method newChecksumFreeBenchmarkOutputStream (line 65) | public static SnappyFramedOutputStream newChecksumFreeBenchmarkOutputS...
method SnappyFramedOutputStream (line 71) | private SnappyFramedOutputStream(SnappyCompressor compressor, OutputSt...
method SnappyFramedOutputStream (line 77) | public SnappyFramedOutputStream(SnappyCompressor compressor, OutputStr...
method write (line 93) | @Override
method write (line 106) | @Override
method flush (line 143) | @Override
method close (line 154) | @Override
method copyToBuffer (line 170) | private void copyToBuffer(byte[] input, int offset, int length)
method flushBuffer (line 180) | private void flushBuffer()
method writeCompressed (line 199) | private void writeCompressed(byte[] input, int offset, int length)
method writeBlock (line 234) | private void writeBlock(OutputStream out, byte[] data, int offset, int...
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyHadoopInputStream.java
class SnappyHadoopInputStream (line 24) | class SnappyHadoopInputStream
method SnappyHadoopInputStream (line 37) | public SnappyHadoopInputStream(SnappyDecompressor decompressor, InputS...
method read (line 43) | @Override
method read (line 56) | @Override
method resetState (line 76) | @Override
method close (line 84) | @Override
method readNextChunk (line 91) | private boolean readNextChunk(byte[] userBuffer, int userOffset, int u...
method readInput (line 140) | private void readInput(int length, byte[] buffer)
method readBigEndianInt (line 153) | private int readBigEndianInt()
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyHadoopOutputStream.java
class SnappyHadoopOutputStream (line 24) | class SnappyHadoopOutputStream
method SnappyHadoopOutputStream (line 36) | public SnappyHadoopOutputStream(SnappyCompressor compressor, OutputStr...
method write (line 46) | @Override
method write (line 56) | @Override
method finish (line 79) | @Override
method flush (line 88) | @Override
method close (line 95) | @Override
method writeNextChunk (line 107) | private void writeNextChunk(byte[] input, int inputOffset, int inputLe...
method writeBigEndianInt (line 119) | private void writeBigEndianInt(int value)
method compressionOverhead (line 128) | private static int compressionOverhead(int size)
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyHadoopStreams.java
class SnappyHadoopStreams (line 26) | public class SnappyHadoopStreams
method SnappyHadoopStreams (line 33) | public SnappyHadoopStreams()
method SnappyHadoopStreams (line 38) | public SnappyHadoopStreams(boolean useNative, int bufferSize)
method getDefaultFileExtension (line 44) | @Override
method getHadoopCodecName (line 50) | @Override
method createInputStream (line 56) | @Override
method createOutputStream (line 63) | @Override
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyInternalUtils.java
class SnappyInternalUtils (line 19) | final class SnappyInternalUtils
method SnappyInternalUtils (line 21) | private SnappyInternalUtils()
method checkNotNull (line 27) | static <T> T checkNotNull(T reference, String errorMessageTemplate, Ob...
method checkArgument (line 36) | static void checkArgument(boolean expression, String errorMessageTempl...
method checkPositionIndexes (line 43) | static void checkPositionIndexes(int start, int end, int size)
method badPositionIndexes (line 51) | static String badPositionIndexes(int start, int end, int size)
method badPositionIndex (line 63) | static String badPositionIndex(int index, int size, String desc)
method readBytes (line 88) | static int readBytes(InputStream source, byte[] dest, int offset, int ...
method skip (line 115) | static int skip(InputStream source, int skip)
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyJavaCompressor.java
class SnappyJavaCompressor (line 26) | public final class SnappyJavaCompressor
method maxCompressedLength (line 31) | @Override
method compress (line 37) | @Override
method compress (line 51) | @Override
method getRetainedSizeInBytes (line 78) | @Override
method verifyRange (line 84) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyJavaDecompressor.java
class SnappyJavaDecompressor (line 28) | public final class SnappyJavaDecompressor
method getUncompressedLength (line 31) | @Override
method decompress (line 40) | @Override
method decompress (line 55) | @Override
method verifyRange (line 82) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyNative.java
class SnappyNative (line 28) | final class SnappyNative
method maxCompressedLength (line 32) | public static long maxCompressedLength(long inputLength)
method compress (line 37) | public long compress(MemorySegment input, long inputLength, MemorySegm...
method decompress (line 44) | public long decompress(MemorySegment compressed, long compressedLength...
method decompressedLength (line 51) | public long decompressedLength(MemorySegment compressed, long compress...
method isEnabled (line 93) | public static boolean isEnabled()
method verifyEnabled (line 98) | public static void verifyEnabled()
method compressInternal (line 105) | private static void compressInternal(MemorySegment input, long inputLe...
method decompressInternal (line 122) | private static void decompressInternal(MemorySegment compressed, long ...
method maxCompressedLengthInternal (line 140) | private static long maxCompressedLengthInternal(long inputLength)
method decompressedLengthInternal (line 150) | private static void decompressedLengthInternal(MemorySegment compresse...
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyNativeCompressor.java
class SnappyNativeCompressor (line 20) | public final class SnappyNativeCompressor
method SnappyNativeCompressor (line 25) | public SnappyNativeCompressor()
method isEnabled (line 30) | public static boolean isEnabled()
method maxCompressedLength (line 35) | @Override
method compress (line 41) | @Override
method compress (line 49) | @Override
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyNativeDecompressor.java
class SnappyNativeDecompressor (line 22) | public final class SnappyNativeDecompressor
method SnappyNativeDecompressor (line 27) | public SnappyNativeDecompressor()
method isEnabled (line 32) | public static boolean isEnabled()
method getUncompressedLength (line 37) | @Override
method decompress (line 48) | @Override
method decompress (line 56) | @Override
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyRawCompressor.java
class SnappyRawCompressor (line 26) | final class SnappyRawCompressor
method SnappyRawCompressor (line 45) | private SnappyRawCompressor() {}
method maxCompressedLength (line 47) | public static int maxCompressedLength(int sourceLength)
method compress (line 73) | @SuppressWarnings("IllegalToken")
method count (line 235) | private static int count(Object inputBase, final long start, long matc...
method emitLiteralLength (line 268) | private static long emitLiteralLength(Object outputBase, long output, ...
method fastCopy (line 300) | private static long fastCopy(final Object inputBase, long input, final...
method emitCopy (line 312) | private static long emitCopy(Object outputBase, long output, long inpu...
method getHashTableSize (line 347) | @SuppressWarnings("IllegalToken")
method hashBytes (line 368) | private static int hashBytes(int value, int shift)
method log2Floor (line 373) | private static int log2Floor(int n)
method writeUncompressedLength (line 383) | private static long writeUncompressedLength(Object outputBase, long ou...
FILE: src/main/java/io/airlift/compress/v3/snappy/SnappyRawDecompressor.java
class SnappyRawDecompressor (line 23) | final class SnappyRawDecompressor
method SnappyRawDecompressor (line 28) | private SnappyRawDecompressor() {}
method getUncompressedLength (line 30) | public static int getUncompressedLength(Object compressed, long compre...
method decompress (line 35) | public static int decompress(
method uncompressAll (line 70) | private static int uncompressAll(
method readUncompressedLength (line 277) | static int[] readUncompressedLength(Object compressed, long compressed...
method getUnsignedByteSafe (line 315) | private static int getUnsignedByteSafe(Object base, long address, long...
FILE: src/main/java/io/airlift/compress/v3/snappy/UnsafeUtil.java
class UnsafeUtil (line 26) | final class UnsafeUtil
method UnsafeUtil (line 30) | private UnsafeUtil() {}
method getBase (line 48) | public static byte[] getBase(MemorySegment segment)
method getAddress (line 63) | public static long getAddress(MemorySegment segment)
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash3Bindings.java
class XxHash3Bindings (line 36) | final class XxHash3Bindings
method XxHash3Bindings (line 40) | private XxHash3Bindings() {}
method isEnabled (line 164) | public static boolean isEnabled()
method verifyEnabled (line 169) | public static void verifyEnabled()
method hash64 (line 178) | public static long hash64(MemorySegment input, long length)
method hash64 (line 188) | public static long hash64(MemorySegment input, long length, long seed)
method hash128 (line 198) | public static XxHash128 hash128(MemorySegment input, long length)
method hash128 (line 211) | public static XxHash128 hash128(MemorySegment input, long length, long...
method createState (line 226) | public static MemorySegment createState()
method freeState (line 236) | public static void freeState(MemorySegment state)
method reset64 (line 251) | public static void reset64(MemorySegment state)
method reset64 (line 264) | public static void reset64(MemorySegment state, long seed)
method update64 (line 277) | public static void update64(MemorySegment state, MemorySegment input, ...
method digest64 (line 290) | public static long digest64(MemorySegment state)
method reset128 (line 302) | public static void reset128(MemorySegment state)
method reset128 (line 315) | public static void reset128(MemorySegment state, long seed)
method update128 (line 328) | public static void update128(MemorySegment state, MemorySegment input,...
method digest128 (line 341) | public static XxHash128 digest128(MemorySegment state)
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash3Hasher.java
type XxHash3Hasher (line 30) | public interface XxHash3Hasher
method update (line 38) | XxHash3Hasher update(byte[] input);
method update (line 45) | XxHash3Hasher update(byte[] input, int offset, int length);
method update (line 52) | XxHash3Hasher update(MemorySegment input);
method updateLE (line 60) | XxHash3Hasher updateLE(long value);
method updateLE (line 68) | XxHash3Hasher updateLE(int value);
method digest (line 74) | long digest();
method reset (line 81) | XxHash3Hasher reset();
method reset (line 88) | XxHash3Hasher reset(long seed);
method close (line 93) | @Override
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash3Hasher128.java
type XxHash3Hasher128 (line 30) | public interface XxHash3Hasher128
method update (line 38) | XxHash3Hasher128 update(byte[] input);
method update (line 45) | XxHash3Hasher128 update(byte[] input, int offset, int length);
method update (line 52) | XxHash3Hasher128 update(MemorySegment input);
method updateLE (line 60) | XxHash3Hasher128 updateLE(long value);
method updateLE (line 68) | XxHash3Hasher128 updateLE(int value);
method digest (line 74) | XxHash128 digest();
method reset (line 81) | XxHash3Hasher128 reset();
method reset (line 88) | XxHash3Hasher128 reset(long seed);
method close (line 93) | @Override
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash3Native.java
class XxHash3Native (line 59) | public final class XxHash3Native
method XxHash3Native (line 70) | private XxHash3Native() {}
method newHasher (line 77) | public static XxHash3Hasher newHasher()
method newHasher (line 85) | public static XxHash3Hasher newHasher(long seed)
method newHasher128 (line 93) | public static XxHash3Hasher128 newHasher128()
method newHasher128 (line 101) | public static XxHash3Hasher128 newHasher128(long seed)
method isEnabled (line 108) | public static boolean isEnabled()
method hash (line 115) | public static long hash(long value)
method hash (line 120) | public static long hash(long input, long seed)
method hash (line 137) | public static long hash(byte[] input)
method hash (line 142) | public static long hash(byte[] input, int offset, int length)
method hash (line 150) | public static long hash(MemorySegment input)
method hash (line 156) | public static long hash(byte[] input, long seed)
method hash (line 161) | public static long hash(byte[] input, int offset, int length, long seed)
method hash (line 169) | public static long hash(MemorySegment input, long seed)
method hash128 (line 177) | public static XxHash128 hash128(byte[] input)
method hash128 (line 182) | public static XxHash128 hash128(byte[] input, int offset, int length)
method hash128 (line 190) | public static XxHash128 hash128(MemorySegment input)
method hash128 (line 196) | public static XxHash128 hash128(byte[] input, long seed)
method hash128 (line 201) | public static XxHash128 hash128(byte[] input, int offset, int length, ...
method hash128 (line 209) | public static XxHash128 hash128(MemorySegment input, long seed)
class Hasher64Impl (line 220) | private static final class Hasher64Impl
method Hasher64Impl (line 228) | Hasher64Impl()
method Hasher64Impl (line 245) | Hasher64Impl(long seed)
method update (line 262) | @Override
method update (line 268) | @Override
method update (line 278) | @Override
method updateLE (line 286) | @Override
method updateLE (line 296) | @Override
method digest (line 306) | @Override
method reset (line 313) | @Override
method reset (line 321) | @Override
method checkNotClosed (line 329) | private void checkNotClosed()
method close (line 336) | @Override
class Hasher128Impl (line 349) | private static final class Hasher128Impl
method Hasher128Impl (line 357) | Hasher128Impl()
method Hasher128Impl (line 374) | Hasher128Impl(long seed)
method update (line 391) | @Override
method update (line 397) | @Override
method update (line 407) | @Override
method updateLE (line 415) | @Override
method updateLE (line 425) | @Override
method digest (line 435) | @Override
method reset (line 442) | @Override
method reset (line 450) | @Override
method checkNotClosed (line 458) | private void checkNotClosed()
method close (line 465) | @Override
method NativeResources (line 482) | NativeResources()
method run (line 487) | @Override
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash64Bindings.java
class XxHash64Bindings (line 26) | final class XxHash64Bindings
method XxHash64Bindings (line 28) | private XxHash64Bindings() {}
method isEnabled (line 69) | public static boolean isEnabled()
method verifyEnabled (line 74) | public static void verifyEnabled()
method hash (line 81) | public static long hash(MemorySegment input, long length, long seed)
method createState (line 91) | public static MemorySegment createState()
method freeState (line 101) | public static void freeState(MemorySegment state)
method reset (line 114) | public static void reset(MemorySegment state, long seed)
method update (line 127) | public static void update(MemorySegment state, MemorySegment input, lo...
method digest (line 140) | public static long digest(MemorySegment state)
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash64Hasher.java
type XxHash64Hasher (line 36) | public sealed interface XxHash64Hasher
method hash (line 44) | static long hash(long value)
method hash (line 49) | static long hash(long value, long seed)
method hash (line 55) | static long hash(byte[] input)
method hash (line 60) | static long hash(byte[] input, int offset, int length)
method hash (line 65) | static long hash(byte[] input, long seed)
method hash (line 70) | static long hash(byte[] input, int offset, int length, long seed)
method hash (line 78) | static long hash(MemorySegment input)
method hash (line 83) | static long hash(MemorySegment input, long seed)
method create (line 93) | static XxHash64Hasher create()
method create (line 98) | static XxHash64Hasher create(long seed)
method update (line 113) | XxHash64Hasher update(byte[] input);
method update (line 120) | XxHash64Hasher update(byte[] input, int offset, int length);
method update (line 127) | XxHash64Hasher update(MemorySegment input);
method updateLE (line 135) | XxHash64Hasher updateLE(long value);
method updateLE (line 143) | XxHash64Hasher updateLE(int value);
method digest (line 149) | long digest();
method reset (line 156) | XxHash64Hasher reset();
method reset (line 163) | XxHash64Hasher reset(long seed);
method close (line 168) | @Override
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash64JavaHasher.java
class XxHash64JavaHasher (line 26) | public final class XxHash64JavaHasher
method XxHash64JavaHasher (line 50) | public XxHash64JavaHasher(long seed)
method resetState (line 56) | private void resetState(long seed)
method hash (line 66) | public static long hash(long value, long seed)
method hash (line 73) | public static long hash(byte[] input, int offset, int length, long seed)
method hash (line 126) | public static long hash(MemorySegment input, long seed)
method hashSegment (line 149) | private static long hashSegment(MemorySegment input, long seed)
method update (line 199) | @Override
method update (line 205) | @Override
method update (line 249) | @Override
method updateSegment (line 267) | private XxHash64Hasher updateSegment(MemorySegment input)
method updateLE (line 309) | @Override
method updateLE (line 317) | @Override
method updateBodyFromBuffer (line 325) | private void updateBodyFromBuffer()
method digest (line 334) | @Override
method computeBody (line 367) | private long computeBody()
method reset (line 379) | @Override
method reset (line 385) | @Override
method close (line 393) | @Override
method mix (line 396) | private static long mix(long current, long value)
method update (line 401) | private static long update(long hash, long value)
method updateTail (line 407) | private static long updateTail(long hash, long value)
method updateTail (line 413) | private static long updateTail(long hash, int value)
method updateTail (line 420) | private static long updateTail(long hash, byte value)
method finalShuffle (line 427) | private static long finalShuffle(long hash)
FILE: src/main/java/io/airlift/compress/v3/xxhash/XxHash64NativeHasher.java
class XxHash64NativeHasher (line 24) | public final class XxHash64NativeHasher
method XxHash64NativeHasher (line 36) | public XxHash64NativeHasher(long seed)
method isEnabled (line 55) | public static boolean isEnabled()
method hash (line 60) | public static long hash(byte[] input, int offset, int length, long seed)
method hash (line 68) | public static long hash(MemorySegment input, long seed)
method update (line 76) | @Override
method update (line 82) | @Override
method update (line 92) | @Override
method updateLE (line 100) | @Override
method updateLE (line 110) | @Override
method digest (line 120) | @Override
method reset (line 127) | @Override
method reset (line 133) | @Override
method checkNotClosed (line 141) | private void checkNotClosed()
method close (line 148) | @Override
method NativeResources (line 164) | NativeResources()
method run (line 169) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/BitInputStream.java
class BitInputStream (line 28) | final class BitInputStream
method BitInputStream (line 30) | private BitInputStream()
method isEndOfStream (line 34) | public static boolean isEndOfStream(long startAddress, long currentAdd...
method readTail (line 39) | private static long readTail(Object inputBase, long inputAddress, int ...
method peekBits (line 64) | public static long peekBits(int bitsConsumed, long bitContainer, int n...
method peekBitsFast (line 74) | public static long peekBitsFast(int bitsConsumed, long bitContainer, i...
class Initializer (line 79) | static class Initializer
method Initializer (line 88) | public Initializer(Object inputBase, long startAddress, long endAddr...
method getBits (line 95) | public long getBits()
method getCurrentAddress (line 100) | public long getCurrentAddress()
method getBitsConsumed (line 105) | public int getBitsConsumed()
method initialize (line 110) | public void initialize()
class Loader (line 133) | static final class Loader
method Loader (line 142) | public Loader(Object inputBase, long startAddress, long currentAddre...
method getBits (line 151) | public long getBits()
method getCurrentAddress (line 156) | public long getCurrentAddress()
method getBitsConsumed (line 161) | public int getBitsConsumed()
method isOverflow (line 166) | public boolean isOverflow()
method load (line 171) | public boolean load()
FILE: src/main/java/io/airlift/compress/v3/zstd/BitOutputStream.java
class BitOutputStream (line 20) | class BitOutputStream
method BitOutputStream (line 38) | public BitOutputStream(Object outputBase, long outputAddress, int outp...
method addBits (line 49) | public void addBits(int value, int bits)
method addBitsFast (line 58) | public void addBitsFast(int value, int bits)
method flush (line 64) | public void flush()
method close (line 79) | public int close()
FILE: src/main/java/io/airlift/compress/v3/zstd/BlockCompressionState.java
class BlockCompressionState (line 18) | class BlockCompressionState
method BlockCompressionState (line 28) | public BlockCompressionState(CompressionParameters parameters, long ba...
method slideWindow (line 35) | public void slideWindow(int slideWindowSize)
method reset (line 51) | public void reset()
method enforceMaxDistance (line 57) | public void enforceMaxDistance(long inputLimit, int maxDistance)
method getBaseAddress (line 67) | public long getBaseAddress()
method getWindowBaseOffset (line 72) | public int getWindowBaseOffset()
FILE: src/main/java/io/airlift/compress/v3/zstd/BlockCompressor.java
type BlockCompressor (line 16) | interface BlockCompressor
method compressBlock (line 20) | int compressBlock(Object inputBase, long inputAddress, int inputSize, ...
FILE: src/main/java/io/airlift/compress/v3/zstd/CompressionContext.java
class CompressionContext (line 20) | class CompressionContext
method CompressionContext (line 31) | public CompressionContext(CompressionParameters parameters, long baseA...
method slideWindow (line 46) | public void slideWindow(int slideWindowSize)
method commit (line 52) | public void commit()
FILE: src/main/java/io/airlift/compress/v3/zstd/CompressionParameters.java
class CompressionParameters (line 23) | class CompressionParameters
type Strategy (line 147) | public enum Strategy
method Strategy (line 187) | Strategy(BlockCompressor compressor)
method getCompressor (line 192) | public BlockCompressor getCompressor()
method CompressionParameters (line 198) | public CompressionParameters(int windowLog, int chainLog, int hashLog,...
method getWindowLog (line 211) | public int getWindowLog()
method getWindowSize (line 216) | public int getWindowSize()
method getBlockSize (line 221) | public int getBlockSize()
method getSearchLength (line 226) | public int getSearchLength()
method getChainLog (line 231) | public int getChainLog()
method getHashLog (line 236) | public int getHashLog()
method getSearchLog (line 241) | public int getSearchLog()
method getTargetLength (line 246) | public int getTargetLength()
method getStrategy (line 251) | public Strategy getStrategy()
method compute (line 256) | public static CompressionParameters compute(int compressionLevel, int ...
method getDefaultParameters (line 301) | private static CompressionParameters getDefaultParameters(int compress...
FILE: src/main/java/io/airlift/compress/v3/zstd/Constants.java
class Constants (line 16) | final class Constants
method Constants (line 80) | private Constants()
FILE: src/main/java/io/airlift/compress/v3/zstd/DoubleFastBlockCompressor.java
class DoubleFastBlockCompressor (line 20) | class DoubleFastBlockCompressor
method compressBlock (line 27) | @Override
method count (line 187) | public static int count(Object inputBase, final long inputAddress, fin...
method hash (line 216) | private static int hash(Object inputBase, long inputAddress, int bits,...
method hash4 (line 238) | private static int hash4(int value, int bits)
method hash5 (line 243) | private static int hash5(long value, int bits)
method hash6 (line 248) | private static int hash6(long value, int bits)
method hash7 (line 253) | private static int hash7(long value, int bits)
method hash8 (line 258) | private static int hash8(long value, int bits)
FILE: src/main/java/io/airlift/compress/v3/zstd/FiniteStateEntropy.java
class FiniteStateEntropy (line 25) | final class FiniteStateEntropy
method FiniteStateEntropy (line 34) | private FiniteStateEntropy()
method decompress (line 38) | public static int decompress(FiniteStateEntropy.Table table, final Obj...
method compress (line 153) | public static int compress(Object outputBase, long outputAddress, int ...
method compress (line 158) | public static int compress(Object outputBase, long outputAddress, int ...
method optimalTableLog (line 238) | public static int optimalTableLog(int maxTableLog, int inputSize, int ...
method normalizeCounts (line 257) | public static int normalizeCounts(short[] normalizedCounts, int tableL...
method normalizeCounts2 (line 315) | private static int normalizeCounts2(short[] normalizedCounts, int tabl...
method writeNormalizedCounts (line 407) | public static int writeNormalizedCounts(Object outputBase, long output...
class Table (line 523) | public static final class Table
method Table (line 530) | public Table(int log2Capacity)
method Table (line 538) | public Table(int log2Size, int[] newState, byte[] symbol, byte[] num...
FILE: src/main/java/io/airlift/compress/v3/zstd/FrameHeader.java
class FrameHeader (line 23) | class FrameHeader
method FrameHeader (line 31) | public FrameHeader(long headerSize, int windowSize, long contentSize, ...
method computeRequiredOutputBufferLookBackSize (line 41) | public int computeRequiredOutputBufferLookBackSize()
method equals (line 52) | @Override
method hashCode (line 69) | @Override
method toString (line 75) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/FseCompressionTable.java
class FseCompressionTable (line 18) | class FseCompressionTable
method FseCompressionTable (line 26) | public FseCompressionTable(int maxTableLog, int maxSymbol)
method newInstance (line 33) | public static FseCompressionTable newInstance(short[] normalizedCounts...
method initializeRleTable (line 41) | public void initializeRleTable(int symbol)
method initialize (line 52) | public void initialize(short[] normalizedCounts, int maxSymbol, int ta...
method begin (line 116) | public int begin(byte symbol)
method encode (line 123) | public int encode(BitOutputStream stream, int state, int symbol)
method finish (line 130) | public void finish(BitOutputStream stream, int state)
method calculateStep (line 136) | private static int calculateStep(int tableSize)
method spreadSymbols (line 141) | public static int spreadSymbols(short[] normalizedCounters, int maxSym...
FILE: src/main/java/io/airlift/compress/v3/zstd/FseTableReader.java
class FseTableReader (line 22) | class FseTableReader
method readFseTable (line 27) | public int readFseTable(FiniteStateEntropy.Table table, Object inputBa...
method initializeRleTable (line 162) | public static void initializeRleTable(FiniteStateEntropy.Table table, ...
FILE: src/main/java/io/airlift/compress/v3/zstd/Histogram.java
class Histogram (line 21) | final class Histogram
method Histogram (line 23) | private Histogram()
method count (line 28) | private static void count(Object inputBase, long inputAddress, int inp...
method findLargestCount (line 41) | public static int findLargestCount(int[] counts, int maxSymbol)
method findMaxSymbol (line 53) | public static int findMaxSymbol(int[] counts, int maxSymbol)
method count (line 61) | public static void count(byte[] input, int length, int[] counts)
FILE: src/main/java/io/airlift/compress/v3/zstd/Huffman.java
class Huffman (line 26) | class Huffman
method isLoaded (line 47) | public boolean isLoaded()
method readTable (line 52) | public int readTable(final Object inputBase, final long inputAddress, ...
method decodeSingleStream (line 130) | public void decodeSingleStream(final Object inputBase, final long inpu...
method decode4Streams (line 166) | public void decode4Streams(final Object inputBase, final long inputAdd...
method decodeTail (line 291) | private void decodeTail(final Object inputBase, final long startAddres...
method decodeSymbol (line 319) | private static int decodeSymbol(Object outputBase, long outputAddress,...
FILE: src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressionContext.java
class HuffmanCompressionContext (line 16) | class HuffmanCompressionContext
method getPreviousTable (line 27) | public HuffmanCompressionTable getPreviousTable()
method borrowTemporaryTable (line 32) | public HuffmanCompressionTable borrowTemporaryTable()
method discardTemporaryTable (line 40) | public void discardTemporaryTable()
method saveChanges (line 46) | public void saveChanges()
method getCompressionTableWorkspace (line 52) | public HuffmanCompressionTableWorkspace getCompressionTableWorkspace()
method getTableWriterWorkspace (line 57) | public HuffmanTableWriterWorkspace getTableWriterWorkspace()
FILE: src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressionTable.java
class HuffmanCompressionTable (line 27) | final class HuffmanCompressionTable
method HuffmanCompressionTable (line 35) | public HuffmanCompressionTable(int capacity)
method optimalNumberOfBits (line 41) | public static int optimalNumberOfBits(int maxNumberOfBits, int inputSi...
method initialize (line 60) | public void initialize(int[] counts, int maxSymbol, int maxNumberOfBit...
method buildTree (line 105) | private int buildTree(int[] counts, int maxSymbol, NodeTable nodeTable)
method encodeSymbol (line 197) | public void encodeSymbol(BitOutputStream output, int symbol)
method write (line 202) | public int write(Object outputBase, long outputAddress, int outputSize...
method isValid (line 268) | public boolean isValid(int[] counts, int maxSymbol)
method estimateCompressedSize (line 283) | public int estimateCompressedSize(int[] counts, int maxSymbol)
method setMaxHeight (line 294) | private static int setMaxHeight(NodeTable nodeTable, int lastNonZero, ...
method compressWeights (line 395) | private static int compressWeights(Object outputBase, long outputAddre...
FILE: src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressionTableWorkspace.java
class HuffmanCompressionTableWorkspace (line 18) | class HuffmanCompressionTableWorkspace
method reset (line 28) | public void reset()
FILE: src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressor.java
class HuffmanCompressor (line 20) | final class HuffmanCompressor
method HuffmanCompressor (line 22) | private HuffmanCompressor()
method compress4streams (line 26) | public static int compress4streams(Object outputBase, long outputAddre...
method compressSingleStream (line 84) | public static int compressSingleStream(Object outputBase, long outputA...
FILE: src/main/java/io/airlift/compress/v3/zstd/HuffmanTableWriterWorkspace.java
class HuffmanTableWriterWorkspace (line 20) | class HuffmanTableWriterWorkspace
FILE: src/main/java/io/airlift/compress/v3/zstd/NodeTable.java
class NodeTable (line 18) | class NodeTable
method NodeTable (line 25) | public NodeTable(int size)
method reset (line 33) | public void reset()
method copyNode (line 41) | public void copyNode(int from, int to)
FILE: src/main/java/io/airlift/compress/v3/zstd/RepeatedOffsets.java
class RepeatedOffsets (line 16) | class RepeatedOffsets
method getOffset0 (line 24) | public int getOffset0()
method getOffset1 (line 29) | public int getOffset1()
method saveOffset0 (line 34) | public void saveOffset0(int offset)
method saveOffset1 (line 39) | public void saveOffset1(int offset)
method commit (line 44) | public void commit()
FILE: src/main/java/io/airlift/compress/v3/zstd/SequenceEncoder.java
class SequenceEncoder (line 34) | final class SequenceEncoder
method SequenceEncoder (line 62) | private SequenceEncoder()
method compressSequences (line 66) | public static int compressSequences(Object outputBase, final long outp...
method buildCompressionTable (line 220) | private static int buildCompressionTable(FseCompressionTable table, Ob...
method encodeSequences (line 237) | private static int encodeSequences(
method selectEncodingType (line 308) | private static int selectEncodingType(
FILE: src/main/java/io/airlift/compress/v3/zstd/SequenceEncodingContext.java
class SequenceEncodingContext (line 20) | class SequenceEncodingContext
FILE: src/main/java/io/airlift/compress/v3/zstd/SequenceStore.java
class SequenceStore (line 20) | final class SequenceStore
type LongField (line 37) | public enum LongField
method SequenceStore (line 60) | public SequenceStore(int blockSize, int maxSequences)
method appendLiterals (line 75) | public void appendLiterals(Object inputBase, long inputAddress, int in...
method storeSequence (line 81) | public void storeSequence(Object literalBase, long literalAddress, int...
method reset (line 114) | public void reset()
method generateCodes (line 121) | public void generateCodes()
method literalLengthToCode (line 137) | private static int literalLengthToCode(int literalLength)
method matchLengthToCode (line 151) | private static int matchLengthToCode(int matchLengthBase)
FILE: src/main/java/io/airlift/compress/v3/zstd/UnsafeUtil.java
class UnsafeUtil (line 26) | final class UnsafeUtil
method UnsafeUtil (line 30) | private UnsafeUtil() {}
method getBase (line 48) | public static byte[] getBase(MemorySegment segment)
method getAddress (line 63) | public static long getAddress(MemorySegment segment)
FILE: src/main/java/io/airlift/compress/v3/zstd/Util.java
class Util (line 21) | final class Util
method Util (line 23) | private Util()
method highestBit (line 27) | public static int highestBit(int value)
method isPowerOf2 (line 32) | public static boolean isPowerOf2(int value)
method mask (line 37) | public static int mask(int bits)
method verify (line 42) | public static void verify(boolean condition, long offset, String reason)
method checkArgument (line 49) | public static void checkArgument(boolean condition, String reason)
method checkPositionIndexes (line 56) | static void checkPositionIndexes(int start, int end, int size)
method badPositionIndexes (line 64) | private static String badPositionIndexes(int start, int end, int size)
method badPositionIndex (line 76) | private static String badPositionIndex(int index, int size, String desc)
method checkState (line 89) | public static void checkState(boolean condition, String reason)
method fail (line 96) | public static MalformedInputException fail(long offset, String reason)
method cycleLog (line 101) | public static int cycleLog(int hashLog, CompressionParameters.Strategy...
method get24BitLittleEndian (line 110) | public static int get24BitLittleEndian(Object inputBase, long inputAdd...
method put24BitLittleEndian (line 116) | public static void put24BitLittleEndian(Object outputBase, long output...
method minTableLog (line 123) | public static int minTableLog(int inputSize, int maxSymbolValue)
FILE: src/main/java/io/airlift/compress/v3/zstd/XxHash64.java
class XxHash64 (line 29) | final class XxHash64
method XxHash64 (line 52) | public XxHash64()
method XxHash64 (line 57) | private XxHash64(long seed)
method update (line 66) | public XxHash64 update(byte[] data)
method update (line 71) | public XxHash64 update(byte[] data, int offset, int length)
method hash (line 78) | public long hash()
method computeBody (line 93) | private long computeBody()
method updateHash (line 105) | private void updateHash(Object base, long address, int length)
method updateBody (line 134) | private int updateBody(Object base, long address, int length)
method hash (line 152) | public static long hash(long value)
method hash (line 161) | public static long hash(InputStream in)
method hash (line 167) | public static long hash(long seed, InputStream in)
method hash (line 182) | public static long hash(long seed, Object base, long address, int length)
method updateTail (line 201) | private static long updateTail(long hash, Object base, long address, i...
method updateBody (line 223) | private static long updateBody(long seed, Object base, long address, i...
method mix (line 251) | private static long mix(long current, long value)
method update (line 256) | private static long update(long hash, long value)
method updateTail (line 262) | private static long updateTail(long hash, long value)
method updateTail (line 268) | private static long updateTail(long hash, int value)
method updateTail (line 275) | private static long updateTail(long hash, byte value)
method finalShuffle (line 282) | private static long finalShuffle(long hash)
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdCodec.java
class ZstdCodec (line 18) | public class ZstdCodec
method ZstdCodec (line 21) | public ZstdCodec()
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdCompressor.java
type ZstdCompressor (line 20) | public interface ZstdCompressor
method compress (line 23) | int compress(MemorySegment input, MemorySegment output);
method create (line 25) | static ZstdCompressor create()
method create (line 33) | static ZstdCompressor create(int compressionLevel)
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdDecompressor.java
type ZstdDecompressor (line 18) | public interface ZstdDecompressor
method getDecompressedSize (line 21) | long getDecompressedSize(byte[] input, int offset, int length);
method create (line 23) | static ZstdDecompressor create()
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdFrameCompressor.java
class ZstdFrameCompressor (line 35) | final class ZstdFrameCompressor
method ZstdFrameCompressor (line 47) | private ZstdFrameCompressor()
method writeMagic (line 52) | static int writeMagic(final Object outputBase, final long outputAddres...
method writeFrameHeader (line 61) | static int writeFrameHeader(final Object outputBase, final long output...
method writeChecksum (line 124) | static int writeChecksum(Object outputBase, long outputAddress, long o...
method compress (line 137) | public static int compress(Object inputBase, long inputAddress, long i...
method compressFrame (line 153) | private static int compressFrame(Object inputBase, long inputAddress, ...
method writeCompressedBlock (line 182) | static int writeCompressedBlock(Object inputBase, long input, int bloc...
method compressBlock (line 207) | private static int compressBlock(Object inputBase, long inputAddress, ...
method encodeLiterals (line 263) | private static int encodeLiterals(
method rleLiterals (line 384) | private static int rleLiterals(Object outputBase, long outputAddress, ...
method calculateMinimumGain (line 407) | private static int calculateMinimumGain(int inputSize, CompressionPara...
method rawLiterals (line 414) | private static int rawLiterals(Object outputBase, long outputAddress, ...
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdFrameDecompressor.java
class ZstdFrameDecompressor (line 59) | class ZstdFrameDecompressor
method decompress (line 135) | public int decompress(
method reset (line 213) | void reset()
method decodeRawBlock (line 224) | static int decodeRawBlock(Object inputBase, long inputAddress, int blo...
method decodeRleBlock (line 232) | static int decodeRleBlock(int size, Object inputBase, long inputAddres...
method decodeCompressedBlock (line 266) | int decodeCompressedBlock(
method decompressSequences (line 313) | private int decompressSequences(
method copyLastLiteral (line 519) | private static long copyLastLiteral(long input, Object literalsBase, l...
method copyMatch (line 528) | private static void copyMatch(Object outputBase,
method copyMatchTail (line 544) | private static void copyMatchTail(Object outputBase, long fastOutputLi...
method copyMatchHead (line 573) | private static long copyMatchHead(Object outputBase, long output, int ...
method copyLiterals (line 597) | private static long copyLiterals(Object outputBase, Object literalsBas...
method computeMatchLengthTable (line 610) | private long computeMatchLengthTable(int matchLengthType, Object input...
method computeOffsetsTable (line 638) | private long computeOffsetsTable(int offsetCodesType, Object inputBase...
method computeLiteralsTable (line 666) | private long computeLiteralsTable(int literalsLengthType, Object input...
method executeLastSequence (line 694) | private void executeLastSequence(Object outputBase, long output, long ...
method decodeCompressedLiterals (line 724) | private int decodeCompressedLiterals(Object inputBase, final long inpu...
method decodeRleLiterals (line 792) | private int decodeRleLiterals(Object inputBase, final long inputAddres...
method decodeRawLiterals (line 830) | private int decodeRawLiterals(Object inputBase, final long inputAddres...
method readFrameHeader (line 880) | static FrameHeader readFrameHeader(final Object inputBase, final long ...
method getDecompressedSize (line 960) | public static long getDecompressedSize(final Object inputBase, final l...
method verifyMagic (line 967) | static int verifyMagic(Object inputBase, long inputAddress, long input...
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdHadoopInputStream.java
class ZstdHadoopInputStream (line 23) | class ZstdHadoopInputStream
method ZstdHadoopInputStream (line 29) | public ZstdHadoopInputStream(InputStream in)
method read (line 35) | @Override
method read (line 42) | @Override
method read (line 49) | @Override
method resetState (line 56) | @Override
method close (line 62) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdHadoopOutputStream.java
class ZstdHadoopOutputStream (line 23) | class ZstdHadoopOutputStream
method ZstdHadoopOutputStream (line 30) | public ZstdHadoopOutputStream(OutputStream out)
method write (line 35) | @Override
method write (line 43) | @Override
method finish (line 51) | @Override
method flush (line 61) | @Override
method close (line 68) | @Override
method openStreamIfNecessary (line 84) | private void openStreamIfNecessary()
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdHadoopStreams.java
class ZstdHadoopStreams (line 27) | public class ZstdHadoopStreams
method getDefaultFileExtension (line 30) | @Override
method getHadoopCodecName (line 36) | @Override
method createInputStream (line 42) | @Override
method createOutputStream (line 49) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdIncrementalFrameDecompressor.java
class ZstdIncrementalFrameDecompressor (line 42) | public class ZstdIncrementalFrameDecompressor
type State (line 44) | private enum State {
method isAtStoppingPoint (line 74) | public boolean isAtStoppingPoint()
method getInputConsumed (line 79) | public int getInputConsumed()
method getOutputBufferUsed (line 84) | public int getOutputBufferUsed()
method getInputRequired (line 89) | public int getInputRequired()
method getRequestedOutputSize (line 94) | public int getRequestedOutputSize()
method partialDecompress (line 99) | public void partialDecompress(
method reset (line 282) | private void reset()
method computeFlushableOutputSize (line 290) | private int computeFlushableOutputSize(FrameHeader frameHeader)
method resizeWindowBufferIfNecessary (line 295) | private void resizeWindowBufferIfNecessary(FrameHeader frameHeader, in...
method determineFrameHeaderSize (line 348) | private static int determineFrameHeaderSize(final Object inputBase, fi...
method requestOutput (line 363) | private void requestOutput(long inputAddress, int outputOffset, long i...
method inputRequired (line 373) | private void inputRequired(long inputAddress, int outputOffset, long i...
method updateInputOutputState (line 383) | private void updateInputOutputState(long inputAddress, int outputOffse...
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdInputStream.java
class ZstdInputStream (line 26) | public class ZstdInputStream
method ZstdInputStream (line 42) | public ZstdInputStream(InputStream inputStream)
method read (line 47) | @Override
method read (line 62) | @Override
method fillInputBufferIfNecessary (line 103) | private boolean fillInputBufferIfNecessary(int requiredSize)
method available (line 132) | @Override
method close (line 142) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdJavaCompressor.java
class ZstdJavaCompressor (line 27) | public class ZstdJavaCompressor
method maxCompressedLength (line 30) | @Override
method compress (line 42) | @Override
method compress (line 54) | @Override
method verifyRange (line 81) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdJavaDecompressor.java
class ZstdJavaDecompressor (line 28) | public class ZstdJavaDecompressor
method decompress (line 33) | @Override
method decompress (line 48) | @Override
method getDecompressedSize (line 75) | @Override
method verifyRange (line 82) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdNative.java
class ZstdNative (line 25) | final class ZstdNative
method ZstdNative (line 43) | private ZstdNative() {}
method isEnabled (line 81) | public static boolean isEnabled()
method verifyEnabled (line 86) | public static void verifyEnabled()
method maxCompressedLength (line 93) | public static long maxCompressedLength(long inputLength)
method compress (line 108) | public static long compress(MemorySegment input, long inputLength, Mem...
method decompress (line 127) | public static long decompress(MemorySegment compressed, long compresse...
method decompressedLength (line 146) | public static long decompressedLength(MemorySegment compressed, long c...
method isError (line 165) | private static boolean isError(long code)
method getErrorName (line 178) | private static String getErrorName(long code)
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdNativeCompressor.java
class ZstdNativeCompressor (line 21) | public class ZstdNativeCompressor
method ZstdNativeCompressor (line 26) | public ZstdNativeCompressor()
method ZstdNativeCompressor (line 31) | public ZstdNativeCompressor(int compressionLevel)
method isEnabled (line 37) | public static boolean isEnabled()
method maxCompressedLength (line 42) | @Override
method compress (line 48) | @Override
method compress (line 56) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdNativeDecompressor.java
class ZstdNativeDecompressor (line 20) | public class ZstdNativeDecompressor
method ZstdNativeDecompressor (line 23) | public ZstdNativeDecompressor()
method isEnabled (line 28) | public static boolean isEnabled()
method decompress (line 33) | @Override
method decompress (line 41) | @Override
method getDecompressedSize (line 47) | @Override
FILE: src/main/java/io/airlift/compress/v3/zstd/ZstdOutputStream.java
class ZstdOutputStream (line 29) | public class ZstdOutputStream
method ZstdOutputStream (line 48) | public ZstdOutputStream(OutputStream outputStream)
method write (line 62) | @Override
method write (line 77) | @Override
method write (line 84) | @Override
method growBufferIfNecessary (line 106) | private void growBufferIfNecessary(int length)
method compressIfNecessary (line 121) | private void compressIfNecessary()
method finishWithoutClosingSource (line 133) | void finishWithoutClosingSource()
method close (line 142) | @Override
method writeChunk (line 154) | private void writeChunk(boolean lastChunk)
FILE: src/test/java/io/airlift/compress/v3/AbstractTestCompression.java
class AbstractTestCompression (line 39) | public abstract class AbstractTestCompression
method AbstractTestCompression (line 43) | public AbstractTestCompression()
method getCompressor (line 61) | protected abstract Compressor getCompressor();
method getDecompressor (line 63) | protected abstract Decompressor getDecompressor();
method getVerifyCompressor (line 65) | protected abstract Compressor getVerifyCompressor();
method getVerifyDecompressor (line 67) | protected abstract Decompressor getVerifyDecompressor();
method isMemorySegmentSupported (line 69) | protected boolean isMemorySegmentSupported()
method testDecompress (line 74) | @Test
method testDecompress (line 82) | void testDecompress(DataSet dataSet)
method testDecompressWithOutputPadding (line 102) | @Test
method testDecompressWithOutputPadding (line 110) | private void testDecompressWithOutputPadding(DataSet dataSet)
method testDecompressionBufferOverrun (line 131) | @Test
method testDecompressionBufferOverrun (line 139) | private void testDecompressionBufferOverrun(DataSet dataSet)
method testDecompressInputBoundsChecks (line 165) | @Test
method testDecompressOutputBoundsChecks (line 212) | @Test
method testDecompressMemorySegmentHeapToHeap (line 264) | @Test
method testDecompressMemorySegmentHeapToHeap (line 272) | void testDecompressMemorySegmentHeapToHeap(DataSet dataSet)
method testDecompressMemorySegmentHeapToDirect (line 287) | @Test
method testDecompressMemorySegmentHeapToDirect (line 295) | private void testDecompressMemorySegmentHeapToDirect(DataSet dataSet)
method testDecompressMemorySegmentDirectToHeap (line 312) | @Test
method testDecompressMemorySegmentDirectToHeap (line 320) | private void testDecompressMemorySegmentDirectToHeap(DataSet dataSet)
method testDecompressMemorySegmentDirectToDirect (line 337) | @Test
method testDecompressMemorySegmentDirectToDirect (line 345) | private void testDecompressMemorySegmentDirectToDirect(DataSet dataSet)
method testCompress (line 362) | @Test
method testCompress (line 370) | private void testCompress(DataSet testCase)
method testCompressInputBoundsChecks (line 395) | @Test
method testCompressOutputBoundsChecks (line 439) | @Test
method testCompressMemorySegmentHeapToHeap (line 490) | @Test
method testCompressMemorySegmentHeapToHeap (line 498) | private void testCompressMemorySegmentHeapToHeap(DataSet dataSet)
method testCompressMemorySegmentHeapToDirect (line 514) | @Test
method testCompressMemorySegmentHeapToDirect (line 522) | private void testCompressMemorySegmentHeapToDirect(DataSet dataSet)
method testCompressMemorySegmentDirectToHeap (line 540) | @Test
method testCompressMemorySegmentDirectToHeap (line 548) | private void testCompressMemorySegmentDirectToHeap(DataSet dataSet)
method testCompressMemorySegmentDirectToDirect (line 566) | @Test
method testCompressMemorySegmentDirectToDirect (line 574) | private void testCompressMemorySegmentDirectToDirect(DataSet dataSet)
method verifyCompressMemorySegment (line 592) | private void verifyCompressMemorySegment(Compressor compressor, Memory...
method verifyCompressedData (line 609) | private void verifyCompressedData(byte[] originalUncompressed, byte[] ...
method testRoundTripSmallLiteral (line 617) | @Test
method assertByteArraysEqual (line 650) | protected static void assertByteArraysEqual(byte[] left, int leftOffse...
method assertMemorySegmentEqual (line 666) | private static void assertMemorySegmentEqual(MemorySegment left, Memor...
method toNativeSegment (line 682) | private static MemorySegment toNativeSegment(Arena arena, byte[] data)
method prepareCompressedData (line 687) | private byte[] prepareCompressedData(byte[] uncompressed)
FILE: src/test/java/io/airlift/compress/v3/HadoopCodecCompressor.java
class HadoopCodecCompressor (line 27) | public class HadoopCodecCompressor
method HadoopCodecCompressor (line 33) | public HadoopCodecCompressor(CompressionCodec codec, Compressor blockC...
method HadoopCodecCompressor (line 38) | public HadoopCodecCompressor(CompressionCodec codec, IntUnaryOperator ...
method maxCompressedLength (line 44) | @Override
method compress (line 51) | @Override
method compress (line 69) | @Override
FILE: src/test/java/io/airlift/compress/v3/HadoopCodecDecompressor.java
class HadoopCodecDecompressor (line 24) | public class HadoopCodecDecompressor
method HadoopCodecDecompressor (line 29) | public HadoopCodecDecompressor(CompressionCodec codec)
method decompress (line 34) | @Override
method decompress (line 59) | @Override
FILE: src/test/java/io/airlift/compress/v3/HadoopCodecDecompressorByteAtATime.java
class HadoopCodecDecompressorByteAtATime (line 24) | public class HadoopCodecDecompressorByteAtATime
method HadoopCodecDecompressorByteAtATime (line 29) | public HadoopCodecDecompressorByteAtATime(CompressionCodec codec)
method decompress (line 34) | @Override
method decompress (line 60) | @Override
FILE: src/test/java/io/airlift/compress/v3/HadoopNative.java
class HadoopNative (line 26) | public final class HadoopNative
method HadoopNative (line 31) | private HadoopNative() {}
method requireHadoopNative (line 33) | public static synchronized void requireHadoopNative()
method setStatic (line 56) | private static void setStatic(Field field, Object value)
method loadLibrary (line 63) | private static void loadLibrary(String name)
method getLibraryPath (line 81) | private static String getLibraryPath(String name)
method getPlatform (line 86) | private static String getPlatform()
FILE: src/test/java/io/airlift/compress/v3/TestingData.java
class TestingData (line 23) | public final class TestingData
method TestingData (line 48) | private TestingData() {}
FILE: src/test/java/io/airlift/compress/v3/Util.java
class Util (line 18) | public final class Util
method Util (line 20) | private Util()
method toHumanReadableSpeed (line 24) | public static String toHumanReadableSpeed(long bytesPerSecond)
FILE: src/test/java/io/airlift/compress/v3/benchmark/Algorithm.java
type Algorithm (line 56) | public enum Algorithm
method Algorithm (line 89) | Algorithm(CompressionCodec compressionCodec, Compressor compressor)
method Algorithm (line 98) | Algorithm(Decompressor decompressor, Compressor compressor)
method getCompressor (line 104) | public Compressor getCompressor()
method getDecompressor (line 109) | public Decompressor getDecompressor()
FILE: src/test/java/io/airlift/compress/v3/benchmark/BytesCounter.java
class BytesCounter (line 20) | @AuxCounters
FILE: src/test/java/io/airlift/compress/v3/benchmark/CompressionBenchmark.java
class CompressionBenchmark (line 42) | @State(Scope.Thread)
method setup (line 86) | @Setup
method compress (line 103) | @Benchmark
method decompress (line 111) | @Benchmark
method main (line 119) | public static void main(String[] args)
method compressSize (line 158) | private static int compressSize(String algorithmName, String name)
FILE: src/test/java/io/airlift/compress/v3/benchmark/DataSet.java
class DataSet (line 25) | @State(Scope.Thread)
method DataSet (line 93) | public DataSet()
method DataSet (line 97) | public DataSet(String name)
method DataSet (line 102) | public DataSet(String name, byte[] uncompressed)
method loadFile (line 108) | @Setup
method getUncompressed (line 115) | public byte[] getUncompressed()
method getName (line 120) | public String getName()
method toString (line 125) | public String toString()
FILE: src/test/java/io/airlift/compress/v3/benchmark/HashBenchmark.java
class HashBenchmark (line 52) | @State(Scope.Thread)
method setup (line 65) | @Setup
method xxhash64_java (line 75) | @Benchmark
method xxhash64_java_segment (line 81) | @Benchmark
method xxhash64_native (line 89) | @Benchmark
method xxhash64_native_segment (line 95) | @Benchmark
method xxhash3_64_native (line 103) | @Benchmark
method xxhash3_64_native_segment (line 109) | @Benchmark
method xxhash3_128_native (line 117) | @Benchmark
method xxhash3_128_native_segment (line 123) | @Benchmark
method main (line 129) | public static void main(String[] args)
FILE: src/test/java/io/airlift/compress/v3/bzip2/TestBZip2Codec.java
class TestBZip2Codec (line 25) | class TestBZip2Codec
method TestBZip2Codec (line 34) | TestBZip2Codec()
method isMemorySegmentSupported (line 41) | @Override
method getCompressor (line 47) | @Override
method getDecompressor (line 53) | @Override
method getVerifyCompressor (line 59) | @Override
method getVerifyDecompressor (line 65) | @Override
method guessMaxCompressedSize (line 71) | private static int guessMaxCompressedSize(int size)
FILE: src/test/java/io/airlift/compress/v3/bzip2/TestBZip2CodecByteAtATime.java
class TestBZip2CodecByteAtATime (line 26) | class TestBZip2CodecByteAtATime
method TestBZip2CodecByteAtATime (line 35) | TestBZip2CodecByteAtATime()
method isMemorySegmentSupported (line 42) | @Override
method getCompressor (line 48) | @Override
method getDecompressor (line 54) | @Override
method getVerifyCompressor (line 60) | @Override
method getVerifyDecompressor (line 66) | @Override
method guessMaxCompressedSize (line 72) | private static int guessMaxCompressedSize(int size)
FILE: src/test/java/io/airlift/compress/v3/deflate/MockJdkDeflateCompressor.java
class MockJdkDeflateCompressor (line 20) | public class MockJdkDeflateCompressor
method maxCompressedLength (line 25) | @Override
method compress (line 31) | @Override
method compress (line 37) | @Override
FILE: src/test/java/io/airlift/compress/v3/deflate/TestDeflate.java
class TestDeflate (line 29) | public class TestDeflate
method getCompressor (line 36) | @Override
method getDecompressor (line 42) | @Override
method getVerifyCompressor (line 48) | @Override
method getVerifyDecompressor (line 54) | @Override
method testLiteralLengthOverflow (line 60) | @Test
method testMatchLengthOverflow1 (line 77) | @Test
method testMatchLengthOverflow2 (line 106) | @Test
FILE: src/test/java/io/airlift/compress/v3/deflate/TestDeflateNative.java
class TestDeflateNative (line 20) | public class TestDeflateNative
method getCompressor (line 23) | @Override
method getDecompressor (line 29) | @Override
method getVerifyCompressor (line 35) | @Override
method getVerifyDecompressor (line 41) | @Override
FILE: src/test/java/io/airlift/compress/v3/deflate/TestJdkDeflateCodec.java
class TestJdkDeflateCodec (line 25) | class TestJdkDeflateCodec
method TestJdkDeflateCodec (line 30) | TestJdkDeflateCodec()
method isMemorySegmentSupported (line 37) | @Override
method getCompressor (line 43) | @Override
method getDecompressor (line 49) | @Override
method getVerifyCompressor (line 55) | @Override
method getVerifyDecompressor (line 61) | @Override
FILE: src/test/java/io/airlift/compress/v3/deflate/TestJdkDeflateCodecByteAtATime.java
class TestJdkDeflateCodecByteAtATime (line 26) | class TestJdkDeflateCodecByteAtATime
method TestJdkDeflateCodecByteAtATime (line 31) | TestJdkDeflateCodecByteAtATime()
method isMemorySegmentSupported (line 38) | @Override
method getCompressor (line 44) | @Override
method getDecompressor (line 50) | @Override
method getVerifyCompressor (line 56) | @Override
method getVerifyDecompressor (line 62) | @Override
FILE: src/test/java/io/airlift/compress/v3/gzip/MockJdkGzipCompressor.java
class MockJdkGzipCompressor (line 20) | public class MockJdkGzipCompressor
method maxCompressedLength (line 25) | @Override
method compress (line 31) | @Override
method compress (line 37) | @Override
FILE: src/test/java/io/airlift/compress/v3/gzip/TestJdkGzipCodec.java
class TestJdkGzipCodec (line 24) | class TestJdkGzipCodec
method TestJdkGzipCodec (line 29) | TestJdkGzipCodec()
method isMemorySegmentSupported (line 36) | @Override
method getCompressor (line 42) | @Override
method getDecompressor (line 48) | @Override
method getVerifyCompressor (line 54) | @Override
method getVerifyDecompressor (line 60) | @Override
FILE: src/test/java/io/airlift/compress/v3/gzip/TestJdkGzipCodecByteAtATime.java
class TestJdkGzipCodecByteAtATime (line 25) | class TestJdkGzipCodecByteAtATime
method TestJdkGzipCodecByteAtATime (line 30) | TestJdkGzipCodecByteAtATime()
method isMemorySegmentSupported (line 37) | @Override
method getCompressor (line 43) | @Override
method getDecompressor (line 49) | @Override
method getVerifyCompressor (line 55) | @Override
method getVerifyDecompressor (line 61) | @Override
FILE: src/test/java/io/airlift/compress/v3/gzip/TestJdkGzipHadoopInputStream.java
class TestJdkGzipHadoopInputStream (line 29) | class TestJdkGzipHadoopInputStream
method testGzipInputStreamBug (line 31) | @Test
method zip (line 44) | private static byte[] zip(byte[] data)
FILE: src/test/java/io/airlift/compress/v3/internal/TestNativeLoader.java
class TestNativeLoader (line 26) | class TestNativeLoader
method testUnknownLibrary (line 28) | @Test
method testLoadSymbols (line 36) | @Test
FILE: src/test/java/io/airlift/compress/v3/lz4/AbstractTestLz4.java
class AbstractTestLz4 (line 25) | public abstract class AbstractTestLz4
method testLiteralLengthOverflow (line 28) | @Test
method testMatchLengthOverflow (line 47) | @Test
FILE: src/test/java/io/airlift/compress/v3/lz4/BenchmarkCount.java
class BenchmarkCount (line 39) | @State(Scope.Thread)
method setup (line 55) | @Setup
method count (line 71) | @Benchmark
method main (line 77) | public static void main(String[] args)
FILE: src/test/java/io/airlift/compress/v3/lz4/TestLz4.java
class TestLz4 (line 26) | class TestLz4
method getCompressor (line 29) | @Override
method getDecompressor (line 35) | @Override
method getVerifyCompressor (line 41) | @Override
method getVerifyDecompressor (line 47) | @Override
method testZeroMatchOffset (line 53) | @Test
FILE: src/test/java/io/airlift/compress/v3/lz4/TestLz4Codec.java
class TestLz4Codec (line 25) | class TestLz4Codec
method TestLz4Codec (line 34) | TestLz4Codec()
method isMemorySegmentSupported (line 41) | @Override
method getCompressor (line 47) | @Override
method getDecompressor (line 53) | @Override
method getVerifyCompressor (line 59) | @Override
method getVerifyDecompressor (line 65) | @Override
FILE: src/test/java/io/airlift/compress/v3/lz4/TestLz4CodecByteAtATime.java
class TestLz4CodecByteAtATime (line 26) | class TestLz4CodecByteAtATime
method TestLz4CodecByteAtATime (line 35) | TestLz4CodecByteAtATime()
method isMemorySegmentSupported (line 42) | @Override
method getCompressor (line 48) | @Override
method getDecompressor (line 54) | @Override
method getVerifyCompressor (line 60) | @Override
method getVerifyDecompressor (line 66) | @Override
FILE: src/test/java/io/airlift/compress/v3/lz4/TestLz4Native.java
class TestLz4Native (line 22) | class TestLz4Native
method getCompressor (line 25) | @Override
method getDecompressor (line 31) | @Override
method getVerifyCompressor (line 37) | @Override
method getVerifyDecompressor (line 43) | @Override
FILE: src/test/java/io/airlift/compress/v3/lz4/TestLz4NativeFastest.java
class TestLz4NativeFastest (line 24) | class TestLz4NativeFastest
method getCompressor (line 27) | @Override
method getDecompressor (line 33) | @Override
method getVerifyCompressor (line 39) | @Override
method getVerifyDecompressor (line 45) | @Override
FILE: src/test/java/io/airlift/compress/v3/lzo/TestLzo.java
class TestLzo (line 30) | public class TestLzo
method getCompressor (line 37) | @Override
method getDecompressor (line 43) | @Override
method getVerifyCompressor (line 49) | @Override
method getVerifyDecompressor (line 55) | @Override
method testLiteralLengthOverflow (line 61) | @Test
method testMatchLengthOverflow1 (line 78) | @Test
method testMatchLengthOverflow2 (line 107) | @Test
FILE: src/test/java/io/airlift/compress/v3/lzo/TestLzoCodec.java
class TestLzoCodec (line 26) | public class TestLzoCodec
method TestLzoCodec (line 35) | TestLzoCodec()
method isMemorySegmentSupported (line 42) | @Override
method getCompressor (line 48) | @Override
method getDecompressor (line 54) | @Override
method getVerifyCompressor (line 60) | @Override
method getVerifyDecompressor (line 66) | @Override
FILE: src/test/java/io/airlift/compress/v3/lzo/TestLzoCodecByteAtATime.java
class TestLzoCodecByteAtATime (line 27) | public class TestLzoCodecByteAtATime
method TestLzoCodecByteAtATime (line 36) | TestLzoCodecByteAtATime()
method isMemorySegmentSupported (line 43) | @Override
method getCompressor (line 49) | @Override
method getDecompressor (line 55) | @Override
method getVerifyCompressor (line 61) | @Override
method getVerifyDecompressor (line 67) | @Override
FILE: src/test/java/io/airlift/compress/v3/lzo/TestLzopCodec.java
class TestLzopCodec (line 33) | class TestLzopCodec
method TestLzopCodec (line 42) | TestLzopCodec()
method isMemorySegmentSupported (line 49) | @Override
method getCompressor (line 55) | @Override
method getDecompressor (line 61) | @Override
method getVerifyCompressor (line 67) | @Override
method getVerifyDecompressor (line 73) | @Override
method testDecompressNewerVersion (line 79) | @Test
method assertDecompressed (line 96) | private void assertDecompressed(String variant)
FILE: src/test/java/io/airlift/compress/v3/lzo/TestLzopCodecByteAtATime.java
class TestLzopCodecByteAtATime (line 27) | class TestLzopCodecByteAtATime
method TestLzopCodecByteAtATime (line 36) | TestLzopCodecByteAtATime()
method isMemorySegmentSupported (line 43) | @Override
method getCompressor (line 49) | @Override
method getDecompressor (line 55) | @Override
method getVerifyCompressor (line 61) | @Override
method getVerifyDecompressor (line 67) | @Override
FILE: src/test/java/io/airlift/compress/v3/snappy/AbstractTestSnappy.java
class AbstractTestSnappy (line 22) | public abstract class AbstractTestSnappy
method getCompressor (line 25) | @Override
method getDecompressor (line 28) | @Override
method testInvalidLiteralLength (line 31) | @Test
method testNegativeLength (line 49) | @Test
FILE: src/test/java/io/airlift/compress/v3/snappy/ByteArrayOutputStream.java
class ByteArrayOutputStream (line 20) | public final class ByteArrayOutputStream
method ByteArrayOutputStream (line 28) | public ByteArrayOutputStream(byte[] buffer)
method ByteArrayOutputStream (line 33) | public ByteArrayOutputStream(byte[] buffer, int offset, int length)
method write (line 41) | @Override
method write (line 48) | @Override
method size (line 56) | public int size()
method getBuffer (line 61) | public byte[] getBuffer()
FILE: src/test/java/io/airlift/compress/v3/snappy/RandomGenerator.java
class RandomGenerator (line 20) | class RandomGenerator
method RandomGenerator (line 25) | public RandomGenerator(double compressionRatio)
method getNextPosition (line 38) | public int getNextPosition(int length)
method compressibleData (line 49) | private static byte[] compressibleData(Random random, double compressi...
method generateRandomData (line 67) | private static byte[] generateRandomData(Random random, int length)
FILE: src/test/java/io/airlift/compress/v3/snappy/TestSnappyCodec.java
class TestSnappyCodec (line 25) | class TestSnappyCodec
method TestSnappyCodec (line 34) | TestSnappyCodec()
method isMemorySegmentSupported (line 41) | @Override
method getCompressor (line 47) | @Override
method getDecompressor (line 53) | @Override
method getVerifyCompressor (line 59) | @Override
method getVerifyDecompressor (line 65) | @Override
FILE: src/test/java/io/airlift/compress/v3/snappy/TestSnappyCodecByteAtATime.java
class TestSnappyCodecByteAtATime (line 26) | class TestSnappyCodecByteAtATime
method TestSnappyCodecByteAtATime (line 35) | TestSnappyCodecByteAtATime()
method isMemorySegmentSupported (line 42) | @Override
method getCompressor (line 48) | @Override
method getDecompressor (line 54) | @Override
method getVerifyCompressor (line 60) | @Override
method getVerifyDecompressor (line 66) | @Override
FILE: src/test/java/io/airlift/compress/v3/snappy/TestSnappyJava.java
class TestSnappyJava (line 25) | class TestSnappyJava
method getCompressor (line 28) | @Override
method getDecompressor (line 34) | @Override
method getVerifyCompressor (line 40) | @Override
method getVerifyDecompressor (line 46) | @Override
method testZeroMatchOffsetFails (line 52) | @Test
FILE: src/test/java/io/airlift/compress/v3/snappy/TestSnappyNative.java
class TestSnappyNative (line 21) | public class TestSnappyNative
method getCompressor (line 24) | @Override
method getDecompressor (line 30) | @Override
method getVerifyCompressor (line 36) | @Override
method getVerifyDecompressor (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/snappy/TestSnappyStream.java
class TestSnappyStream (line 34) | class TestSnappyStream
method getRandom (line 36) | static byte[] getRandom(double compressionRatio, int length)
method getMarkerFrame (line 45) | static byte[] getMarkerFrame()
method testSimple (line 50) | @Test
method testUncompressible (line 81) | @Test
method testEmptyCompression (line 102) | @Test
method testShortBlockHeader (line 111) | @Test
method testShortBlockData (line 119) | @Test
method testUnskippableChunkFlags (line 128) | @Test
method testSkippableChunkFlags (line 138) | @Test
method testInvalidBlockSizeZero (line 151) | @Test
method testInvalidChecksum (line 160) | @Test
method testInvalidChecksumIgnoredWhenVerificationDisabled (line 169) | @Test
method testLargerFrames_raw_ (line 179) | @Test
method testLargerFrames_compressed_ (line 208) | @Test
method testLargerFrames_compressed_smaller_raw_larger (line 239) | @Test
method blockToStream (line 270) | private static byte[] blockToStream(byte[] block)
method testLargeWrites (line 278) | @Test
method testSingleByteWrites (line 314) | @Test
method testExtraFlushes (line 335) | @Test
method testUncompressibleRange (line 358) | @Test
method testByteForByteTestData (line 376) | @Test
method testEmptyStream (line 388) | @Test
method testInvalidStreamHeader (line 396) | @Test
method testCloseIsIdempotent (line 404) | @Test
method testMarkerFrameInStream (line 432) | @Test
method blockCompress (line 467) | private static byte[] blockCompress(byte[] data)
method compress (line 476) | private static byte[] compress(byte[] original)
method uncompress (line 486) | private static byte[] uncompress(byte[] compressed)
FILE: src/test/java/io/airlift/compress/v3/thirdparty/HadoopLzoCompressor.java
class HadoopLzoCompressor (line 22) | public class HadoopLzoCompressor
method HadoopLzoCompressor (line 27) | public HadoopLzoCompressor()
method maxCompressedLength (line 33) | @Override
method compress (line 39) | @Override
method compress (line 64) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/HadoopLzoDecompressor.java
class HadoopLzoDecompressor (line 26) | public class HadoopLzoDecompressor
method HadoopLzoDecompressor (line 32) | public HadoopLzoDecompressor()
method decompress (line 37) | @Override
method decompress (line 64) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/JPountzLz4Compressor.java
class JPountzLz4Compressor (line 22) | public class JPountzLz4Compressor
method JPountzLz4Compressor (line 27) | public JPountzLz4Compressor(LZ4Factory factory)
method maxCompressedLength (line 32) | @Override
method compress (line 38) | @Override
method compress (line 44) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/JPountzLz4Decompressor.java
class JPountzLz4Decompressor (line 23) | public class JPountzLz4Decompressor
method JPountzLz4Decompressor (line 28) | public JPountzLz4Decompressor(LZ4Factory factory)
method decompress (line 33) | @Override
method decompress (line 40) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/JdkDeflateCompressor.java
class JdkDeflateCompressor (line 23) | public class JdkDeflateCompressor
method maxCompressedLength (line 26) | @Override
method compress (line 32) | @Override
method compress (line 43) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/JdkInflateDecompressor.java
class JdkInflateDecompressor (line 23) | public class JdkInflateDecompressor
method decompress (line 26) | @Override
method decompress (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/XerialSnappyCompressor.java
class XerialSnappyCompressor (line 22) | public class XerialSnappyCompressor
method maxCompressedLength (line 25) | @Override
method compress (line 31) | @Override
method compress (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/XerialSnappyDecompressor.java
class XerialSnappyDecompressor (line 22) | public class XerialSnappyDecompressor
method decompress (line 25) | @Override
method decompress (line 37) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/ZstdJniCompressor.java
class ZstdJniCompressor (line 22) | public class ZstdJniCompressor
method ZstdJniCompressor (line 27) | public ZstdJniCompressor(int level)
method maxCompressedLength (line 32) | @Override
method compress (line 38) | @Override
method compress (line 44) | @Override
FILE: src/test/java/io/airlift/compress/v3/thirdparty/ZstdJniDecompressor.java
class ZstdJniDecompressor (line 23) | public class ZstdJniDecompressor
method decompress (line 26) | @Override
method decompress (line 33) | @Override
FILE: src/test/java/io/airlift/compress/v3/xxhash/AbstractTestXxHash64.java
class AbstractTestXxHash64 (line 24) | abstract class AbstractTestXxHash64
method createHasher (line 34) | protected abstract XxHash64Hasher createHasher();
method createHasher (line 36) | protected abstract XxHash64Hasher createHasher(long seed);
method hash (line 38) | protected abstract long hash(byte[] input);
method hash (line 40) | protected abstract long hash(byte[] input, long seed);
method hash (line 42) | protected abstract long hash(byte[] input, int offset, int length);
method hash (line 44) | protected abstract long hash(MemorySegment input);
method hash (line 46) | protected abstract long hash(long value);
method hash (line 48) | protected abstract long hash(long value, long seed);
method createSanityBuffer (line 51) | protected static byte[] createSanityBuffer(int length)
method testHash64Empty (line 64) | @Test
method testHash64EmptyWithSeed (line 72) | @Test
method testHash64SanityBuffer (line 80) | @Test
method assertSanityHash64 (line 94) | private void assertSanityHash64(int length, long seed, long expected)
method testHash64WithMemorySegment (line 109) | @Test
method testHash64WithOffset (line 122) | @Test
method testStreamingMatchesOneShot (line 134) | @Test
method testStreamingMultipleUpdates (line 146) | @Test
method testStreamingWithSeed (line 162) | @Test
method testStreamingReset (line 174) | @Test
method testStreamingResetWithSeed (line 194) | @Test
method testStreamingDigestDoesNotModifyState (line 214) | @Test
method testStreamingEmpty (line 232) | @Test
method testStreamingChunkedMatchesOneShot (line 240) | @Test
method testStreamingFluentApi (line 256) | @Test
method testUpdateLELong (line 274) | @Test
method testUpdateLEInt (line 286) | @Test
method testUpdateLELengthPrefixed (line 298) | @Test
method testHashLong (line 320) | @Test
method testHashLongWithSeed (line 330) | @Test
method testHashLongKnownValues (line 341) | @Test
method testHashLongMatchesStreaming (line 353) | @Test
FILE: src/test/java/io/airlift/compress/v3/xxhash/TestXxHash3.java
class TestXxHash3 (line 25) | class TestXxHash3
method createSanityBuffer (line 42) | private static byte[] createSanityBuffer(int length)
method testIsEnabled (line 53) | @Test
method testHash64Empty (line 60) | @Test
method testHash64EmptyWithSeed (line 70) | @Test
method testHash128Empty (line 80) | @Test
method testHash128EmptyWithSeed (line 90) | @Test
method testHash64SanityBuffer (line 101) | @Test
method assertSanityHash64 (line 125) | private void assertSanityHash64(int length, long seed, long expected)
method testHash128SanityBuffer (line 140) | @Test
method assertSanityHash128 (line 163) | private void assertSanityHash128(int length, long seed, long expectedL...
method testHash64WithMemorySegment (line 179) | @Test
method testHash64WithMemorySegmentAndSeed (line 194) | @Test
method testHash128WithMemorySegment (line 210) | @Test
method testHash128WithMemorySegmentAndSeed (line 225) | @Test
method testHash64WithOffset (line 241) | @Test
method testHash128WithOffset (line 253) | @Test
method testStreaming64MatchesOneShot (line 267) | @Test
method testStreaming128MatchesOneShot (line 281) | @Test
method testStreamingMultipleUpdates64 (line 295) | @Test
method testStreamingMultipleUpdates128 (line 313) | @Test
method testStreamingWithSeed64 (line 331) | @Test
method testStreamingWithSeed128 (line 345) | @Test
method testStreamingReset (line 359) | @Test
method testStreamingResetWithSeed (line 384) | @Test
method testStreamingDigestDoesNotModifyState (line 409) | @Test
method testStreamingEmpty (line 431) | @Test
method testStreamingChunkedMatchesOneShot (line 445) | @Test
method testStreamingFluentApi (line 465) | @Test
method testUpdateLELong (line 485) | @Test
method testUpdateLEInt (line 506) | @Test
method testUpdateLELengthPrefixed (line 527) | @Test
method testHashLong (line 553) | @Test
method testHashLongWithSeed (line 565) | @Test
method testHashLongKnownValues (line 578) | @Test
method testHashLongMatchesStreaming (line 592) | @Test
FILE: src/test/java/io/airlift/compress/v3/xxhash/TestXxHash64.java
class TestXxHash64 (line 22) | class TestXxHash64
method createHasher (line 25) | @Override
method createHasher (line 31) | @Override
method hash (line 37) | @Override
method hash (line 43) | @Override
method hash (line 49) | @Override
method hash (line 55) | @Override
method hash (line 61) | @Override
method hash (line 67) | @Override
method testJavaAndNativeProduceSameOneShot (line 75) | @Test
method testJavaAndNativeProduceSameStreaming (line 87) | @Test
FILE: src/test/java/io/airlift/compress/v3/xxhash/TestXxHash64Java.java
class TestXxHash64Java (line 18) | class TestXxHash64Java
method createHasher (line 21) | @Override
method createHasher (line 27) | @Override
method hash (line 33) | @Override
method hash (line 39) | @Override
method hash (line 45) | @Override
method hash (line 51) | @Override
method hash (line 57) | @Override
method hash (line 63) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/AbstractTestZstd.java
class AbstractTestZstd (line 30) | public abstract class AbstractTestZstd
method getCompressor (line 33) | @Override
method getDecompressor (line 36) | @Override
method testDecompressWithOutputPaddingAndChecksum (line 41) | @Test
method testConcatenatedFrames (line 56) | @Test
method testInvalidSequenceOffset (line 69) | @Test
method testSmallLiteralsAfterIncompressibleLiterals (line 80) | @Test
method testLargeRle (line 100) | @Test
method testIncompressibleData (line 120) | @Test
method testMaxCompressedSize (line 140) | @Test
method testGetDecompressedSize (line 150) | @Test
method testGetDecompressedSize (line 158) | private void testGetDecompressedSize(DataSet dataSet)
method testVerifyMagicInAllFrames (line 175) | @Test
method testDecompressIsMissingData (line 186) | @Test
method testBadHuffmanData (line 195) | @Test
FILE: src/test/java/io/airlift/compress/v3/zstd/TestCompressor.java
class TestCompressor (line 22) | class TestCompressor
method testMagic (line 24) | @Test
method testMagicFailsWithSmallBuffer (line 34) | @Test
method testFrameHeaderFailsWithSmallBuffer (line 43) | @Test
method testFrameHeader (line 52) | @Test
method testMinimumWindowSize (line 72) | @Test
method testWindowSizePrecision (line 83) | @Test
method verifyFrameHeader (line 94) | private static void verifyFrameHeader(int inputSize, int windowSize, F...
FILE: src/test/java/io/airlift/compress/v3/zstd/TestUtil.java
class TestUtil (line 26) | class TestUtil
method testGet24BitLittleEndian (line 37) | @Test
method testGet24BitLittleEndian (line 45) | private static void testGet24BitLittleEndian(TestData testData)
method testPut24BitLittleEndian (line 51) | @Test
method testPut24BitLittleEndian (line 59) | private static void testPut24BitLittleEndian(TestData testData)
FILE: src/test/java/io/airlift/compress/v3/zstd/TestXxHash64.java
class TestXxHash64 (line 24) | class TestXxHash64
method TestXxHash64 (line 30) | TestXxHash64()
method testSanity (line 39) | @Test
method testMultipleLengths (line 63) | @Test
method assertHash (line 74) | private static void assertHash(long seed, byte[] data, int length, lon...
method hash (line 79) | private static long hash(long seed, byte[] data, int length)
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstd.java
class TestZstd (line 21) | public class TestZstd
method getCompressor (line 24) | @Override
method getDecompressor (line 30) | @Override
method getVerifyCompressor (line 36) | @Override
method getVerifyDecompressor (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdCodec.java
class TestZstdCodec (line 28) | class TestZstdCodec
method isMemorySegmentSupported (line 31) | @Override
method getCompressor (line 37) | @Override
method getDecompressor (line 43) | @Override
method getVerifyCompressor (line 49) | @Override
method getVerifyDecompressor (line 56) | @Override
method testConcatenatedFrames (line 63) | @Test
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdCodecByteAtATime.java
class TestZstdCodecByteAtATime (line 30) | class TestZstdCodecByteAtATime
method TestZstdCodecByteAtATime (line 35) | TestZstdCodecByteAtATime()
method isMemorySegmentSupported (line 42) | @Override
method getCompressor (line 48) | @Override
method getDecompressor (line 54) | @Override
method getVerifyCompressor (line 60) | @Override
method getVerifyDecompressor (line 67) | @Override
method testConcatenatedFrames (line 74) | @Test
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdFast.java
class TestZstdFast (line 21) | public class TestZstdFast
method getCompressor (line 24) | @Override
method getDecompressor (line 30) | @Override
method getVerifyCompressor (line 36) | @Override
method getVerifyDecompressor (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdHigh.java
class TestZstdHigh (line 21) | public class TestZstdHigh
method getCompressor (line 24) | @Override
method getDecompressor (line 30) | @Override
method getVerifyCompressor (line 36) | @Override
method getVerifyDecompressor (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdNative.java
class TestZstdNative (line 21) | public class TestZstdNative
method getCompressor (line 24) | @Override
method getDecompressor (line 30) | @Override
method getVerifyCompressor (line 36) | @Override
method getVerifyDecompressor (line 42) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdPartial.java
class TestZstdPartial (line 25) | class TestZstdPartial
method isMemorySegmentSupported (line 28) | @Override
method getCompressor (line 34) | @Override
method getDecompressor (line 40) | @Override
method getVerifyCompressor (line 46) | @Override
method getVerifyDecompressor (line 52) | @Override
method testInvalidSequenceOffset (line 58) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/TestZstdStream.java
class TestZstdStream (line 25) | class TestZstdStream
method isMemorySegmentSupported (line 28) | @Override
method getCompressor (line 34) | @Override
method getDecompressor (line 40) | @Override
method getVerifyCompressor (line 46) | @Override
method getVerifyDecompressor (line 52) | @Override
method testInvalidSequenceOffset (line 58) | @Override
method testGetDecompressedSize (line 70) | @Override
FILE: src/test/java/io/airlift/compress/v3/zstd/ZstdPartialDecompressor.java
class ZstdPartialDecompressor (line 26) | public class ZstdPartialDecompressor
method decompress (line 31) | @Override
method decompress (line 88) | @Override
method getDecompressedSize (line 95) | @Override
method verifyRange (line 102) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/test/java/io/airlift/compress/v3/zstd/ZstdStreamCompressor.java
class ZstdStreamCompressor (line 26) | public class ZstdStreamCompressor
method maxCompressedLength (line 29) | @Override
method compress (line 41) | @Override
method compress (line 68) | @Override
method verifyRange (line 74) | private static void verifyRange(byte[] data, int offset, int length)
FILE: src/test/java/io/airlift/compress/v3/zstd/ZstdStreamDecompressor.java
class ZstdStreamDecompressor (line 27) | public class ZstdStreamDecompressor
method decompress (line 30) | @Override
method decompress (line 50) | @Override
method getDecompressedSize (line 57) | @Override
method verifyRange (line 64) | private static void verifyRange(byte[] data, int offset, int length)
FILE: testdata/canterbury/fields.c
function fieldbackch (line 309) | static int fieldbackch (str, out, strip)
function fieldwrite (line 400) | int fieldwrite (file, fieldp, delim)
function fieldfree (line 420) | void fieldfree (fieldp)
Copy disabled (too large)
Download .json
Condensed preview — 294 files, each showing path, character count, and a content snippet. Download the .json file for the full structured content (34,311K chars).
[
{
"path": ".github/dependabot.yml",
"chars": 237,
"preview": "version: 2\nupdates:\n - package-ecosystem: \"github-actions\"\n directory: \"/\"\n schedule:\n interval: \"daily\"\n -"
},
{
"path": ".github/release.yml",
"chars": 455,
"preview": "changelog:\n exclude:\n labels:\n - ignore-for-release\n categories:\n - title: Breaking Changes 🛠\n labels:"
},
{
"path": ".github/workflows/main.yml",
"chars": 509,
"preview": "name: ci\n\non:\n - push\n - pull_request\n\njobs:\n build:\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n"
},
{
"path": ".github/workflows/release-2x.yml",
"chars": 3173,
"preview": "name: Release new 2.x version\n\non:\n workflow_dispatch:\n\njobs:\n release:\n runs-on: ubuntu-latest\n permissions:\n "
},
{
"path": ".github/workflows/release.yml",
"chars": 3141,
"preview": "name: Release new version\n\non:\n workflow_dispatch:\n\njobs:\n release:\n runs-on: ubuntu-latest\n permissions:\n "
},
{
"path": ".gitignore",
"chars": 220,
"preview": "target/\n/var\npom.xml.versionsBackup\ntest-output/\n/atlassian-ide-plugin.x\n.idea\n.*.swp\n.*.swo\n*~\n*.swp\n.idea\n.idea/*\n*.im"
},
{
"path": ".mvn/maven.config",
"chars": 46,
"preview": "-s\n${session.rootDirectory}/.mvn/settings.xml\n"
},
{
"path": ".mvn/settings.xml",
"chars": 559,
"preview": "<settings>\n <pluginGroups>\n <pluginGroup>eu.maveniverse.maven.plugins</pluginGroup>\n </pluginGroups>\n <s"
},
{
"path": ".mvn/wrapper/maven-wrapper.properties",
"chars": 953,
"preview": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE f"
},
{
"path": "README.md",
"chars": 7622,
"preview": "# Compression for Java\n[/../src/main/resources/aircompressor\n\ndownload_linux()\n{\n echo"
},
{
"path": "license.txt",
"chars": 11359,
"preview": "\n Apache License\n Version 2.0, January 2004\n "
},
{
"path": "mvnw",
"chars": 10665,
"preview": "#!/bin/sh\n# ----------------------------------------------------------------------------\n# Licensed to the Apache Softwa"
},
{
"path": "notice.md",
"chars": 1662,
"preview": "Snappy Copyright Notices \n=========================\n\n* Copyright 2011 Dain Sundstrom <dain@iq80.com>\n* Copyright 2011, G"
},
{
"path": "pom.xml",
"chars": 6326,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2"
},
{
"path": "src/checkstyle/checks.xml",
"chars": 5110,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE module PUBLIC\n \"-//Puppy Crawl//DTD Check Configuration 1.3//EN\""
},
{
"path": "src/license/LICENSE-HEADER.txt",
"chars": 524,
"preview": "Licensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the "
},
{
"path": "src/main/java/io/airlift/compress/v3/Compressor.java",
"chars": 1127,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/Decompressor.java",
"chars": 1080,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/IncompatibleJvmException.java",
"chars": 758,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/MalformedInputException.java",
"chars": 1018,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/BZip2Codec.java",
"chars": 804,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/BZip2Constants.java",
"chars": 1413,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopInputStream.java",
"chars": 2432,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopOutputStream.java",
"chars": 2389,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/BZip2HadoopStreams.java",
"chars": 1488,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/CBZip2InputStream.java",
"chars": 42445,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/CBZip2OutputStream.java",
"chars": 69097,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/bzip2/Crc32.java",
"chars": 5151,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateCompressor.java",
"chars": 1175,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateDecompressor.java",
"chars": 931,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateJavaCompressor.java",
"chars": 3697,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateJavaDecompressor.java",
"chars": 3940,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateNative.java",
"chars": 7200,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateNativeCompressor.java",
"chars": 2837,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/DeflateNativeDecompressor.java",
"chars": 3783,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/JdkDeflateCodec.java",
"chars": 821,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/JdkDeflateHadoopInputStream.java",
"chars": 4219,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/JdkDeflateHadoopOutputStream.java",
"chars": 2560,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/deflate/JdkDeflateHadoopStreams.java",
"chars": 1681,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/gzip/JdkGzipCodec.java",
"chars": 809,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/gzip/JdkGzipConstants.java",
"chars": 725,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/gzip/JdkGzipHadoopInputStream.java",
"chars": 2452,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/gzip/JdkGzipHadoopOutputStream.java",
"chars": 2373,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/gzip/JdkGzipHadoopStreams.java",
"chars": 1676,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/hadoop/CodecAdapter.java",
"chars": 7823,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/hadoop/CompressionInputStreamAdapter.java",
"chars": 2264,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/hadoop/CompressionOutputStreamAdapter.java",
"chars": 1815,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/hadoop/HadoopInputStream.java",
"chars": 1013,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/hadoop/HadoopOutputStream.java",
"chars": 1319,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/hadoop/HadoopStreams.java",
"chars": 1137,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/internal/NativeLoader.java",
"chars": 9107,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/internal/NativeSignature.java",
"chars": 970,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4Codec.java",
"chars": 1708,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4Compressor.java",
"chars": 1448,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4Constants.java",
"chars": 892,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4Decompressor.java",
"chars": 1078,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4HadoopInputStream.java",
"chars": 5304,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4HadoopOutputStream.java",
"chars": 3938,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4HadoopStreams.java",
"chars": 2144,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4JavaCompressor.java",
"chars": 2986,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4JavaDecompressor.java",
"chars": 2994,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4Native.java",
"chars": 5927,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4NativeCompressor.java",
"chars": 2487,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4NativeDecompressor.java",
"chars": 1602,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4RawCompressor.java",
"chars": 10950,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/Lz4RawDecompressor.java",
"chars": 8352,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lz4/UnsafeUtil.java",
"chars": 2262,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoCodec.java",
"chars": 1686,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoCompressor.java",
"chars": 2984,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoConstants.java",
"chars": 965,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoDecompressor.java",
"chars": 3068,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoHadoopInputStream.java",
"chars": 5216,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoHadoopOutputStream.java",
"chars": 3845,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoHadoopStreams.java",
"chars": 1856,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoRawCompressor.java",
"chars": 14035,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzoRawDecompressor.java",
"chars": 16522,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzopCodec.java",
"chars": 893,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzopHadoopInputStream.java",
"chars": 11192,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzopHadoopOutputStream.java",
"chars": 5647,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/LzopHadoopStreams.java",
"chars": 1880,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/lzo/UnsafeUtil.java",
"chars": 2262,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/Crc32C.java",
"chars": 34750,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyCodec.java",
"chars": 1667,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyCompressor.java",
"chars": 1082,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyConstants.java",
"chars": 956,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyDecompressor.java",
"chars": 1070,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyFramed.java",
"chars": 1186,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyFramedInputStream.java",
"chars": 10113,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyFramedOutputStream.java",
"chars": 8668,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyHadoopInputStream.java",
"chars": 5676,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyHadoopOutputStream.java",
"chars": 3903,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyHadoopStreams.java",
"chars": 2177,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyInternalUtils.java",
"chars": 5013,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyJavaCompressor.java",
"chars": 3327,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyJavaDecompressor.java",
"chars": 3423,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyNative.java",
"chars": 6940,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyNativeCompressor.java",
"chars": 1787,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyNativeDecompressor.java",
"chars": 2154,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyRawCompressor.java",
"chars": 19787,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/SnappyRawDecompressor.java",
"chars": 15101,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/snappy/UnsafeUtil.java",
"chars": 2271,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash128.java",
"chars": 650,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash3Bindings.java",
"chars": 14080,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash3Hasher.java",
"chars": 2695,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash3Hasher128.java",
"chars": 2737,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash3Native.java",
"chars": 15321,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash64Bindings.java",
"chars": 5336,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash64Hasher.java",
"chars": 4726,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash64JavaHasher.java",
"chars": 12938,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/xxhash/XxHash64NativeHasher.java",
"chars": 5043,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/BitInputStream.java",
"chars": 6475,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/BitOutputStream.java",
"chars": 2707,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/BlockCompressionState.java",
"chars": 2344,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/BlockCompressor.java",
"chars": 997,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/CompressionContext.java",
"chars": 2073,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/CompressionParameters.java",
"chars": 18307,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/Constants.java",
"chars": 3429,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/DoubleFastBlockCompressor.java",
"chars": 11608,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/FiniteStateEntropy.java",
"chars": 20615,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/FrameHeader.java",
"chars": 2760,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/FseCompressionTable.java",
"chars": 5582,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/FseTableReader.java",
"chars": 6175,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/Histogram.java",
"chars": 1796,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/Huffman.java",
"chars": 15537,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressionContext.java",
"chars": 2025,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressionTable.java",
"chars": 17110,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressionTableWorkspace.java",
"chars": 1209,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/HuffmanCompressor.java",
"chars": 5455,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/HuffmanTableWriterWorkspace.java",
"chars": 1238,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/NodeTable.java",
"chars": 1307,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/RepeatedOffsets.java",
"chars": 1139,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/SequenceEncoder.java",
"chars": 17841,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/SequenceEncodingContext.java",
"chars": 1531,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/SequenceStore.java",
"chars": 5997,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/UnsafeUtil.java",
"chars": 2275,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/Util.java",
"chars": 4397,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/XxHash64.java",
"chars": 8347,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdCodec.java",
"chars": 800,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdCompressor.java",
"chars": 1463,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdDecompressor.java",
"chars": 982,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdFrameCompressor.java",
"chars": 19348,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdFrameDecompressor.java",
"chars": 43508,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdHadoopInputStream.java",
"chars": 1756,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdHadoopOutputStream.java",
"chars": 2340,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdHadoopStreams.java",
"chars": 1578,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdIncrementalFrameDecompressor.java",
"chars": 17028,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdInputStream.java",
"chars": 4881,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdJavaCompressor.java",
"chars": 3292,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdJavaDecompressor.java",
"chars": 3369,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdNative.java",
"chars": 7096,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdNativeCompressor.java",
"chars": 2066,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdNativeDecompressor.java",
"chars": 1864,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/main/java/io/airlift/compress/v3/zstd/ZstdOutputStream.java",
"chars": 7977,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/AbstractTestCompression.java",
"chars": 26637,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/HadoopCodecCompressor.java",
"chars": 2763,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/HadoopCodecDecompressor.java",
"chars": 2121,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/HadoopCodecDecompressorByteAtATime.java",
"chars": 2158,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/HadoopNative.java",
"chars": 2770,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/TestingData.java",
"chars": 1529,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/Util.java",
"chars": 1396,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/benchmark/Algorithm.java",
"chars": 5599,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/benchmark/BytesCounter.java",
"chars": 825,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/benchmark/CompressionBenchmark.java",
"chars": 6268,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/benchmark/DataSet.java",
"chars": 3334,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/benchmark/HashBenchmark.java",
"chars": 4161,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/bzip2/TestBZip2Codec.java",
"chars": 2203,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/bzip2/TestBZip2CodecByteAtATime.java",
"chars": 2324,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/deflate/MockJdkDeflateCompressor.java",
"chars": 1306,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/deflate/TestDeflate.java",
"chars": 4040,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/deflate/TestDeflateNative.java",
"chars": 1295,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/deflate/TestJdkDeflateCodec.java",
"chars": 1986,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/deflate/TestJdkDeflateCodecByteAtATime.java",
"chars": 2085,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/gzip/MockJdkGzipCompressor.java",
"chars": 1325,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/gzip/TestJdkGzipCodec.java",
"chars": 1968,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/gzip/TestJdkGzipCodecByteAtATime.java",
"chars": 2067,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/gzip/TestJdkGzipHadoopInputStream.java",
"chars": 1782,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/internal/TestNativeLoader.java",
"chars": 5830,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/AbstractTestLz4.java",
"chars": 2479,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/BenchmarkCount.java",
"chars": 3056,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/TestLz4.java",
"chars": 1976,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/TestLz4Codec.java",
"chars": 2049,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/TestLz4CodecByteAtATime.java",
"chars": 2148,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/TestLz4Native.java",
"chars": 1426,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lz4/TestLz4NativeFastest.java",
"chars": 1519,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lzo/TestLzo.java",
"chars": 4087,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lzo/TestLzoCodec.java",
"chars": 2110,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lzo/TestLzoCodecByteAtATime.java",
"chars": 2209,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lzo/TestLzopCodec.java",
"chars": 3507,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/lzo/TestLzopCodecByteAtATime.java",
"chars": 2214,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/AbstractTestSnappy.java",
"chars": 2024,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/ByteArrayOutputStream.java",
"chars": 1786,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/RandomGenerator.java",
"chars": 2508,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/TestSnappyCodec.java",
"chars": 2076,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/TestSnappyCodecByteAtATime.java",
"chars": 2175,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/TestSnappyJava.java",
"chars": 1922,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/TestSnappyNative.java",
"chars": 1377,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/snappy/TestSnappyStream.java",
"chars": 16285,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/thirdparty/HadoopLzoCompressor.java",
"chars": 2253,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
},
{
"path": "src/test/java/io/airlift/compress/v3/thirdparty/HadoopLzoDecompressor.java",
"chars": 2512,
"preview": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance "
}
]
// ... and 94 more files (download for full content)
About this extraction
This page contains the full source code of the airlift/aircompressor GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 294 files (172.2 MB), approximately 8.4M tokens, and a symbol index with 1650 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.